From 60b90b18d6407b886ed9f12061406fd3ece05e29 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Wed, 7 Dec 2011 17:20:29 +0100 Subject: Migration message and version cleanup The @migration annotation can now be used like @deprecation. Old syntax is still supported, but deprecated. Improve wording and consistency of migration messages, migration warnings also print the version in which the change occurred now. Partially fixes SI-4990. --- test/files/neg/migration28.check | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) (limited to 'test/files') diff --git a/test/files/neg/migration28.check b/test/files/neg/migration28.check index 97146e88f7..d7dfacf3db 100644 --- a/test/files/neg/migration28.check +++ b/test/files/neg/migration28.check @@ -1,6 +1,5 @@ -migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics: -This scanRight definition has changed in 2.9. -The previous behavior can be reproduced with scanRight.reverse. +migration28.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0: +The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse. List(1,2,3,4,5).scanRight(0)(_+_) ^ one error found -- cgit v1.2.3 From a6152b4c2c0a26835e60a8ef209cca87bec8510e Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Wed, 7 Dec 2011 21:18:25 +0100 Subject: Fix reflective toolbox producing invalid bytecode Wrapper method for AST undergoing a reflective compilation has been incorrectly marked as static. This was off the radars until one day the code being compiled declared a top-level method. During flatten that method got hoisted into the wrapper module, and its invocation got translated into an instance call upon the module. This led to static wrapper method trying to call an instance method, and that blew up the bytecode verifier. More info: https://issues.scala-lang.org/browse/SI-5266. Fixes SI-5266, review by @odersky. 
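
For reference, the invocation pattern the fix moves to can be sketched in isolation: instead of calling the wrapper as if it were static, the module's singleton is fetched through its MODULE$ field and the wrapper is invoked on that instance. The object and method names below (Wrapper, run) are illustrative stand-ins for the generated wrapper module and wrapper method, not the compiler's actual naming.

  // Standalone sketch of invoking a module's method through its singleton
  // instance rather than as a static call. `Wrapper` and `run` are made-up
  // names standing in for the generated wrapper module and wrapper method.
  object Wrapper {
    def run(x: Int): Int = x + 1
  }

  object InvokeOnSingleton {
    def main(args: Array[String]): Unit = {
      val jclazz = Class.forName("Wrapper$")                        // module class
      val singleton = jclazz.getDeclaredField("MODULE$").get(null)  // its singleton instance
      val jmeth = jclazz.getDeclaredMethods.find(_.getName == "run").get
      // Passing the singleton (not null) is the crucial difference from a
      // static-style invocation of an instance method.
      println(jmeth.invoke(singleton, Integer.valueOf(2)))          // prints 3
    }
  }
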
--- src/compiler/scala/reflect/runtime/ToolBoxes.scala | 7 ++++--- test/files/run/t5239.check | 2 +- test/files/run/t5266_1.check | 2 ++ test/files/run/t5266_1.scala | 16 +++++++++++++++ test/files/run/t5266_2.check | 2 ++ test/files/run/t5266_2.scala | 17 ++++++++++++++++ test/pending/run/t5266_1.check | 2 -- test/pending/run/t5266_1.scala | 23 ---------------------- test/pending/run/t5266_2.check | 2 -- test/pending/run/t5266_2.scala | 17 ---------------- 10 files changed, 42 insertions(+), 48 deletions(-) create mode 100644 test/files/run/t5266_1.check create mode 100644 test/files/run/t5266_1.scala create mode 100644 test/files/run/t5266_2.check create mode 100644 test/files/run/t5266_2.scala delete mode 100644 test/pending/run/t5266_1.check delete mode 100644 test/pending/run/t5266_1.scala delete mode 100644 test/pending/run/t5266_2.check delete mode 100644 test/pending/run/t5266_2.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index e617239398..1114c908e6 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -42,11 +42,10 @@ trait ToolBoxes extends { self: Universe => def wrapInObject(expr: Tree, fvs: List[Symbol]): ModuleDef = { val obj = EmptyPackageClass.newModule(NoPosition, nextWrapperModuleName()) - val minfo = ClassInfoType(List(ObjectClass.tpe), new Scope, obj.moduleClass) + val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), new Scope, obj.moduleClass) obj.moduleClass setInfo minfo obj setInfo obj.moduleClass.tpe val meth = obj.moduleClass.newMethod(NoPosition, wrapperMethodName) - meth setFlag Flags.STATIC def makeParam(fv: Symbol) = meth.newValueParameter(NoPosition, fv.name) setInfo fv.tpe meth setInfo MethodType(fvs map makeParam, expr.tpe) minfo.decls enter meth @@ -92,7 +91,9 @@ trait ToolBoxes extends { self: Universe => if (settings.debug.value) println("generated: "+className) val jclazz = jClass.forName(moduleFileName(className), true, classLoader) val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get - val result = jmeth.invoke(null, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*) + val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get + val singleton = jfield.get(null) + val result = jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*) if (etpe.typeSymbol != FunctionClass(0)) result else { val applyMeth = result.getClass.getMethod("apply") diff --git a/test/files/run/t5239.check b/test/files/run/t5239.check index db5778f95b..40fe6a76e7 100644 --- a/test/files/run/t5239.check +++ b/test/files/run/t5239.check @@ -6,7 +6,7 @@ package { __wrapper$1.super.this(); () }; - def wrapper(): Int = 2 + def wrapper(): Int = 2 } } diff --git a/test/files/run/t5266_1.check b/test/files/run/t5266_1.check new file mode 100644 index 0000000000..3feac16a0b --- /dev/null +++ b/test/files/run/t5266_1.check @@ -0,0 +1,2 @@ +2 +evaluated = null \ No newline at end of file diff --git a/test/files/run/t5266_1.scala b/test/files/run/t5266_1.scala new file mode 100644 index 0000000000..18e288e685 --- /dev/null +++ b/test/files/run/t5266_1.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def x = 2 + println(x) + }; 
+ + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + val evaluated = toolbox.runExpr(ttree) + println("evaluated = " + evaluated) +} \ No newline at end of file diff --git a/test/files/run/t5266_2.check b/test/files/run/t5266_2.check new file mode 100644 index 0000000000..3feac16a0b --- /dev/null +++ b/test/files/run/t5266_2.check @@ -0,0 +1,2 @@ +2 +evaluated = null \ No newline at end of file diff --git a/test/files/run/t5266_2.scala b/test/files/run/t5266_2.scala new file mode 100644 index 0000000000..eb319583f8 --- /dev/null +++ b/test/files/run/t5266_2.scala @@ -0,0 +1,17 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def x = 2 + def y = x + println(y) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + val evaluated = toolbox.runExpr(ttree) + println("evaluated = " + evaluated) +} diff --git a/test/pending/run/t5266_1.check b/test/pending/run/t5266_1.check deleted file mode 100644 index 3feac16a0b..0000000000 --- a/test/pending/run/t5266_1.check +++ /dev/null @@ -1,2 +0,0 @@ -2 -evaluated = null \ No newline at end of file diff --git a/test/pending/run/t5266_1.scala b/test/pending/run/t5266_1.scala deleted file mode 100644 index 06a81a04ea..0000000000 --- a/test/pending/run/t5266_1.scala +++ /dev/null @@ -1,23 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def x = 2 - println(x) - }; - - val settings = new Settings - settings.debug.value = true - settings.Xshowtrees.value = true - settings.Xprint.value = List("typer") - settings.printtypes.value = true - settings.Ytyperdebug.value = true - - val reporter = new ConsoleReporter(settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) - println("evaluated = " + evaluated) -} \ No newline at end of file diff --git a/test/pending/run/t5266_2.check b/test/pending/run/t5266_2.check deleted file mode 100644 index 3feac16a0b..0000000000 --- a/test/pending/run/t5266_2.check +++ /dev/null @@ -1,2 +0,0 @@ -2 -evaluated = null \ No newline at end of file diff --git a/test/pending/run/t5266_2.scala b/test/pending/run/t5266_2.scala deleted file mode 100644 index cd841da021..0000000000 --- a/test/pending/run/t5266_2.scala +++ /dev/null @@ -1,17 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def x = 2 - def y = x - println(y) - }; - - val reporter = new ConsoleReporter(settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) - println("evaluated = " + evaluated) -} -- cgit v1.2.3 From 04d13e6071b5daa0106d80c146048a148d7fad13 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 9 Dec 2011 11:52:56 -0800 Subject: Disabled another presentation compiler test. It foiled me right on the cusp of a successful windows nightly. 
https://scala-webapps.epfl.ch/jenkins/job/scala-nightly-windows/1170/consoleText --- test/disabled/properties.check | 158 +++++++++++++++++++++ test/disabled/properties/Runner.scala | 3 + test/disabled/properties/src/properties.scala | 54 +++++++ test/files/presentation/properties.check | 158 --------------------- test/files/presentation/properties/Runner.scala | 3 - .../presentation/properties/src/properties.scala | 54 ------- 6 files changed, 215 insertions(+), 215 deletions(-) create mode 100644 test/disabled/properties.check create mode 100644 test/disabled/properties/Runner.scala create mode 100644 test/disabled/properties/src/properties.scala delete mode 100644 test/files/presentation/properties.check delete mode 100644 test/files/presentation/properties/Runner.scala delete mode 100644 test/files/presentation/properties/src/properties.scala (limited to 'test/files') diff --git a/test/disabled/properties.check b/test/disabled/properties.check new file mode 100644 index 0000000000..a721d49e3a --- /dev/null +++ b/test/disabled/properties.check @@ -0,0 +1,158 @@ +reload: properties.scala + +askTypeCompletion at properties.scala(29,33) +================================================================================ +[response] aksTypeCompletion at (29,33) +retrieved 50 members +`method !=(x$1: Any)Boolean` +`method !=(x$1: AnyRef)Boolean` +`method ##()Int` +`method +(other: String)String` +`method ->[B](y: B)(properties.Property[String], B)` +`method ==(x$1: Any)Boolean` +`method ==(x$1: AnyRef)Boolean` +`method apply()String` +`method asInstanceOf[T0]=> T0` +`method canEqual(that: Any)Boolean` +`method clone()Object` +`method ensuring(cond: Boolean)properties.Property[String]` +`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]` +`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]` +`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]` +`method eq(x$1: AnyRef)Boolean` +`method equals(x$1: Any)Boolean` +`method finalize()Unit` +`method formatted(fmtstr: String)String` +`method get(newGetter: String => String)properties.Property[String]` +`method hashCode()Int` +`method isInstanceOf[T0]=> Boolean` +`method ne(x$1: AnyRef)Boolean` +`method notify()Unit` +`method notifyAll()Unit` +`method productArity=> Int` +`method productElement(n: Int)Any` +`method productIterator=> Iterator[Any]` +`method productPrefix=> String` +`method set(newSetter: String => String)properties.Property[String]` +`method synchronized[T0](x$1: T0)T0` +`method toString()String` +`method update(newValue: String)Unit` +`method wait()Unit` +`method wait(x$1: Long)Unit` +`method wait(x$1: Long, x$2: Int)Unit` +`method x=> properties.Property[String]` +`method →[B](y: B)(properties.Property[String], B)` +`value __leftOfArrowproperties.Property[String]` +`value __resultOfEnsuringproperties.Property[String]` +`value initString` +`value selfAny` +`variable getterString => String` +`variable setterString => String` +`variable valueString` +================================================================================ + +askTypeCompletion at properties.scala(29,67) +================================================================================ +[response] aksTypeCompletion at (29,67) +retrieved 50 members +`method !=(x$1: Any)Boolean` +`method !=(x$1: AnyRef)Boolean` +`method ##()Int` +`method +(other: String)String` +`method ->[B](y: B)(properties.Property[String], B)` +`method ==(x$1: Any)Boolean` +`method ==(x$1: 
AnyRef)Boolean` +`method apply()String` +`method asInstanceOf[T0]=> T0` +`method canEqual(that: Any)Boolean` +`method clone()Object` +`method ensuring(cond: Boolean)properties.Property[String]` +`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]` +`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]` +`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]` +`method eq(x$1: AnyRef)Boolean` +`method equals(x$1: Any)Boolean` +`method finalize()Unit` +`method formatted(fmtstr: String)String` +`method get(newGetter: String => String)properties.Property[String]` +`method hashCode()Int` +`method isInstanceOf[T0]=> Boolean` +`method ne(x$1: AnyRef)Boolean` +`method notify()Unit` +`method notifyAll()Unit` +`method productArity=> Int` +`method productElement(n: Int)Any` +`method productIterator=> Iterator[Any]` +`method productPrefix=> String` +`method set(newSetter: String => String)properties.Property[String]` +`method synchronized[T0](x$1: T0)T0` +`method toString()String` +`method update(newValue: String)Unit` +`method wait()Unit` +`method wait(x$1: Long)Unit` +`method wait(x$1: Long, x$2: Int)Unit` +`method x=> properties.Property[String]` +`method →[B](y: B)(properties.Property[String], B)` +`value __leftOfArrowproperties.Property[String]` +`value __resultOfEnsuringproperties.Property[String]` +`value initString` +`value selfAny` +`variable getterString => String` +`variable setterString => String` +`variable valueString` +================================================================================ + +askTypeCompletion at properties.scala(45,10) +================================================================================ +[response] aksTypeCompletion at (45,10) +retrieved 38 members +`method !=(x$1: Any)Boolean` +`method !=(x$1: AnyRef)Boolean` +`method ##()Int` +`method +(other: String)String` +`method ->[B](y: B)(properties.User, B)` +`method ==(x$1: Any)Boolean` +`method ==(x$1: AnyRef)Boolean` +`method asInstanceOf[T0]=> T0` +`method clone()Object` +`method ensuring(cond: Boolean)properties.User` +`method ensuring(cond: Boolean, msg: => Any)properties.User` +`method ensuring(cond: properties.User => Boolean)properties.User` +`method ensuring(cond: properties.User => Boolean, msg: => Any)properties.User` +`method eq(x$1: AnyRef)Boolean` +`method equals(x$1: Any)Boolean` +`method finalize()Unit` +`method formatted(fmtstr: String)String` +`method hashCode()Int` +`method isInstanceOf[T0]=> Boolean` +`method ne(x$1: AnyRef)Boolean` +`method notify()Unit` +`method notifyAll()Unit` +`method synchronized[T0](x$1: T0)T0` +`method toString()String` +`method wait()Unit` +`method wait(x$1: Long)Unit` +`method wait(x$1: Long, x$2: Int)Unit` +`method x=> properties.User` +`method →[B](y: B)(properties.User, B)` +`value __leftOfArrowproperties.User` +`value __resultOfEnsuringproperties.User` +`value firstnameproperties.Property[String]` +`value lastnameproperties.Property[String]` +`value selfAny` +================================================================================ + +askType at properties.scala(18,28) +================================================================================ +[response] askTypeAt at (18,28) +def update(newValue: T): Unit = Property.this.value_=(Property.this.setter.apply(newValue)) +================================================================================ + +askType at properties.scala(21,31) 
+================================================================================ +[response] askTypeAt at (21,31) +def get(newGetter: T => T): properties.Property[T] = { + Property.this.getter_=(newGetter); + this +} +================================================================================ diff --git a/test/disabled/properties/Runner.scala b/test/disabled/properties/Runner.scala new file mode 100644 index 0000000000..1ef3cf9025 --- /dev/null +++ b/test/disabled/properties/Runner.scala @@ -0,0 +1,3 @@ +import scala.tools.nsc.interactive.tests._ + +object Test extends InteractiveTest \ No newline at end of file diff --git a/test/disabled/properties/src/properties.scala b/test/disabled/properties/src/properties.scala new file mode 100644 index 0000000000..35b6a92221 --- /dev/null +++ b/test/disabled/properties/src/properties.scala @@ -0,0 +1,54 @@ +/** Illustrate the use of custom 'apply/update' methods. */ +object properties { + + /** A mutable property whose getter and setter may be customized. */ + case class Property[T](init: T) { + private var value: T = init + + /** The getter function, defaults to identity. */ + private var setter: T => T = identity[T] + + /** The setter function, defaults to identity. */ + private var getter: T => T = identity[T] + + /** Retrive the value held in this property. */ + def apply(): T = getter(value) + + /** Update the value held in this property, through the setter. */ + def update(newValue: T) /*?*/ = value = setter(newValue) + + /** Change the getter. */ + def get(newGetter: T => T) /*?*/ = { getter = newGetter; this } + + /** Change the setter */ + def set(newSetter: T => T) = { setter = newSetter; this } + } + + class User { + // Create a property with custom getter and setter + val firstname = Property("")./*!*/get { v => v.toUpperCase() }./*!*/set { v => "Mr. " + v } + val lastname = Property("") + + /** Scala provides syntactic sugar for calling 'apply'. Simply + * adding a list of arguments between parenthesis (in this case, + * an empty list) is translated to a call to 'apply' with those + * arguments. 
+ */ + override def toString() = firstname() + " " + lastname() + } + + def main(args: Array[String]) { + val user1 = new User + + // Syntactic sugar for 'update': an assignment is translated to a + // call to method 'update' + user1./*!*/firstname() = "Robert" + + val user2 = new User + user2.firstname() = "bob" + user2.lastname() = "KUZ" + + println("user1: " + user1) + println("user2: " + user2) + } +} \ No newline at end of file diff --git a/test/files/presentation/properties.check b/test/files/presentation/properties.check deleted file mode 100644 index a721d49e3a..0000000000 --- a/test/files/presentation/properties.check +++ /dev/null @@ -1,158 +0,0 @@ -reload: properties.scala - -askTypeCompletion at properties.scala(29,33) -================================================================================ -[response] aksTypeCompletion at (29,33) -retrieved 50 members -`method !=(x$1: Any)Boolean` -`method !=(x$1: AnyRef)Boolean` -`method ##()Int` -`method +(other: String)String` -`method ->[B](y: B)(properties.Property[String], B)` -`method ==(x$1: Any)Boolean` -`method ==(x$1: AnyRef)Boolean` -`method apply()String` -`method asInstanceOf[T0]=> T0` -`method canEqual(that: Any)Boolean` -`method clone()Object` -`method ensuring(cond: Boolean)properties.Property[String]` -`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]` -`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]` -`method ensuring(cond: properties.Property[String] => Boolean, msg: => Any)properties.Property[String]` -`method eq(x$1: AnyRef)Boolean` -`method equals(x$1: Any)Boolean` -`method finalize()Unit` -`method formatted(fmtstr: String)String` -`method get(newGetter: String => String)properties.Property[String]` -`method hashCode()Int` -`method isInstanceOf[T0]=> Boolean` -`method ne(x$1: AnyRef)Boolean` -`method notify()Unit` -`method notifyAll()Unit` -`method productArity=> Int` -`method productElement(n: Int)Any` -`method productIterator=> Iterator[Any]` -`method productPrefix=> String` -`method set(newSetter: String => String)properties.Property[String]` -`method synchronized[T0](x$1: T0)T0` -`method toString()String` -`method update(newValue: String)Unit` -`method wait()Unit` -`method wait(x$1: Long)Unit` -`method wait(x$1: Long, x$2: Int)Unit` -`method x=> properties.Property[String]` -`method →[B](y: B)(properties.Property[String], B)` -`value __leftOfArrowproperties.Property[String]` -`value __resultOfEnsuringproperties.Property[String]` -`value initString` -`value selfAny` -`variable getterString => String` -`variable setterString => String` -`variable valueString` -================================================================================ - -askTypeCompletion at properties.scala(29,67) -================================================================================ -[response] aksTypeCompletion at (29,67) -retrieved 50 members -`method !=(x$1: Any)Boolean` -`method !=(x$1: AnyRef)Boolean` -`method ##()Int` -`method +(other: String)String` -`method ->[B](y: B)(properties.Property[String], B)` -`method ==(x$1: Any)Boolean` -`method ==(x$1: AnyRef)Boolean` -`method apply()String` -`method asInstanceOf[T0]=> T0` -`method canEqual(that: Any)Boolean` -`method clone()Object` -`method ensuring(cond: Boolean)properties.Property[String]` -`method ensuring(cond: Boolean, msg: => Any)properties.Property[String]` -`method ensuring(cond: properties.Property[String] => Boolean)properties.Property[String]` -`method ensuring(cond: properties.Property[String] => 
Boolean, msg: => Any)properties.Property[String]` -`method eq(x$1: AnyRef)Boolean` -`method equals(x$1: Any)Boolean` -`method finalize()Unit` -`method formatted(fmtstr: String)String` -`method get(newGetter: String => String)properties.Property[String]` -`method hashCode()Int` -`method isInstanceOf[T0]=> Boolean` -`method ne(x$1: AnyRef)Boolean` -`method notify()Unit` -`method notifyAll()Unit` -`method productArity=> Int` -`method productElement(n: Int)Any` -`method productIterator=> Iterator[Any]` -`method productPrefix=> String` -`method set(newSetter: String => String)properties.Property[String]` -`method synchronized[T0](x$1: T0)T0` -`method toString()String` -`method update(newValue: String)Unit` -`method wait()Unit` -`method wait(x$1: Long)Unit` -`method wait(x$1: Long, x$2: Int)Unit` -`method x=> properties.Property[String]` -`method →[B](y: B)(properties.Property[String], B)` -`value __leftOfArrowproperties.Property[String]` -`value __resultOfEnsuringproperties.Property[String]` -`value initString` -`value selfAny` -`variable getterString => String` -`variable setterString => String` -`variable valueString` -================================================================================ - -askTypeCompletion at properties.scala(45,10) -================================================================================ -[response] aksTypeCompletion at (45,10) -retrieved 38 members -`method !=(x$1: Any)Boolean` -`method !=(x$1: AnyRef)Boolean` -`method ##()Int` -`method +(other: String)String` -`method ->[B](y: B)(properties.User, B)` -`method ==(x$1: Any)Boolean` -`method ==(x$1: AnyRef)Boolean` -`method asInstanceOf[T0]=> T0` -`method clone()Object` -`method ensuring(cond: Boolean)properties.User` -`method ensuring(cond: Boolean, msg: => Any)properties.User` -`method ensuring(cond: properties.User => Boolean)properties.User` -`method ensuring(cond: properties.User => Boolean, msg: => Any)properties.User` -`method eq(x$1: AnyRef)Boolean` -`method equals(x$1: Any)Boolean` -`method finalize()Unit` -`method formatted(fmtstr: String)String` -`method hashCode()Int` -`method isInstanceOf[T0]=> Boolean` -`method ne(x$1: AnyRef)Boolean` -`method notify()Unit` -`method notifyAll()Unit` -`method synchronized[T0](x$1: T0)T0` -`method toString()String` -`method wait()Unit` -`method wait(x$1: Long)Unit` -`method wait(x$1: Long, x$2: Int)Unit` -`method x=> properties.User` -`method →[B](y: B)(properties.User, B)` -`value __leftOfArrowproperties.User` -`value __resultOfEnsuringproperties.User` -`value firstnameproperties.Property[String]` -`value lastnameproperties.Property[String]` -`value selfAny` -================================================================================ - -askType at properties.scala(18,28) -================================================================================ -[response] askTypeAt at (18,28) -def update(newValue: T): Unit = Property.this.value_=(Property.this.setter.apply(newValue)) -================================================================================ - -askType at properties.scala(21,31) -================================================================================ -[response] askTypeAt at (21,31) -def get(newGetter: T => T): properties.Property[T] = { - Property.this.getter_=(newGetter); - this -} -================================================================================ diff --git a/test/files/presentation/properties/Runner.scala b/test/files/presentation/properties/Runner.scala deleted file mode 100644 index 1ef3cf9025..0000000000 --- 
a/test/files/presentation/properties/Runner.scala +++ /dev/null @@ -1,3 +0,0 @@ -import scala.tools.nsc.interactive.tests._ - -object Test extends InteractiveTest \ No newline at end of file diff --git a/test/files/presentation/properties/src/properties.scala b/test/files/presentation/properties/src/properties.scala deleted file mode 100644 index 35b6a92221..0000000000 --- a/test/files/presentation/properties/src/properties.scala +++ /dev/null @@ -1,54 +0,0 @@ -/** Illustrate the use of custom 'apply/update' methods. */ -object properties { - - /** A mutable property whose getter and setter may be customized. */ - case class Property[T](init: T) { - private var value: T = init - - /** The getter function, defaults to identity. */ - private var setter: T => T = identity[T] - - /** The setter function, defaults to identity. */ - private var getter: T => T = identity[T] - - /** Retrive the value held in this property. */ - def apply(): T = getter(value) - - /** Update the value held in this property, through the setter. */ - def update(newValue: T) /*?*/ = value = setter(newValue) - - /** Change the getter. */ - def get(newGetter: T => T) /*?*/ = { getter = newGetter; this } - - /** Change the setter */ - def set(newSetter: T => T) = { setter = newSetter; this } - } - - class User { - // Create a property with custom getter and setter - val firstname = Property("")./*!*/get { v => v.toUpperCase() }./*!*/set { v => "Mr. " + v } - val lastname = Property("") - - /** Scala provides syntactic sugar for calling 'apply'. Simply - * adding a list of arguments between parenthesis (in this case, - * an empty list) is translated to a call to 'apply' with those - * arguments. - */ - override def toString() = firstname() + " " + lastname() - } - - def main(args: Array[String]) { - val user1 = new User - - // Syntactic sugar for 'update': an assignment is translated to a - // call to method 'update' - user1./*!*/firstname() = "Robert" - - val user2 = new User - user2.firstname() = "bob" - user2.lastname() = "KUZ" - - println("user1: " + user1) - println("user2: " + user2) - } -} \ No newline at end of file -- cgit v1.2.3 From d718a7c7f31afee174958f63d23ede87394a7a4a Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Fri, 9 Dec 2011 22:33:01 +0100 Subject: Batch files no longer swallow exit codes Usually scripts like scala.bat and scalac.bat correctly propagate exit codes from underlying Java invocations. However, if you run these scripts as follows: "cmd /c scala ...", then errorlevel gets swallowed. This simple patch fixes the aforementioned problem. Fixes SI-5295, no review. 
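
A quick way to observe the symptom from the Scala side can be sketched with scala.sys.process; the `-e` flag, a scala.bat on PATH, and the exact command line below are assumptions for illustration rather than taken from the ticket.

  // Illustrative probe (assumes Windows with scala.bat on PATH): run the
  // launcher through `cmd /c` and see whether the child's exit code survives.
  import scala.sys.process._

  object ExitCodeProbe {
    def main(args: Array[String]): Unit = {
      val code = Seq("cmd", "/c", "scala", "-e", "sys.exit(3)").!
      // With the swallowed errorlevel this reported 0; once errorlevel is
      // propagated it should report 3.
      println("observed exit code: " + code)
    }
  }
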
--- src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 1 + test/files/jvm/mkLibNatives.bat | 2 +- test/partest.bat | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) (limited to 'test/files') diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl index c59d46683e..9f1fbc4524 100644 --- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl @@ -86,3 +86,4 @@ goto :eof :end @@endlocal +exit /b %errorlevel% diff --git a/test/files/jvm/mkLibNatives.bat b/test/files/jvm/mkLibNatives.bat index e11b6ee21c..2f99f7aab5 100755 --- a/test/files/jvm/mkLibNatives.bat +++ b/test/files/jvm/mkLibNatives.bat @@ -67,4 +67,4 @@ goto end :end if "%OS%"=="Windows_NT" @endlocal - +exit /b %errorlevel% diff --git a/test/partest.bat b/test/partest.bat index 0b3f5fbf33..4c97a53122 100755 --- a/test/partest.bat +++ b/test/partest.bat @@ -101,3 +101,4 @@ goto end :end if "%OS%"=="Windows_NT" @endlocal +exit /b %errorlevel% -- cgit v1.2.3 From 5aebaac08a0debfdc366330937e3a8ecf6892f78 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 12 Dec 2011 23:17:23 -0800 Subject: Test case closes SI-4273. --- test/files/pos/t4273.scala | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 test/files/pos/t4273.scala (limited to 'test/files') diff --git a/test/files/pos/t4273.scala b/test/files/pos/t4273.scala new file mode 100644 index 0000000000..9a942e8325 --- /dev/null +++ b/test/files/pos/t4273.scala @@ -0,0 +1,8 @@ +class A { + implicit def compareComparables[T](x: T)(implicit ord: Ordering[T]) = new ord.Ops(x) + + class Bippy + implicit val bippyOrdering = new Ordering[Bippy] { def compare(x: Bippy, y: Bippy) = util.Random.nextInt } + + (new Bippy) < (new Bippy) +} \ No newline at end of file -- cgit v1.2.3 From 177baffa6133a5f3e1308f6e3f1306cfa4804ce0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 12 Dec 2011 23:21:24 -0800 Subject: Test case closes SI-4063. 
--- test/files/pos/t4063.scala | 39 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 test/files/pos/t4063.scala (limited to 'test/files') diff --git a/test/files/pos/t4063.scala b/test/files/pos/t4063.scala new file mode 100644 index 0000000000..5e19c42edc --- /dev/null +++ b/test/files/pos/t4063.scala @@ -0,0 +1,39 @@ +trait Parallel +trait Parallelizable[+ParRepr <: Parallel] + +trait PIterableLike[+T, +Repr <: Parallel] extends Parallel with Parallelizable[PIterableLike[T, Repr]] + +trait PMap[K, V] extends PIterableLike[(K, V), PMap[K, V]] +trait PSet[T] extends PIterableLike[T, PSet[T]] + +trait CIterableLike[+T, +Repr] + +trait CSet[T] extends CIterableLike[T, CSet[T]] with Parallelizable[PSet[T]] + +trait CMap[K, V] extends CIterableLike[(K, V), CMap[K, V]] with Parallelizable[PMap[K, V]] + +object Test { + var x = 0 + + def main() { + val map: CMap[Int, CSet[Int]] = new CMap[Int, CSet[Int]] {} + val set: CSet[Int] = new CSet[Int] {} + + // should infer type argument + //map.synchronized[CIterableLike[Any, Any] with Parallelizable[PIterableLike[Any, Parallel with Parallelizable[Parallel]]]] { + // or: + //map.synchronized[CIterableLike[Any, Any] with Parallelizable[PIterableLike[Any, Parallel]]] { + // or, maybe it could also infer existential types: + //map.synchronized[CIterableLike[Any, _] with Parallelizable[PIterableLike[Any, _]]] { + + map.synchronized { + if (x == 0) { + map + } else { + set + } + } + + } +} + -- cgit v1.2.3 From 6912ff828db28a4277ab78fea8266f2904bc2a6b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 12 Dec 2011 23:59:05 -0800 Subject: Fix for seq/array varargs crasher. Closes SI-4024. --- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 4 +++- test/files/run/t4024.scala | 11 +++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 91ac00d946..f319abd060 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -374,7 +374,9 @@ abstract class UnCurry extends InfoTransform assert(toArraySym != NoSymbol) def getManifest(tp: Type): Tree = { val manifestOpt = localTyper.findManifest(tp, false) - if (!manifestOpt.tree.isEmpty) manifestOpt.tree + // Don't want bottom types getting any further than this (SI-4024) + if (tp.typeSymbol.isBottomClass) getManifest(AnyClass.tpe) + else if (!manifestOpt.tree.isEmpty) manifestOpt.tree else if (tp.bounds.hi ne tp) getManifest(tp.bounds.hi) else localTyper.getManifestTree(tree.pos, tp, false) } diff --git a/test/files/run/t4024.scala b/test/files/run/t4024.scala index ef768beb99..7c62a3fc6e 100644 --- a/test/files/run/t4024.scala +++ b/test/files/run/t4024.scala @@ -5,5 +5,16 @@ object Test extends App { val m = x.getClass.getMethod("toString") assert(m.invoke(x, (Nil: List[AnyRef]): _*) == "abc") + + Test2.main(Array()) } + +object Test2 { + def main(args: Array[String]): Unit = { + val x = "abc" + val m = x.getClass.getMethod("toString") + m.invoke(x, Nil: _*) + m.invoke(x, Seq(): _*) + } +} -- cgit v1.2.3 From bf2643764614f03eb7eb820a5f0c08f6ec799254 Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Thu, 15 Dec 2011 18:14:03 -0200 Subject: Improve quality of scalacheck range tests input and output. 
Remove some dead code, activate ByOne generator again, add generators for inclusive ranges, add generators that concentrate on the boundaries, and add some print statements next to exceptions that might get eaten by out of memory errors. --- test/files/scalacheck/range.scala | 81 ++++++++++++++++++++++++++------------- 1 file changed, 55 insertions(+), 26 deletions(-) (limited to 'test/files') diff --git a/test/files/scalacheck/range.scala b/test/files/scalacheck/range.scala index 56295f204c..72979115be 100644 --- a/test/files/scalacheck/range.scala +++ b/test/files/scalacheck/range.scala @@ -12,10 +12,16 @@ class Counter(r: Range) { if (cnt % 500000000L == 0L) { println("Working: %s %d %d" format (str, cnt, x)) } - if (cnt > (Int.MaxValue.toLong + 1) * 2) - error("Count exceeds maximum possible for an Int Range") - if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x))) - error("Range wrapped: %d %s" format (x, last.toString)) + if (cnt > (Int.MaxValue.toLong + 1) * 2) { + val msg = "Count exceeds maximum possible for an Int Range: %s" format str + println(msg) // exception is likely to be eaten by an out of memory error + sys error msg + } + if ((r.step > 0 && last.exists(_ > x)) || (r.step < 0 && last.exists(_ < x))) { + val msg = "Range %s wrapped: %d %s" format (str, x, last.toString) + println(msg) // exception is likely to be eaten by an out of memory error + sys error msg + } last = Some(x) } } @@ -23,29 +29,40 @@ class Counter(r: Range) { abstract class RangeTest(kind: String) extends Properties("Range "+kind) { def myGen: Gen[Range] - val genRange = for { - start <- arbitrary[Int] - end <- arbitrary[Int] - step <- Gen.choose(1, (start - end).abs + 1) - } yield if (start < end) Range(start, end, step) else Range(start, end, -step) - - val genReasonableSizeRange = for { - start <- choose(-Int.MinValue, Int.MaxValue) - end <- choose(-Int.MinValue, Int.MaxValue) + def genReasonableSizeRange = oneOf(genArbitraryRange, genBoundaryRange) + + def genArbitraryRange = for { + start <- choose(Int.MinValue, Int.MaxValue) + end <- choose(Int.MinValue, Int.MaxValue) step <- choose(-Int.MaxValue, Int.MaxValue) } yield Range(start, end, if (step == 0) 100 else step) - val genSmallRange = for { + def genBoundaryRange = for { + boundary <- oneOf(Int.MinValue, -1, 0, 1, Int.MaxValue) + isStart <- arbitrary[Boolean] + size <- choose(1, 100) + step <- choose(1, 101) + } yield { + val signum = if (boundary == 0) 1 else boundary.signum + if (isStart) Range(boundary, boundary - size * boundary.signum, - step * signum) + else Range(boundary - size * boundary.signum, boundary, step * signum) + } + + + def genSmallRange = for { start <- choose(-100, 100) end <- choose(-100, 100) step <- choose(1, 1) } yield if (start < end) Range(start, end, step) else Range(start, end, -step) - val genRangeByOne = for { - start <- arbitrary[Int] - end <- arbitrary[Int] - if (end.toLong - start.toLong).abs <= 10000000L - } yield if (start < end) Range(start, end) else Range(end, start) + def genRangeByOne = oneOf(genRangeOpenByOne, genRangeClosedByOne) + + def genRangeOpenByOne = for { + r <- oneOf(genSmallRange, genBoundaryRange) + if (r.end.toLong - r.start.toLong).abs <= 10000000L + } yield if (r.start < r.end) Range(r.start, r.end) else Range(r.end, r.start) + + def genRangeClosedByOne = for (r <- genRangeOpenByOne) yield r.start to r.end def str(r: Range) = "Range["+r.start+", "+r.end+", "+r.step+(if (r.isInclusive) "]" else ")") @@ -71,7 +88,8 @@ abstract class RangeTest(kind: String) extends 
Properties("Range "+kind) { def multiple(r: Range, x: Int) = (x.toLong - r.start) % r.step == 0 - property("foreach.step") = forAll(myGen) { r => + property("foreach.step") = forAllNoShrink(myGen) { r => +// println("foreach.step "+str(r)) var allValid = true val cnt = new Counter(r) // println("--------------------") @@ -84,6 +102,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { } property("foreach.inside.range") = forAll(myGen) { r => +// println("foreach.inside.range "+str(r)) var allValid = true var last: Option[Int] = None val cnt = new Counter(r) @@ -94,6 +113,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { } property("foreach.visited.size") = forAll(myGen) { r => +// println("foreach.visited.size "+str(r)) var visited = 0L val cnt = new Counter(r) r foreach { x => cnt(x) @@ -108,14 +128,17 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { } property("length") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r => +// println("length "+str(r)) (r.length == expectedSize(r)) :| str(r) } property("isEmpty") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r => +// println("isEmpty "+str(r)) (r.isEmpty == (expectedSize(r) == 0L)) :| str(r) } property("contains") = forAll(myGen, arbInt.arbitrary) { (r, x) => +// println("contains "+str(r)) // println("----------------") // println(str(r)) // println(x) @@ -126,11 +149,13 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { } property("take") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) => +// println("take "+str(r)) val t = r take x (t.size == (0 max x min r.size) && t.start == r.start && t.step == r.step) :| str(r)+" / "+str(t)+": "+x } property("init") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r))) { r => +// println("init "+str(r)) (r.size == 0) || { val t = r.init (t.size + 1 == r.size) && (t.isEmpty || t.head == r.head) @@ -138,6 +163,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { } property("takeWhile") = forAll(myGen suchThat (r => expectedSize(r).toInt == expectedSize(r)), arbInt.arbitrary) { (r, x) => +// println("takeWhile "+str(r)) val t = (if (r.step > 0) r takeWhile (_ <= x) else r takeWhile(_ >= x)) if (r.size == 0) { (t.size == 0) :| str(r)+" / "+str(t)+": "+x @@ -148,6 +174,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { } property("reverse.toSet.equal") = forAll(myGen) { r => +// println("reverse.toSet.equal "+str(r)) val reversed = r.reverse val aresame = r.toSet == reversed.toSet if (!aresame) { @@ -157,7 +184,7 @@ abstract class RangeTest(kind: String) extends Properties("Range "+kind) { println(r.toSet) println(reversed.toSet) } - aresame + aresame :| str(r) } } @@ -178,11 +205,11 @@ object InclusiveRangeTest extends RangeTest("inclusive") { } object ByOneRangeTest extends RangeTest("byOne") { - override def myGen = genSmallRange + override def myGen = genRangeByOne } object InclusiveByOneRangeTest extends RangeTest("inclusiveByOne") { - override def myGen = for (r <- genSmallRange) yield r.inclusive + override def myGen = for (r <- genRangeByOne) yield r.inclusive } object SmallValuesRange extends RangeTest("smallValues") { @@ -207,9 +234,11 @@ object TooLargeRange extends Properties("Too Large Range") { object Test extends Properties("Range") { import org.scalacheck.{ Test => STest } - List(NormalRangeTest, 
InclusiveRangeTest, ByOneRangeTest, InclusiveByOneRangeTest, TooLargeRange) foreach { ps => - STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), ps) - } + include(NormalRangeTest) + include(InclusiveRangeTest) + include(ByOneRangeTest) + include(InclusiveByOneRangeTest) + include(TooLargeRange) } /* Mini-benchmark -- cgit v1.2.3 From 2f5f7c16870ae1fa97bbca1642659ab8c104b442 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 15 Dec 2011 14:19:45 -0800 Subject: Fixed scalacheck test to fail if it's failing. --- test/files/scalacheck/CheckEither.scala | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) (limited to 'test/files') diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala index a7e50877a7..0145d3321f 100644 --- a/test/files/scalacheck/CheckEither.scala +++ b/test/files/scalacheck/CheckEither.scala @@ -8,7 +8,7 @@ import org.scalacheck.Test.{Params, check} import org.scalacheck.ConsoleReporter.testStatsEx import Function.tupled -object CheckEither extends Properties("Either") { +object Test extends Properties("Either") { implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = Arbitrary[Either[X, Y]](oneOf(arbitrary[X].map(Left(_)), arbitrary[Y].map(Right(_)))) @@ -186,9 +186,3 @@ object CheckEither extends Properties("Either") { STest.checkProperties(STest.Params(testCallback = ConsoleReporter(0)), this) } } - -object Test { - def main(args: Array[String]): Unit = { - CheckEither.runTests() - } -} -- cgit v1.2.3 From ab07db12cc09fd34cfab5abca9dd0f01df5f77a5 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Sun, 18 Dec 2011 12:05:12 -0500 Subject: unzip(3) on view now returns view. * Added unzip and unzip3 to TraversableViewLike * Added partest tests for unzip on views returning specific collection types. Closes SI-5053 Review by @paulp --- .../scala/collection/TraversableViewLike.scala | 6 ++++++ test/files/run/t5053.check | 6 ++++++ test/files/run/t5053.scala | 20 ++++++++++++++++++++ 3 files changed, 32 insertions(+) create mode 100644 test/files/run/t5053.check create mode 100644 test/files/run/t5053.scala (limited to 'test/files') diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index 60870cc835..fbecad98fe 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -192,6 +192,12 @@ trait TraversableViewLike[+A, override def groupBy[K](f: A => K): immutable.Map[K, This] = thisSeq groupBy f mapValues (xs => newForced(xs)) + override def unzip[A1, A2](implicit asPair: A => (A1, A2)) = + (newMapped(x => asPair(x)._1), newMapped(x => asPair(x)._2)) // TODO - Performance improvements. + + override def unzip3[A1, A2, A3](implicit asTriple: A => (A1, A2, A3)) = + (newMapped(x => asTriple(x)._1), newMapped(x => asTriple(x)._2), newMapped(x => asTriple(x)._3)) // TODO - Performance improvements. 
+ override def toString = viewToString } diff --git a/test/files/run/t5053.check b/test/files/run/t5053.check new file mode 100644 index 0000000000..5ec39bbdeb --- /dev/null +++ b/test/files/run/t5053.check @@ -0,0 +1,6 @@ +true +true +true +true +true +true diff --git a/test/files/run/t5053.scala b/test/files/run/t5053.scala new file mode 100644 index 0000000000..e46dad5ac6 --- /dev/null +++ b/test/files/run/t5053.scala @@ -0,0 +1,20 @@ +object Test extends App { + { + val (left, right) = Seq((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip + println(left.isInstanceOf[scala.collection.SeqViewLike[_,_,_]]) + val (l, m, r) = Seq((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3 + println(l.isInstanceOf[scala.collection.SeqViewLike[_,_,_]]) + } + { + val (left, right) = Iterable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip + println(left.isInstanceOf[scala.collection.IterableViewLike[_,_,_]]) + val (l, m, r) = Iterable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3 + println(l.isInstanceOf[scala.collection.IterableViewLike[_,_,_]]) + } + { + val (left, right) = Traversable((1, "a"), (1, "a"), (1, "a"), (3, "c")).view.unzip + println(left.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]]) + val (l, m, r) = Traversable((1, 1.0, "a"), (1, 1.0, "a"), (1, 1.0, "a"), (3, 3.0, "c")).view.unzip3 + println(l.isInstanceOf[scala.collection.TraversableViewLike[_,_,_]]) + } +} -- cgit v1.2.3 From 832e3179cb2d0e3dbf1ff63234ec0cbc36a2b2fe Mon Sep 17 00:00:00 2001 From: aleksandar Date: Mon, 19 Dec 2011 15:21:59 +0100 Subject: Fix #5293 - changed the way hashcode is improved in hash sets. The hash code is further improved by using a special value in the hash sets called a `seed`. For sequential hash tables, this value depends on the size of the hash table. It determines the number of bits the hashcode should be rotated. This ensures that hash tables with different sizes use different bits to compute the position of the element. This way traversing the elements of the source hash table will yield them in the order where they had similar hashcodes (and hence, positions) in the source table, but different ones in the destination table. Ideally, in the future we want to be able to have a family of hash functions and assign a different hash function from that family to each hash table instance. That would statistically almost completely eliminate the possibility that the hash table element traversal causes excessive collisions. I should probably @mention extempore here. 
--- .../scala/collection/mutable/FlatHashTable.scala | 99 ++++++++++++++++------ .../collection/parallel/mutable/ParHashSet.scala | 13 +-- test/files/jvm/serialization.check | 8 +- test/files/run/t5293.scala | 83 ++++++++++++++++++ 4 files changed, 168 insertions(+), 35 deletions(-) create mode 100644 test/files/run/t5293.scala (limited to 'test/files') diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index 0740d97e09..f3fb6738eb 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -24,7 +24,7 @@ package mutable trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { import FlatHashTable._ - private final val tableDebug = false + private final def tableDebug = false @transient private[collection] var _loadFactor = defaultLoadFactor @@ -43,11 +43,19 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { /** The array keeping track of number of elements in 32 element blocks. */ @transient protected var sizemap: Array[Int] = null - + + @transient var seedvalue: Int = tableSizeSeed + import HashTable.powerOfTwo + protected def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize) + private def initialCapacity = capacity(initialSize) - + + protected def randomSeed = seedGenerator.get.nextInt() + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + /** * Initializes the collection from the input stream. `f` will be called for each element * read from the input stream in the order determined by the stream. This is useful for @@ -57,23 +65,25 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { */ private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) { in.defaultReadObject - - _loadFactor = in.readInt + + _loadFactor = in.readInt() assert(_loadFactor > 0) - - val size = in.readInt + + val size = in.readInt() tableSize = 0 assert(size >= 0) - + table = new Array(capacity(sizeForThreshold(size, _loadFactor))) threshold = newThreshold(_loadFactor, table.size) - - val smDefined = in.readBoolean + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() if (smDefined) sizeMapInit(table.length) else sizemap = null - + var index = 0 while (index < size) { - val elem = in.readObject.asInstanceOf[A] + val elem = in.readObject().asInstanceOf[A] f(elem) addEntry(elem) index += 1 @@ -89,6 +99,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { out.defaultWriteObject out.writeInt(_loadFactor) out.writeInt(tableSize) + out.writeInt(seedvalue) out.writeBoolean(isSizeMapDefined) iterator.foreach(out.writeObject) } @@ -125,6 +136,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { if (entry == elem) return false h = (h + 1) % table.length entry = table(h) + //Statistics.collisions += 1 } table(h) = elem.asInstanceOf[AnyRef] tableSize = tableSize + 1 @@ -185,6 +197,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { table = new Array[AnyRef](table.length * 2) tableSize = 0 nnSizeMapReset(table.length) + seedvalue = tableSizeSeed threshold = newThreshold(_loadFactor, table.length) var i = 0 while (i < oldtable.length) { @@ -280,10 +293,24 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { /* End of size map handling code */ protected final def index(hcode: Int) = { + // version 1 (no longer used - did not work with parallel hash tables) // improve(hcode) & (table.length - 1) - val improved = improve(hcode) + + 
// version 2 (allows for parallel hash table construction) + val improved = improve(hcode, seedvalue) val ones = table.length - 1 (improved >>> (32 - java.lang.Integer.bitCount(ones))) & ones + + // version 3 (solves SI-5293 in most cases, but such a case would still arise for parallel hash tables) + // val hc = improve(hcode) + // val bbp = blockbitpos + // val ones = table.length - 1 + // val needed = Integer.bitCount(ones) + // val blockbits = ((hc >>> bbp) & 0x1f) << (needed - 5) + // val rest = ((hc >>> (bbp + 5)) << bbp) | (((1 << bbp) - 1) & hc) + // val restmask = (1 << (needed - 5)) - 1 + // val improved = blockbits | (rest & restmask) + // improved } protected def clearTable() { @@ -298,6 +325,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { table, tableSize, threshold, + seedvalue, sizemap ) @@ -307,6 +335,7 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { table = c.table tableSize = c.tableSize threshold = c.threshold + seedvalue = c.seedvalue sizemap = c.sizemap } if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild @@ -315,21 +344,30 @@ trait FlatHashTable[A] extends FlatHashTable.HashUtils[A] { } - private[collection] object FlatHashTable { - + + /** Creates a specific seed to improve hashcode of a hash table instance + * and ensure that iteration order vulnerabilities are not 'felt' in other + * hash tables. + * + * See SI-5293. + */ + final def seedGenerator = new ThreadLocal[util.Random] { + override def initialValue = new util.Random + } + /** The load factor for the hash table; must be < 500 (0.5) */ - private[collection] def defaultLoadFactor: Int = 450 - private[collection] final def loadFactorDenum = 1000 + def defaultLoadFactor: Int = 450 + final def loadFactorDenum = 1000 /** The initial size of the hash table. 
*/ - private[collection] def initialSize: Int = 16 + def initialSize: Int = 32 - private[collection] def sizeForThreshold(size: Int, _loadFactor: Int) = (size.toLong * loadFactorDenum / _loadFactor).toInt + def sizeForThreshold(size: Int, _loadFactor: Int) = math.max(32, (size.toLong * loadFactorDenum / _loadFactor).toInt) - private[collection] def newThreshold(_loadFactor: Int, size: Int) = { + def newThreshold(_loadFactor: Int, size: Int) = { val lf = _loadFactor assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5") (size.toLong * lf / loadFactorDenum ).toInt @@ -340,6 +378,7 @@ private[collection] object FlatHashTable { val table: Array[AnyRef], val tableSize: Int, val threshold: Int, + val seedvalue: Int, val sizemap: Array[Int] ) @@ -352,16 +391,24 @@ private[collection] object FlatHashTable { if (elem == null) throw new IllegalArgumentException("Flat hash tables cannot contain null elements.") else elem.hashCode() - protected final def improve(hcode: Int) = { - // var h: Int = hcode + ~(hcode << 9) - // h = h ^ (h >>> 14) - // h = h + (h << 4) - // h ^ (h >>> 10) + protected final def improve(hcode: Int, seed: Int) = { + //var h: Int = hcode + ~(hcode << 9) + //h = h ^ (h >>> 14) + //h = h + (h << 4) + //h ^ (h >>> 10) + var i = hcode * 0x9e3775cd i = java.lang.Integer.reverseBytes(i) - i * 0x9e3775cd + val improved = i * 0x9e3775cd + + // for the remainder, see SI-5293 + // to ensure that different bits are used for different hash tables, we have to rotate based on the seed + val rotation = seed % 32 + val rotated = (improved >>> rotation) | (improved << (32 - rotation)) + rotated } } } + diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 9dbc7dc6c4..7763cdf318 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -119,10 +119,11 @@ with collection.mutable.FlatHashTable.HashUtils[T] { import collection.parallel.tasksupport._ private var mask = ParHashSetCombiner.discriminantmask private var nonmasklen = ParHashSetCombiner.nonmasklength - + private var seedvalue = 27 + def +=(elem: T) = { sz += 1 - val hc = improve(elemHashCode(elem)) + val hc = improve(elemHashCode(elem), seedvalue) val pos = hc >>> nonmasklen if (buckets(pos) eq null) { // initialize bucket @@ -140,7 +141,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] { private def parPopulate: FlatHashTable.Contents[T] = { // construct it in parallel - val table = new AddingFlatHashTable(size, tableLoadFactor) + val table = new AddingFlatHashTable(size, tableLoadFactor, seedvalue) val (inserted, leftovers) = executeAndWaitResult(new FillBlocks(buckets, table, 0, buckets.length)) var leftinserts = 0 for (elem <- leftovers) leftinserts += table.insertEntry(0, table.tableLength, elem.asInstanceOf[T]) @@ -153,6 +154,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] { // TODO parallelize by keeping separate size maps and merging them val tbl = new FlatHashTable[T] { sizeMapInit(table.length) + seedvalue = ParHashSetCombiner.this.seedvalue } for { buffer <- buckets; @@ -168,13 +170,13 @@ with collection.mutable.FlatHashTable.HashUtils[T] { * it has to take and allocates the underlying hash table in advance. * Elements can only be added to it. The final size has to be adjusted manually. * It is internal to `ParHashSet` combiners. 
- * */ - class AddingFlatHashTable(numelems: Int, lf: Int) extends FlatHashTable[T] { + class AddingFlatHashTable(numelems: Int, lf: Int, inseedvalue: Int) extends FlatHashTable[T] { _loadFactor = lf table = new Array[AnyRef](capacity(FlatHashTable.sizeForThreshold(numelems, _loadFactor))) tableSize = 0 threshold = FlatHashTable.newThreshold(_loadFactor, table.length) + seedvalue = inseedvalue sizeMapInit(table.length) override def toString = "AFHT(%s)".format(table.length) @@ -310,6 +312,7 @@ with collection.mutable.FlatHashTable.HashUtils[T] { } + private[parallel] object ParHashSetCombiner { private[mutable] val discriminantbits = 5 private[mutable] val numblocks = 1 << discriminantbits diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index 8704bcc643..15708f0c3b 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -160,8 +160,8 @@ x = Map(C -> 3, B -> 2, A -> 1) y = Map(C -> 3, A -> 1, B -> 2) x equals y: true, y equals x: true -x = Set(layers, title, buffers) -y = Set(layers, title, buffers) +x = Set(buffers, title, layers) +y = Set(buffers, title, layers) x equals y: true, y equals x: true x = History() @@ -279,8 +279,8 @@ x = ParHashMap(1 -> 2, 2 -> 4) y = ParHashMap(1 -> 2, 2 -> 4) x equals y: true, y equals x: true -x = ParHashSet(2, 1, 3) -y = ParHashSet(2, 1, 3) +x = ParHashSet(1, 2, 3) +y = ParHashSet(1, 2, 3) x equals y: true, y equals x: true x = ParRange(0, 1, 2, 3, 4) diff --git a/test/files/run/t5293.scala b/test/files/run/t5293.scala new file mode 100644 index 0000000000..de1efaec4a --- /dev/null +++ b/test/files/run/t5293.scala @@ -0,0 +1,83 @@ + + + +import scala.collection.JavaConverters._ + + + +object Test extends App { + + def bench(label: String)(body: => Unit): Long = { + val start = System.nanoTime + + 0.until(10).foreach(_ => body) + + val end = System.nanoTime + + //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0)) + + end - start + } + + def benchJava(values: java.util.Collection[Int]) = { + bench("Java Set") { + val set = new java.util.HashSet[Int] + + set.addAll(values) + } + } + + def benchScala(values: Iterable[Int]) = { + bench("Scala Set") { + val set = new scala.collection.mutable.HashSet[Int] + + set ++= values + } + } + + def benchScalaSorted(values: Iterable[Int]) = { + bench("Scala Set sorted") { + val set = new scala.collection.mutable.HashSet[Int] + + set ++= values.toArray.sorted + } + } + + def benchScalaPar(values: Iterable[Int]) = { + bench("Scala ParSet") { + val set = new scala.collection.parallel.mutable.ParHashSet[Int] map { x => x } + + set ++= values + } + } + + val values = 0 until 50000 + val set = scala.collection.mutable.HashSet.empty[Int] + + set ++= values + + // warmup + for (x <- 0 until 5) { + benchJava(set.asJava) + benchScala(set) + benchScalaPar(set) + benchJava(set.asJava) + benchScala(set) + benchScalaPar(set) + } + + val javaset = benchJava(set.asJava) + val scalaset = benchScala(set) + val scalaparset = benchScalaPar(set) + + assert(scalaset < (javaset * 4)) + assert(scalaparset < (javaset * 4)) +} + + + + + + + + -- cgit v1.2.3 From 6226468fdf81d46f005b97fb49c4ec08c856ea3f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 19 Dec 2011 21:20:52 -0800 Subject: Test case closes SI-5119. 
--- test/files/pos/t5119.scala | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 test/files/pos/t5119.scala (limited to 'test/files') diff --git a/test/files/pos/t5119.scala b/test/files/pos/t5119.scala new file mode 100644 index 0000000000..4a67244e50 --- /dev/null +++ b/test/files/pos/t5119.scala @@ -0,0 +1,13 @@ +import collection.mutable + +object Test { + class IMap0[K[_], V[_]](backing: Map[K[_], V[_]]) { + def mapSeparate[VL[_], VR[_]](f: V[_] => ({type l[T] = Either[VL[T], VR[T]]})#l[_] ) = { + backing.view.map { case (k,v) => f(v) match { + case Left(l) => Left((k, l)) + case Right(r) => Right((k, r)) + } + } + } + } +} -- cgit v1.2.3 From bba3b00cf737528de9dcb4823806d6928a00474e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 19 Dec 2011 22:37:58 -0800 Subject: Fix for classOf NPE. Let type parameter be inferred. Closes SI-4871. --- .../scala/reflect/internal/Definitions.scala | 10 +++++++ .../scala/tools/nsc/typechecker/Implicits.scala | 11 +------ .../scala/tools/nsc/typechecker/Typers.scala | 34 +++++++++++++++------- test/files/run/t4871.check | 2 ++ test/files/run/t4871.scala | 12 ++++++++ 5 files changed, 49 insertions(+), 20 deletions(-) create mode 100644 test/files/run/t4871.check create mode 100644 test/files/run/t4871.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 4d71d2a769..6ee9347aab 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -232,6 +232,16 @@ trait Definitions extends reflect.api.StandardDefinitions { def Predef_identity = getMember(PredefModule, nme.identity) def Predef_conforms = getMember(PredefModule, nme.conforms) def Predef_wrapRefArray = getMember(PredefModule, nme.wrapRefArray) + + /** Is `sym` a member of Predef with the given name? + * Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def` + * which does a member lookup (it can't be a lazy val because we might reload Predef + * during resident compilations). + */ + def isPredefMemberNamed(sym: Symbol, name: Name) = ( + (sym.name == name) && (sym.owner == PredefModule.moduleClass) + ) + lazy val ConsoleModule: Symbol = getModule("scala.Console") lazy val ScalaRunTimeModule: Symbol = getModule("scala.runtime.ScalaRunTime") lazy val SymbolModule: Symbol = getModule("scala.Symbol") diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 92be241951..d54cb248cf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -740,7 +740,7 @@ trait Implicits { ) private def isIneligible(info: ImplicitInfo) = ( info.isCyclicOrErroneous - || isView && isConforms(info.sym) + || isView && isPredefMemberNamed(info.sym, nme.conforms) || isShadowed(info.name) ) @@ -760,15 +760,6 @@ trait Implicits { */ private def checkValid(sym: Symbol) = isValid(sym) || { invalidImplicits += sym ; false } - /** Is `sym` the standard conforms method in Predef? - * Note: DON't replace this by sym == Predef_conforms, as Predef_conforms is a `def` - * which does a member lookup (it can't be a lazy val because we might reload Predef - * during resident compilations). 
- */ - private def isConforms(sym: Symbol) = ( - (sym.name == nme.conforms) && (sym.owner == PredefModule.moduleClass) - ) - /** Preventing a divergent implicit from terminating implicit search, * so that if there is a best candidate it can still be selected. */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9b03d59216..341e1bc5ea 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2950,6 +2950,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { new DeSkolemizeMap mapOver tp } + def typedClassOf(tree: Tree, tpt: Tree) = { + checkClassType(tpt, true, false) + atPos(tree.pos)(gen.mkClassOf(tpt.tpe)) + } + protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = { for (wc <- tree.whereClauses) if (wc.symbol == NoSymbol) { namer.enterSym(wc); wc.symbol setFlag EXISTENTIAL } @@ -2989,10 +2994,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (sameLength(tparams, args)) { val targs = args map (_.tpe) checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "") - if (fun.symbol == Predef_classOf) { - checkClassType(args.head, true, false) - atPos(tree.pos) { gen.mkClassOf(targs.head) } - } else { + if (fun.symbol == Predef_classOf) + typedClassOf(tree, args.head) + else { if (!isPastTyper && fun.symbol == Any_isInstanceOf && !targs.isEmpty) checkCheckable(tree.pos, targs.head, "") val resultpe = restpe.instantiateTypeParams(tparams, targs) @@ -3769,7 +3773,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { reallyExists(sym) && ((mode & PATTERNmode | FUNmode) != (PATTERNmode | FUNmode) || !sym.isSourceMethod || sym.hasFlag(ACCESSOR)) } - + if (defSym == NoSymbol) { var defEntry: ScopeEntry = null // the scope entry of defSym, if defined in a local scope @@ -3900,13 +3904,23 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } } } - if (defSym.owner.isPackageClass) pre = defSym.owner.thisType + if (defSym.owner.isPackageClass) + pre = defSym.owner.thisType + + // Inferring classOf type parameter from expected type. if (defSym.isThisSym) { typed1(This(defSym.owner) setPos tree.pos, mode, pt) - } else { - val tree1 = if (qual == EmptyTree) tree - else atPos(tree.pos)(Select(qual, name)) - // atPos necessary because qualifier might come from startContext + } + // Inferring classOf type parameter from expected type. Otherwise an + // actual call to the stubbed classOf method is generated, returning null. + else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) + typedClassOf(tree, TypeTree(pt.typeArgs.head)) + else { + val tree1 = ( + if (qual == EmptyTree) tree + // atPos necessary because qualifier might come from startContext + else atPos(tree.pos)(Select(qual, name)) + ) val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual) // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right? 
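The `typedClassOf` hook wired into the `TypeApply` and `Ident` cases above means a bare `classOf` now gets its type argument from the expected type instead of degrading into the stubbed `Predef.classOf` call that returns null (the SI-4871 symptom). A user-level sketch of the behaviour the new test pins down (the `Widget` class is just for the example):

object ClassOfInference {
  class Widget

  def main(args: Array[String]): Unit = {
    val explicit: Class[Widget] = classOf[Widget] // always worked
    val inferred: Class[Widget] = classOf         // type argument inferred from the expected type
    println(explicit == inferred)                 // true: both denote the same class literal
  }
}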
stabilize(tree2, pre2, mode, pt) match { diff --git a/test/files/run/t4871.check b/test/files/run/t4871.check new file mode 100644 index 0000000000..a60526a0f3 --- /dev/null +++ b/test/files/run/t4871.check @@ -0,0 +1,2 @@ +class Test$C +class Test$D diff --git a/test/files/run/t4871.scala b/test/files/run/t4871.scala new file mode 100644 index 0000000000..70d8b7145c --- /dev/null +++ b/test/files/run/t4871.scala @@ -0,0 +1,12 @@ +object Test { + class C + class D + + def main(args: Array[String]): Unit = { + val z: Class[C] = classOf + val z2: Class[D] = classOf[D] + + println(z) + println(z2) + } +} -- cgit v1.2.3 From 7d3ec837a3883e1e79700ee7c8fe6ab3f7bbd73c Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 22 Dec 2011 15:33:59 +0100 Subject: Omit non-essential TypeApply trees. Otherwise they cause type errors. --- src/compiler/scala/reflect/runtime/ToolBoxes.scala | 5 ++--- src/compiler/scala/tools/nsc/ast/Trees.scala | 4 ---- .../scala/tools/nsc/transform/LiftCode.scala | 4 ++++ test/files/run/t5239.check | 13 ------------- test/files/run/t5239.scala | 20 -------------------- 5 files changed, 6 insertions(+), 40 deletions(-) delete mode 100644 test/files/run/t5239.check delete mode 100644 test/files/run/t5239.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 3bfdf1e6e7..7ef625b2ad 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -61,9 +61,9 @@ trait ToolBoxes extends { self: Universe => def makeParam(fv: Symbol) = meth.newValueParameter(NoPosition, fv.name) setInfo fv.tpe meth setInfo MethodType(fvs map makeParam, expr.tpe) minfo.decls enter meth - println("wrapping "+(defOwner(expr) -> meth)) + trace("wrapping ")(defOwner(expr) -> meth) val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth)) - println("wrapped: "+showAttributed(methdef)) + trace("wrapped: ")(showAttributed(methdef)) val objdef = ModuleDef( obj, Template( @@ -99,7 +99,6 @@ trait ToolBoxes extends { self: Universe => jclazz.getDeclaredMethods.find(_.getName == name).get def runExpr(expr: Tree): Any = { - settings.Xprint.value = List("typer", "erasure") val etpe = expr.tpe val fvs = (expr filter isFree map (_.symbol)).distinct diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 85849cfad4..9668debbbb 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -257,10 +257,6 @@ trait Trees extends reflect.internal.Trees { self: Global => case _: DefTree | Function(_, _) | Template(_, _, _) => resetDef(tree) tree.tpe = null - tree match { - case tree: DefDef => tree.tpt.tpe = null - case _ => () - } case tpt: TypeTree => if (tpt.wasEmpty) tree.tpe = null case This(_) if tree.symbol != null && tree.symbol.isPackageClass => diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala index f3f823d197..9404f0f699 100644 --- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala +++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala @@ -476,6 +476,10 @@ abstract class LiftCode extends Transform with TypingTransformers { if (!(boundSyms exists (tt.tpe contains _))) mirrorCall("TypeTree", reifyType(tt.tpe)) else if (tt.original != null) reify(tt.original) else mirrorCall("TypeTree") + case ta @ TypeApply(hk, ts) => + val thereAreOnlyTTs = ts collect { 
case t if !t.isInstanceOf[TypeTree] => t } isEmpty; + val ttsAreNotEssential = ts collect { case tt: TypeTree => tt } find { tt => tt.original != null } isEmpty; + if (thereAreOnlyTTs && ttsAreNotEssential) reifyTree(hk) else reifyProduct(ta) case global.emptyValDef => mirrorSelect("emptyValDef") case _ => diff --git a/test/files/run/t5239.check b/test/files/run/t5239.check deleted file mode 100644 index 40fe6a76e7..0000000000 --- a/test/files/run/t5239.check +++ /dev/null @@ -1,13 +0,0 @@ -result = 2{Int(2)} -[[syntax trees at end of typer]]// Scala source: NoSourceFile -package { - final object __wrapper$1 extends Object { - def this(): object __wrapper$1 = { - __wrapper$1.super.this(); - () - }; - def wrapper(): Int = 2 - } -} - -evaluated = 2 diff --git a/test/files/run/t5239.scala b/test/files/run/t5239.scala deleted file mode 100644 index 1f404196ba..0000000000 --- a/test/files/run/t5239.scala +++ /dev/null @@ -1,20 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - 2 - }; - - val settings = new Settings - settings.Xprint.value = List("typer") - - val reporter = new ConsoleReporter(settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - println("result = " + toolbox.showAttributed(ttree)) - - val evaluated = toolbox.runExpr(ttree) - println("evaluated = " + evaluated) -} -- cgit v1.2.3 From fc0c123e3560da190a3daae35214c2be50fd59e6 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 25 Nov 2011 17:00:16 +0100 Subject: [vpm] unapplyProd: faster matching for case classes behold the mythical unapplyProd: it does not exist, yet it promises to speed up pattern matching on case classes instead of calling the synthetic unapply/unapplySeq, we don't call the mythical synthetic unapplyProd, since -- if it existed -- it would be the identity anyway for case classes eventually, we will allow user-defined unapplyProd's, which should give you almost the same speed as case class matching for user-defined extractors (i.e., you don't have to wrap in an option, just return something on which we can select _i for i = 1 to N, unless it is null, which indicates match failure) still need to figure out a way to derive the types for the subpatterns, without requiring you to wrap your result in a ProductN unapplyProd support for vararg case classes using caseFieldAccessors instead of synthetic _i now the compiler bootstraps again, and after this optimization, quick.lib overhead is 70%, quick.comp is 50% (compiling with a locker built using -Yvirtpatmat, and itself generating code for -Yvirtpatmat) before the optimization, I think the overhead for quick.comp was close to 100% in this scenario more robust tupleSel for case classes TODO: - pos/t602 -- clean up after type inference as in fromCaseClassUnapply - run/pf-catch -- implement new-style orElse for partial function in uncurry --- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 262 ++++++++++++++------- test/files/run/virtpatmat_unapplyprod.check | 4 + test/files/run/virtpatmat_unapplyprod.flags | 1 + test/files/run/virtpatmat_unapplyprod.scala | 23 ++ 4 files changed, 206 insertions(+), 84 deletions(-) create mode 100644 test/files/run/virtpatmat_unapplyprod.check create mode 100644 test/files/run/virtpatmat_unapplyprod.flags create mode 100644 test/files/run/virtpatmat_unapplyprod.scala (limited to 'test/files') diff --git 
a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 23d855f7b3..c04e4796c4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -136,7 +136,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = { if (!extractor.isTyped) throw new TypeError(pos, "Could not typecheck extractor call: "+ extractor) - if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe) + // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe) // must use type `tp`, which is provided by extractor's result, not the type expected by binder, // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation @@ -290,13 +290,16 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation object ExtractorCall { - def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCall(unfun, args) + def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args) + def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args)) + + // THE PRINCIPLED SLOW PATH -- NOT USED // generate a call to the (synthetically generated) extractor of a case class // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one) // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY), // and replace that dummy by a reference to the actual binder in translateExtractorPattern - def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { + def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = { // TODO: can we rework the typer so we don't have to do all this twice? // undo rewrite performed in (5) of adapt val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} @@ -342,25 +345,20 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => } } - class ExtractorCall(extractorCallIncludingDummy: Tree, val args: List[Tree]) { - private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false } + abstract class ExtractorCall(val args: List[Tree]) { + val nbSubPats = args.length - def tpe = extractorCall.tpe - def isTyped = (tpe ne NoType) && extractorCall.isTyped - def resultType = tpe.finalResultType - def paramType = tpe.paramTypes.head + // everything okay, captain? + def isTyped : Boolean - // what's the extractor's result type in the monad? - // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands - lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else { - if (resultType.typeSymbol == BooleanClass) UnitClass.tpe - else { - val monadArgs = resultType.baseType(matchingMonadType.typeSymbol).typeArgs - // assert(monadArgs.length == 1, "unhandled extractor type: "+ extractorTp) // TODO: overloaded unapply?? 
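`resultInMonad` here normalizes the shapes an `unapply` result can take: a `Boolean` result contributes no binders (modelled as `Unit`), an `Option[T]` contributes a single binder of type `T`, and an `Option` of a Product is split into its components. A plain user-level reminder of those shapes (the example extractor objects are invented for illustration):

object ExtractorShapes {
  object IsEven { def unapply(n: Int): Boolean            = n % 2 == 0 }                            // no binders
  object Half   { def unapply(n: Int): Option[Int]        = if (n % 2 == 0) Some(n / 2) else None } // one binder
  object DivMod { def unapply(n: Int): Option[(Int, Int)] = Some((n / 3, n % 3)) }                  // two binders

  def main(args: Array[String]): Unit = {
    println(10 match { case IsEven()     => "even";         case _ => "odd" })
    println(10 match { case Half(h)      => "half is " + h; case _ => "odd" })
    println(10 match { case DivMod(q, r) => (q, r) })
  }
}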
- if(monadArgs.length == 1) monadArgs(0) - else ErrorType - } - } + def isSeq: Boolean + lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last) + + // to which type should the previous binder be casted? + def paramType : Type + + // binder has been casted to paramType if necessary + def treeMaker(binder: Symbol, pos: Position): TreeMaker // `subPatBinders` are the variables bound by this pattern in the following patterns // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is) @@ -374,15 +372,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => case bp => bp } - def isSeq = extractorCall.symbol.name == nme.unapplySeq - lazy val nbSubPats = args.length - lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last) - - // the types for the binders corresponding to my subpatterns - // subPatTypes != args map (_.tpe) since the args may have more specific types than the constructor's parameter types - // replace last type (of shape Seq[A]) with RepeatedParam[A] so that formalTypes will - // repeat the last argument type to align the formals with the number of arguments - // require (nbSubPats > 0 && (!lastIsStar || isSeq)) def subPatTypes: List[Type] = if(isSeq) { val TypeRef(pre, SeqClass, args) = seqTp @@ -390,6 +379,128 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => formalTypes(rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args), nbSubPats) } else rawSubPatTypes + protected def rawSubPatTypes: List[Type] + + protected def seqTp = rawSubPatTypes.last baseType SeqClass + protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare + protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple + protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1 + protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1 + protected lazy val minLenToCheck = if(lastIsStar) 1 else 0 + protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1) + protected def tupleSel(binder: Symbol)(i: Int): Tree = pmgen.tupleSel(binder)(i) + + // the trees that select the subpatterns on the extractor's result, referenced by `binder` + // require isSeq + protected def subPatRefsSeq(binder: Symbol): List[Tree] = { + // only relevant if isSeq: (here to avoid capturing too much in the returned closure) + val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder)) + val nbIndexingIndices = indexingIndices.length + + // this error is checked by checkStarPatOK + // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) + // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq + (((1 to firstIndexingBinder) map tupleSel(binder)) ++ + // then we have to index the binder that represents the sequence for the remaining subpatterns, except for... 
+ (indexingIndices map pmgen.index(seqTree(binder))) ++ + // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder + (if(!lastIsStar) Nil else List( + if(nbIndexingIndices == 0) seqTree(binder) + else pmgen.drop(seqTree(binder))(nbIndexingIndices)))).toList + } + + // the trees that select the subpatterns on the extractor's result, referenced by `binder` + // require (nbSubPats > 0 && (!lastIsStar || isSeq)) + protected def subPatRefs(binder: Symbol): List[Tree] = { + if (nbSubPats == 0) Nil + else if (isSeq) subPatRefsSeq(binder) + else ((1 to nbSubPats) map tupleSel(binder)).toList + } + + protected def lengthGuard(binder: Symbol): Option[Tree] = + // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied + if (!isSeq || (expectedLength < minLenToCheck)) None + else { import CODE._ + // `binder.lengthCompare(expectedLength)` + def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength)) + + // the comparison to perform + // when the last subpattern is a wildcard-star the expectedLength is but a lower bound + // (otherwise equality is required) + def compareOp: (Tree, Tree) => Tree = + if (lastIsStar) _ INT_>= _ + else _ INT_== _ + + // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero` + Some((seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)) + } + } + + // TODO: to be called when there's a def unapplyProd(x: T): Product_N + // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it) + class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) { + // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here: + /*override def equals(x$1: Any): Boolean = ... + val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span! + */ + // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun} + // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe + // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType) + // println("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType))) + // println("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe))) + private def extractorTp = fun.tpe + + def isTyped = fun.isTyped + + // to which type should the previous binder be casted? + def paramType = extractorTp.finalResultType + + def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last) + protected def rawSubPatTypes = extractorTp.paramTypes + + // binder has type paramType + def treeMaker(binder: Symbol, pos: Position): TreeMaker = { + // checks binder ne null before chaining to the next extractor + ProductExtractorTreeMaker(binder, lengthGuard(binder), Substitution(subPatBinders, subPatRefs(binder))) + } + +/* TODO: remove special case when the following bug is fixed +scala> :paste +// Entering paste mode (ctrl-D to finish) + +class Foo(x: Other) { x._1 } // BUG: can't refer to _1 if its defining class has not been type checked yet +case class Other(y: String) + +// Exiting paste mode, now interpreting. 
+ +:8: error: value _1 is not a member of Other + class Foo(x: Other) { x._1 } + ^ + +scala> case class Other(y: String) +defined class Other + +scala> class Foo(x: Other) { x._1 } +defined class Foo */ + override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._ + // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component + val caseAccs = binder.info.typeSymbol.caseFieldAccessors + if (caseAccs isDefinedAt (i-1)) REF(binder) DOT caseAccs(i-1) + else pmgen.tupleSel(binder)(i) + } + + override def toString(): String = "case class "+ (if (extractorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args + } + + class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) { + private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false } + + def tpe = extractorCall.tpe + def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType) + def paramType = tpe.paramTypes.head + def resultType = tpe.finalResultType + def isSeq = extractorCall.symbol.name == nme.unapplySeq + def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = { // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern) val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) @@ -399,14 +510,22 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => else extractorApply val binder = freshSym(pos, resultInMonad) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type - val subpatRefs = if (subPatBinders isEmpty) Nil else subPatRefs(binder) - lengthGuard(binder) match { - case None => ExtractorTreeMaker(patTreeLifted, binder, Substitution(subPatBinders, subpatRefs)) - case Some(lenGuard) => FilteredExtractorTreeMaker(patTreeLifted, lenGuard, binder, Substitution(subPatBinders, subpatRefs)) + case None => ExtractorTreeMaker(patTreeLifted, binder, Substitution(subPatBinders, subPatRefs(binder))) + case Some(lenGuard) => FilteredExtractorTreeMaker(patTreeLifted, lenGuard, binder, Substitution(subPatBinders, subPatRefs(binder))) } } + override protected def seqTree(binder: Symbol): Tree = + if (firstIndexingBinder == 0) CODE.REF(binder) + else super.seqTree(binder) + + // the trees that select the subpatterns on the extractor's result, referenced by `binder` + // require (nbSubPats > 0 && (!lastIsStar || isSeq)) + override protected def subPatRefs(binder: Symbol): List[Tree] = + if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors + else super.subPatRefs(binder) + protected def spliceApply(binder: Symbol): Tree = { object splice extends Transformer { override def transform(t: Tree) = t match { @@ -418,7 +537,19 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => splice.transform(extractorCallIncludingDummy) } - private lazy val rawSubPatTypes = + // what's the extractor's result type in the monad? 
+ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands + protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else { + if (resultType.typeSymbol == BooleanClass) UnitClass.tpe + else { + val monadArgs = resultType.baseType(matchingMonadType.typeSymbol).typeArgs + // assert(monadArgs.length == 1, "unhandled extractor type: "+ extractorTp) // TODO: overloaded unapply?? + if(monadArgs.length == 1) monadArgs(0) + else ErrorType + } + } + + protected lazy val rawSubPatTypes = if (resultInMonad.typeSymbol eq UnitClass) Nil else if(nbSubPats == 1) List(resultInMonad) else getProductArgs(resultInMonad) match { @@ -426,56 +557,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => case x => x } - private def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare - private def seqTp = rawSubPatTypes.last baseType SeqClass - private lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple - private lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1 - private lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1 - private lazy val minLenToCheck = if(lastIsStar) 1 else 0 - private def seqTree(binder: Symbol) = if(firstIndexingBinder == 0) CODE.REF(binder) else pmgen.tupleSel(binder)(firstIndexingBinder+1) - - // the trees that select the subpatterns on the extractor's result, referenced by `binder` - // require (nbSubPats > 0 && (!lastIsStar || isSeq)) - private def subPatRefs(binder: Symbol): List[Tree] = { - // only relevant if isSeq: (here to avoid capturing too much in the returned closure) - val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder)) - val nbIndexingIndices = indexingIndices.length - - // this error is checked by checkStarPatOK - // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) - - (if(isSeq) { - // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq - ((1 to firstIndexingBinder) map pmgen.tupleSel(binder)) ++ - // then we have to index the binder that represents the sequence for the remaining subpatterns, except for... 
- (indexingIndices map pmgen.index(seqTree(binder))) ++ - // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder - (if(!lastIsStar) Nil else List( - if(nbIndexingIndices == 0) seqTree(binder) - else pmgen.drop(seqTree(binder))(nbIndexingIndices))) - } - else if(nbSubPats == 1) List(CODE.REF(binder)) - else ((1 to nbSubPats) map pmgen.tupleSel(binder))).toList - } - - private def lengthGuard(binder: Symbol): Option[Tree] = - // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied - if (!isSeq || (expectedLength < minLenToCheck)) None - else { import CODE._ - // `binder.lengthCompare(expectedLength)` - def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength)) - - // the comparison to perform - // when the last subpattern is a wildcard-star the expectedLength is but a lower bound - // (otherwise equality is required) - def compareOp: (Tree, Tree) => Tree = - if (lastIsStar) _ INT_>= _ - else _ INT_== _ - - // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero` - Some((seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)) - } - override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")." } @@ -638,6 +719,17 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => */ case class ExtractorTreeMaker(extractor: Tree, nextBinder: Symbol, initialSubstitution: Substitution) extends SingleExtractorTreeMaker + case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], initialSubstitution: Substitution) extends TreeMaker { import CODE._ + def chainBefore(next: Tree): Tree = { + val nullCheck = REF(prevBinder) OBJ_NE NULL + val cond = extraCond match { + case None => nullCheck + case Some(c) => nullCheck AND c + } + pmgen.condOptimized(cond, substitution(next)) + } + } + case class FilteredExtractorTreeMaker(extractor: Tree, guard: Tree, nextBinder: Symbol, initialSubstitution: Substitution) extends FunTreeMaker { def chainBefore(next: Tree): Tree = pmgen.flatMap(extractor, wrapFunSubst(pmgen.condOptimized(guard, next))) setPos extractor.pos @@ -763,6 +855,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree def guard(c: Tree): Tree def zero: Tree + def one(res: Tree): Tree // TODO: defaults in traits + self types == broken? 
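Taken together, `ProductExtractorTreeMaker` (a null check plus an optional length guard) and the `caseFieldAccessors`-based `tupleSel` mean a case-class pattern no longer routes through the synthetic `unapply` and its `Option` allocation. A hand-written approximation of the difference, not the literal trees the translation emits:

object UnapplyProdSketch {
  case class Foo(x: Int, y: String)

  // the principled slow path: call the synthetic unapply, allocating an Option
  def viaUnapply(f: Foo): String = Foo.unapply(f) match {
    case Some((x, y)) => y * x
    case None         => "no match"
  }

  // the unapplyProd-style fast path: null check, then the case field accessors directly
  def viaAccessors(f: Foo): String =
    if (f != null) f.y * f.x
    else "no match"

  def main(args: Array[String]): Unit = {
    println(viaUnapply(Foo(2, "ab")))   // abab
    println(viaAccessors(Foo(2, "ab"))) // abab
  }
}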
// def guard(c: Tree, then: Tree, tp: Type): Tree // def cond(c: Tree): Tree = cond(c, UNIT, NoType) @@ -875,6 +968,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // methods in MatchingStrategy (the monad companion) -- used directly in translation def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type): Tree = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutTp, resTp) APPLY (scrut) APPLY (matcher) // matchingStrategy.runOrElse(scrut)(matcher) def zero: Tree = matchingStrategy DOT vpmName.zero // matchingStrategy.zero + def one(res: Tree): Tree = one(res, NoType) def one(res: Tree, tp: Type = NoType, oneName: Name = vpmName.one): Tree = genTypeApply(matchingStrategy DOT oneName, tp) APPLY (res) // matchingStrategy.one(res) def or(f: Tree, as: List[Tree]): Tree = (matchingStrategy DOT vpmName.or)((f :: as): _*) // matchingStrategy.or(f, as) def guard(c: Tree): Tree = (matchingStrategy DOT vpmName.guard)(c, UNIT) // matchingStrategy.guard(c, then) -- a user-defined guard diff --git a/test/files/run/virtpatmat_unapplyprod.check b/test/files/run/virtpatmat_unapplyprod.check new file mode 100644 index 0000000000..2660ff8f96 --- /dev/null +++ b/test/files/run/virtpatmat_unapplyprod.check @@ -0,0 +1,4 @@ +(2,3) +(2,3) +(2,3) +List(true, false, true) diff --git a/test/files/run/virtpatmat_unapplyprod.flags b/test/files/run/virtpatmat_unapplyprod.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/run/virtpatmat_unapplyprod.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/run/virtpatmat_unapplyprod.scala b/test/files/run/virtpatmat_unapplyprod.scala new file mode 100644 index 0000000000..441e5e3968 --- /dev/null +++ b/test/files/run/virtpatmat_unapplyprod.scala @@ -0,0 +1,23 @@ +object Test extends App { + case class Foo(x: Int, y: String) + + Foo(2, "3") match { + case Foo(x, y) => println((x, y)) + } + + case class FooSeq(x: Int, y: String, z: Boolean*) + + FooSeq(2, "3") match { + case FooSeq(x, y) => println((x, y)) + } + + FooSeq(2, "3", true, false, true) match { + case FooSeq(x, y) => println("nope") + case FooSeq(x, y, true, false, true) => println((x, y)) + } + + FooSeq(1, "a", true, false, true) match { + case FooSeq(1, "a") => println("nope") + case FooSeq(1, "a", x@_* ) => println(x.toList) + } +} \ No newline at end of file -- cgit v1.2.3 From e0b8877cd916dca3b37fd39e1376bf0ca0f11082 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sun, 27 Nov 2011 22:57:40 +0100 Subject: [vpm] common sub-expression elimination for conditions TreeMakers (esp. 
CondTreeMakers) are approximated by hash-cons'ed Conds sharing is detected for prefixes of Conds, and shared conditions are only tested once their results are stored, and repeated tests branch on the last shared condition, reusing the results from the first time they were checked a Test is 1-to-1 with a TreeMaker, but may share its Cond TODO: clean separation of the two translation strategies: - naive flatMap/orElse (for virtualization) - less-naive if-then-else (with CSE etc coming) sharing trees caused wrong bytecode to be emitted (verifyerror) tentative explanation: "because lambdalift uses mutable state to track which variables have been captured if you refer to the same variable with the same tree twice it'll get confused" Sent at 8:27 PM on Thursday >> grzegorz.kossakowski: so we found a bug in jvm according to http://java.sun.com/docs/books/jvms/second_edition/html/Instructions2.doc2.html checkcast should throw a classcastexception becuase it's a shorthand for if !(x instanceof T) throw ClassCastExcpt but jvm decided to throw verifyerror and yeah, the check is wrong if jvm was not throwing verifyerror it would throw classcast exception saying that ObjectRef cannot be casted to $colon$colon ... >> me: so now where does it come from? since a ref is involved, i thought LambdaLift >> grzegorz.kossakowski: yup or now I don't think lambalift introduces that kind of low-level casts but I might be wrong btw. it's interesting that it unpacks stuff from objectref twice in your code and in one place checkcast is correct and in another is wrong Sent at 9:33 PM on Thursday >> grzegorz.kossakowski: also, since it's a verifyerror I think genjvm should have an assertion >> grzegorz.kossakowski: 193: getfield #54; //Field scala/runtime/ObjectRef.elem:Ljava/lang/Object; 196: checkcast #8; //class scala/runtime/ObjectRef 199: invokevirtual #95; //Method scala/collection/immutable/$colon$colon.tl$1:()Lscala/collection/immutable/List; it's this see you have checkcast for ObjectRef and then on that value, you try to call tl() method from List Sent at 9:56 PM on Thursday >> me: fixed sharing trees is bad very bad because lambdalift uses mutable state to track which variables have been captured if you refer to the same variable with the same tree twice it'll get confused --- .../scala/tools/nsc/transform/UnCurry.scala | 28 +- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 497 +++++++++++++++++---- test/files/run/virtpatmat_opt_sharing.check | 1 + test/files/run/virtpatmat_opt_sharing.flags | 1 + test/files/run/virtpatmat_opt_sharing.scala | 10 + 5 files changed, 456 insertions(+), 81 deletions(-) create mode 100644 test/files/run/virtpatmat_opt_sharing.check create mode 100644 test/files/run/virtpatmat_opt_sharing.flags create mode 100644 test/files/run/virtpatmat_opt_sharing.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 769ef79546..2921607c24 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -290,7 +290,26 @@ abstract class UnCurry extends InfoTransform val idparam = m.paramss.head.head val substParam = new TreeSymSubstituter(List(vparam), List(idparam)) def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t)) - + object VirtPatmatOpt { + object Last { + def unapply[T](xs: List[T]) = xs.lastOption + } + // keep this in synch by what's generated by combineCases/runOrElse + object MatcherBlock { + def 
unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree])] = matcher match { // TODO: BUG the unapplySeq version of the case below does not seem to work in virtpatmat?? + case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) => Some(zero, x, matchRes, keepGoing, stats) + case _ => None + } + } + // TODO: virtpatmat use case: would be nice if could abstract over the repeated pattern more easily + // case Block(Last(P)) => + // case P => + def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree], Tree => Tree)] = matcher match { + case MatcherBlock(zero, x, matchRes, keepGoing, stats) => Some(zero, x, matchRes, keepGoing, stats, identity[Tree]) + case Block(outerStats, MatcherBlock(zero, x, matchRes, keepGoing, stats)) => Some(zero, x, matchRes, keepGoing, stats, inner => Block(outerStats, inner)) + case b => treeBrowser browse b; None + } + } DefDef(m, (fun.body: @unchecked) match { case Match(selector, cases) => def transformCase(cdef: CaseDef): CaseDef = @@ -301,6 +320,7 @@ abstract class UnCurry extends InfoTransform if (cases exists treeInfo.isDefaultCase) Literal(Constant(true)) else Match(substTree(selector.duplicate), (cases map transformCase) :+ defaultCase) ) + // TODO: find a better way to keep this in synch with the code generard by patmatvirtualizer // TODO: check tgt.tpe.typeSymbol isNonBottomSubclass MatchingStrategyClass case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), args_scrut), args_pm) if opt.virtPatmat => object noOne extends Transformer { @@ -317,7 +337,7 @@ abstract class UnCurry extends InfoTransform } substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member("isSuccess".toTermName)), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform))) // for no-option version of virtpatmat - case Block(List(zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats@_*), _) if opt.virtPatmat => import CODE._ + case VirtPatmatOpt(zero, x, matchRes, keepGoing, stats, addOuter) if opt.virtPatmat => import CODE._ object dropMatchResAssign extends Transformer { // override val treeCopy = newStrictTreeCopier // will duplicate below override def transform(tree: Tree): Tree = tree match { @@ -329,14 +349,14 @@ abstract class UnCurry extends InfoTransform } } val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList - val idaBlock = Block( + val idaBlock = addOuter(Block( zero :: x :: /* drop matchRes def */ keepGoing :: statsNoMatchRes, NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) 
else matchRes` by `!keepGoing` - ) + )) substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed }) } diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 05a85e97fe..f563f8bca2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -245,7 +245,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // `one(x) : T` where x is the binder before this pattern, which will be replaced by the binder for the alternative by TreeMaker.singleBinder below // T is the widened type of the previous binder -- this ascription is necessary to infer a clean type for `or` -- the alternative combinator -- in the presence of existential types // see pos/virtpatmat_exist1.scala - combineExtractors(translatePattern(patBinder, alt), pmgen.one(CODE.REF(patBinder), patBinder.info.widen)) // only RHS of actual case should use caseResult, else the optimized codegen breaks + combineExtractors(propagateSubstitution(translatePattern(patBinder, alt)), pmgen.one(CODE.REF(patBinder), patBinder.info.widen)) // only RHS of actual case should use caseResult, else the optimized codegen breaks } noFurtherSubPats(AlternativesTreeMaker(patBinder, altTrees : _*)) @@ -664,51 +664,97 @@ defined class Foo */ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait TreeMakers { - trait TreeMaker { - def substitution: Substitution ={ - if (currSub eq null) currSub = initialSubstitution - currSub - } + abstract class TreeMaker { + def substitution: Substitution = + if (currSub eq null) localSubstitution + else currSub - protected def initialSubstitution: Substitution + protected def localSubstitution: Substitution - private[TreeMakers] def addOuterSubstitution(outerSubst: Substitution): TreeMaker = { - currSub = outerSubst >> substitution + private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): TreeMaker = { + if (currSub ne null) { + println("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst)) + Thread.dumpStack() + } + else currSub = outerSubst >> substitution this } private[this] var currSub: Substitution = null def chainBefore(next: Tree): Tree + def treesToHoist: List[Tree] = Nil } - case class SubstOnlyTreeMaker(initialSubstitution: Substitution) extends TreeMaker { + case class SubstOnlyTreeMaker(localSubstitution: Substitution) extends TreeMaker { def chainBefore(next: Tree): Tree = substitution(next) } - trait FunTreeMaker extends TreeMaker { + abstract class FunTreeMaker extends TreeMaker { val nextBinder: Symbol // wrap a Fun (with binder nextBinder) around the next tree (unless nextBinder == NoSymbol) and perform our substitution - protected def wrapFunSubst(next: Tree): Tree = pmgen.fun(nextBinder, substitution(next)) + protected def wrapFunSubst(next: Tree): Tree + = pmgen.fun(nextBinder, substitution(next)) + + var reused: Boolean = false + def reusedBinders: List[Symbol] = Nil + override def treesToHoist: List[Tree] = { import CODE._ + reusedBinders map { b => VAL(b) === pmgen.mkZero(b.info) } + } } - trait FreshFunTreeMaker extends FunTreeMaker { + abstract class FreshFunTreeMaker extends FunTreeMaker { val pos: Position + val prevBinder: Symbol val nextBinderTp: Type lazy val nextBinder = 
freshSym(pos, nextBinderTp) + lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) } - trait SingleExtractorTreeMaker extends FunTreeMaker { + abstract class SingleExtractorTreeMaker extends FunTreeMaker { val extractor: Tree // build Tree that chains `next` after the current extractor - def chainBefore(next: Tree): Tree = pmgen.flatMap(extractor, wrapFunSubst(next)) setPos extractor.pos + def chainBefore(next: Tree): Tree = + pmgen.flatMap(extractor, wrapFunSubst(next)) setPos extractor.pos + } - trait SingleBinderTreeMaker extends FunTreeMaker { - val prevBinder: Symbol - lazy val initialSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) + // TODO: in the process of shifting optimized code gen into the treemakers: complete and make it conditional in the same way as is happening in pmgen + abstract class CondTreeMaker extends FreshFunTreeMaker { import CODE._ + val cond: Tree + val res: Tree + + // must set reused before! + override lazy val reusedBinders = if(reused) List(freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE, nextBinder setFlag MUTABLE) else Nil + def storedCond = reusedBinders(0) + def storedRes = reusedBinders(1) + + def chainBefore(next: Tree): Tree = + if (!reused) + atPos(pos)(pmgen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next))) + else { + IF (cond) THEN BLOCK( + storedCond === TRUE, + storedRes === res, + substitution(next).duplicate // TODO: finer-grained dup'ing + ) ELSE pmgen.zero + } } - abstract class SimpleTreeMaker extends SingleExtractorTreeMaker with SingleBinderTreeMaker with FreshFunTreeMaker + case class ReusingCondTreeMaker(dropped_priors: List[(TreeMaker, Option[TreeMaker])]) extends TreeMaker { import CODE._ + lazy val localSubstitution = { + val (from, to) = dropped_priors.collect {case (dropped: CondTreeMaker, Some(prior: CondTreeMaker)) => (dropped.nextBinder, REF(prior.storedRes))}.unzip + val oldSubs = dropped_priors.collect {case (dropped: TreeMaker, _) => dropped.substitution} + oldSubs.foldLeft(Substitution(from, to))(_ >> _) + } + + def chainBefore(next: Tree): Tree = { + val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: CondTreeMaker)) => ctm}.get.storedCond) + + IF (cond) THEN BLOCK( + substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S) + ) ELSE pmgen.zero + } + } /** * Make a TreeMaker that will result in an extractor call specified by `extractor` @@ -717,9 +763,12 @@ defined class Foo */ * the function's body is determined by the next TreeMaker * in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to` */ - case class ExtractorTreeMaker(extractor: Tree, nextBinder: Symbol, initialSubstitution: Substitution) extends SingleExtractorTreeMaker + case class ExtractorTreeMaker(extractor: Tree, nextBinder: Symbol, localSubstitution: Substitution) extends SingleExtractorTreeMaker { + override def toString = "X"+(extractor, nextBinder) + } - case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], initialSubstitution: Substitution) extends TreeMaker { import CODE._ + // TODO: allow user-defined unapplyProduct + case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: 
Substitution) extends TreeMaker { import CODE._ def chainBefore(next: Tree): Tree = { val nullCheck = REF(prevBinder) OBJ_NE NULL val cond = extraCond match { @@ -728,34 +777,44 @@ defined class Foo */ } pmgen.condOptimized(cond, substitution(next)) } + + override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution) } - case class FilteredExtractorTreeMaker(extractor: Tree, guard: Tree, nextBinder: Symbol, initialSubstitution: Substitution) extends FunTreeMaker { + case class FilteredExtractorTreeMaker(extractor: Tree, guard: Tree, nextBinder: Symbol, localSubstitution: Substitution) extends FunTreeMaker { def chainBefore(next: Tree): Tree = pmgen.flatMap(extractor, wrapFunSubst(pmgen.condOptimized(guard, next))) setPos extractor.pos + override def toString = "FX"+(extractor, guard, nextBinder) } // need to substitute since binder may be used outside of the next extractor call (say, in the body of the case) - case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends SimpleTreeMaker { - val extractor = pmgen.condCast(typeTest(prevBinder, nextBinderTp), prevBinder, nextBinderTp) + case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker { + val cond = typeTest(prevBinder, nextBinderTp) + val res = pmgen._asInstanceOf(prevBinder, nextBinderTp) + override def toString = "TT"+(prevBinder, nextBinderTp) } // implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations) - case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends SimpleTreeMaker { + case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker { val nextBinderTp = glb(List(patBinder.info.widen, pt)) - val extractor = pmgen.condCast(typeAndEqualityTest(patBinder, pt), patBinder, nextBinderTp) + + val cond = typeAndEqualityTest(patBinder, pt) + val res = pmgen._asInstanceOf(patBinder, nextBinderTp) + override def toString = "TET"+(patBinder, pt) } // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp) - case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, pos: Position) extends SimpleTreeMaker { - val nextBinderTp: Type = prevBinder.info.widen + case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, pos: Position) extends CondTreeMaker { + val nextBinderTp = prevBinder.info.widen // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null) // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required) - val extractor = atPos(pos)(pmgen.cond(pmgen._equals(patTree, prevBinder), CODE.REF(prevBinder), nextBinderTp)) + val cond = pmgen._equals(patTree, prevBinder) + val res = CODE.REF(prevBinder) + override def toString = "ET"+(prevBinder, patTree) } - case class AlternativesTreeMaker(prevBinder: Symbol, alts: Tree*) extends SingleBinderTreeMaker with FreshFunTreeMaker { + case class AlternativesTreeMaker(prevBinder: Symbol, alts: Tree*) extends FreshFunTreeMaker { val nextBinderTp: Type = prevBinder.info.widen val pos = alts.head.pos def chainBefore(next: Tree): Tree = @@ -763,67 +822,296 @@ defined class Foo */ } case class GuardTreeMaker(guardTree: Tree) extends /*SingleExtractor*/TreeMaker { - val 
initialSubstitution: Substitution = EmptySubstitution + val localSubstitution: Substitution = EmptySubstitution // val nextBinder = freshSym(guardTree.pos, UnitClass.tpe) // val extractor = pmgen.guard(guardTree) def chainBefore(next: Tree): Tree = { import CODE._ IF (guardTree) THEN next ELSE pmgen.zero } - override def toString = "G("+ guardTree +")" } - // combineExtractors changes the current substitution's of the tree makers in `treeMakers` - def combineExtractors(treeMakers: List[TreeMaker], body: Tree): Tree = { - // a foldLeft to accumulate the initialSubstitution left-to-right, but written using a map and a var for clarity - def propagateSubstitution(treeMakers: List[TreeMaker]): List[TreeMaker] = { - var accumSubst: Substitution = EmptySubstitution - treeMakers foreach { maker => - // could mutate maker instead, but it doesn't seem to shave much time off of quick.comp - maker addOuterSubstitution accumSubst - accumSubst = maker.substitution +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// decisions, decisions +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + object Test { + var currId = 0 + } + case class Test(cond: Cond, treeMaker: TreeMaker) { + // def <:<(other: Test) = cond <:< other.cond + // def andThen_: (prev: List[Test]): List[Test] = + // prev.filterNot(this <:< _) :+ this + + private val reusedBy = new collection.mutable.HashSet[Test] + var reuses: Option[Test] = None + def registerReuseBy(later: Test): Unit = { + assert(later.reuses.isEmpty) + reusedBy += later + later.reuses = Some(this) + } + + val id = { Test.currId += 1; Test.currId} + override def toString = + if (cond eq Top) "T" + else if(cond eq Havoc) "!?" + else "T"+ id + (if(reusedBy nonEmpty) "!["+ treeMaker +"]" else (if(reuses.isEmpty) "["+ treeMaker +"]" else " cf. T"+reuses.get.id)) + } + + object Cond { + // def refines(self: Cond, other: Cond): Boolean = (self, other) match { + // case (Bottom, _) => true + // case (Havoc , _) => true + // case (_ , Top) => true + // case (_ , _) => false + // } + var currId = 0 + } + + abstract class Cond { + // def testedPath: Tree + // def <:<(other: Cond) = Cond.refines(this, other) + + val id = { Cond.currId += 1; Cond.currId} + } + + // does not contribute any knowledge + case object Top extends Cond + + // takes away knowledge. e.g., a user-defined guard + case object Havoc extends Cond + + // we know everything! everything! 
+ // this either means the case is unreachable, + // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives + // case object Bottom extends Cond + + + object EqualityCond { + private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond] + def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs)) + } + class EqualityCond(testedPath: Tree, rhs: Tree) extends Cond { + // def negation = TopCond // inequality doesn't teach us anything + // do simplification when we know enough about the tree statically: + // - collapse equal trees + // - accumulate tests when (in)equality not known statically + // - become bottom when we statically know this can never match + + override def toString = testedPath +" == "+ rhs +"#"+ id + } + + object TypeCond { + private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeCond] + def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt)) + } + class TypeCond(testedPath: Tree, pt: Type) extends Cond { + // def negation = TopCond // inequality doesn't teach us anything + // do simplification when we know enough about the tree statically: + // - collapse equal trees + // - accumulate tests when (in)equality not known statically + // - become bottom when we statically know this can never match + override def toString = testedPath +" <: "+ pt +"#"+ id + } + + object TypeAndEqualityCond { + private val uniques = new collection.mutable.HashMap[(Tree, Type), TypeAndEqualityCond] + def apply(testedPath: Tree, pt: Type): TypeAndEqualityCond = uniques getOrElseUpdate((testedPath, pt), new TypeAndEqualityCond(testedPath, pt)) + } + class TypeAndEqualityCond(testedPath: Tree, pt: Type) extends Cond { + // def negation = TopCond // inequality doesn't teach us anything + // do simplification when we know enough about the tree statically: + // - collapse equal trees + // - accumulate tests when (in)equality not known statically + // - become bottom when we statically know this can never match + override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id + } + + /** a flow-sensitive, generalised, common sub-expression elimination + * reuse knowledge from performed tests + * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality) + * when a sub-expression is share, it is stored in a mutable variable + * the variable is floated up so that its scope includes all of the program that shares it + * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree) + * + * intended to be generalised to exhaustivity/reachability checking + */ + def doCSE(prevBinder: Symbol, cases: List[(List[TreeMaker], Tree)], pt: Type): List[(List[TreeMaker], Tree)] = { + // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively) + val pointsToBound = collection.mutable.HashSet(prevBinder) + + // the substitution that renames variables to variables in pointsToBound + var normalize: Substitution = EmptySubstitution + + // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound + // TODO check: + // pointsToBound -- accumSubst.from == Set(prevBinder) && (accumSubst.from.toSet -- 
pointsToBound) isEmpty + var accumSubst: Substitution = EmptySubstitution + + val trees = new collection.mutable.HashSet[Tree] + + def approximateTreeMaker(tm: TreeMaker): Test = { + val subst = tm.substitution + + // find part of substitution that replaces bound symbols by new symbols, and reverse that part + // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal + val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {case (f, t) => + t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f) + } + val (boundFrom, boundTo) = boundSubst.unzip + normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_))) + // println("normalize: "+ normalize) + + val (unboundFrom, unboundTo) = unboundSubst unzip + val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway + pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1 + // println("pointsToBound: "+ pointsToBound) + + accumSubst >>= okSubst + // println("accumSubst: "+ accumSubst) + + // TODO: improve, e.g., for constants + def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match { + case (_ : Ident, _ : Ident) => a.symbol eq b.symbol + case _ => false + }) + + // hashconsing trees (modulo value-equality) + def unique(t: Tree): Tree = + trees find (a => a.equalsStructure0(t)(sameValue)) match { + case Some(orig) => orig // println("unique: "+ (t eq orig, orig)); + case _ => trees += t; t + } + + def uniqueTp(tp: Type): Type = tp match { + // typerefs etc are already hashconsed + case _ : UniqueType => tp + case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help? 
+ case _ => tp } - treeMakers + + def binderToUniqueTree(b: Symbol) = unique(accumSubst(normalize(CODE.REF(b)))) + + Test(tm match { + case ProductExtractorTreeMaker(pb, None, subst) => Top // TODO: NotNullTest(prevBinder) + case tm@TypeTestTreeMaker(prevBinder, nextBinderTp, _) => TypeCond(binderToUniqueTree(prevBinder), uniqueTp(nextBinderTp)) + case tm@TypeAndEqualityTestTreeMaker(_, patBinder, pt, _) => TypeAndEqualityCond(binderToUniqueTree(patBinder), uniqueTp(pt)) + case tm@EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree)) + case ExtractorTreeMaker(_, _, _) + | GuardTreeMaker(_) + | ProductExtractorTreeMaker(_, Some(_), _) => Havoc + case FilteredExtractorTreeMaker(x, g, nb, subst) => Havoc + case AlternativesTreeMaker(_, _*) => Havoc // TODO: can do better here + case SubstOnlyTreeMaker(_) => Top + }, tm) } - propagateSubstitution(treeMakers).foldRight (body) (_ chainBefore _) - // this optimization doesn't give us much - // var accumSubst: Substitution = EmptySubstitution - // var revMakers: List[TreeMaker] = Nil - // treeMakers foreach { maker => - // accumSubst = accumSubst >> maker.substitution - // maker.substitution = accumSubst - // revMakers ::= maker - // } - // - // var accumTree = body - // revMakers foreach { maker => - // accumTree = maker chainBefore accumTree - // } - // - // atPos(pos)(accumTree) + val testss = cases.map {case (treeMakers, _) => treeMakers map approximateTreeMaker } + + // interpret: + val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]] + val tested = new collection.mutable.HashSet[Cond] + testss foreach { tests => + tested.clear() + tests dropWhile { test => + val cond = test.cond + if ((cond eq Havoc) || (cond eq Top)) (cond eq Top) // stop when we encounter a havoc, skip top + else { + tested += cond + + // is there an earlier test that checks our condition and whose dependencies are implied by ours? 
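      // For intuition, a sketch of the kind of match this reuse targets (cf. the
      // test/files/run/virtpatmat_opt_sharing.scala test added in this commit; an
      // illustration only, not code emitted here):
      //   List(1, 3, 4, 7) match {
      //     case 1 :: 3 :: 4 :: 5 :: x => println("nope")
      //     case 1 :: 3 :: 4 :: 6 :: x => println("nope")
      //     case 1 :: 3 :: 4 :: 7 :: x => println(1)
      //   }
      // every case begins with the same :: type tests and the equality tests for 1, 3 and 4;
      // when a later case reaches one of those tests, an earlier test with the same condition
      // has already been registered and its dependencies are a subset of the conditions tested
      // so far in the current case, so the later case can reuse the stored result instead of
      // re-testing.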
+ dependencies find { case (priorTest, deps) => + ((priorTest.cond eq cond) || (deps contains cond)) && (deps subsetOf tested) + } foreach { case (priorTest, deps) => + // if so, note the dependency in both tests + priorTest registerReuseBy test + } + + dependencies(test) = tested.toSet // copies + true + } + } + } + + // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase + // then, collapse these contiguous sequences of reusing tests + // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used) + // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable + (testss, cases).zipped map { case (tests, (_, caseBody)) => + var currDeps = Set[Cond]() + val (sharedPrefix, suffix) = tests span { test => + (test.cond eq Top) || (for( + reusedTest <- test.reuses; + nextDeps <- dependencies.get(reusedTest); + diff <- (nextDeps -- currDeps).headOption; + _ <- Some(currDeps = nextDeps)) + yield diff).nonEmpty + } + + val collapsedTreeMakers = if (sharedPrefix.lengthCompare(1) > 0) { // prefix must be longer than one for the optimization to pay off + for (test <- sharedPrefix; reusedTest <- test.reuses; if reusedTest.treeMaker.isInstanceOf[FunTreeMaker]) + reusedTest.treeMaker.asInstanceOf[FunTreeMaker].reused = true + // println("sharedPrefix: "+ sharedPrefix) + for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption; + lastReused <- lastShared.reuses) + yield ReusingCondTreeMaker(sharedPrefix map (t => (t.treeMaker, t.reuses map (_.treeMaker)))) :: suffix.map(_.treeMaker) + } else None + + (collapsedTreeMakers getOrElse tests.map(_.treeMaker), // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above) + caseBody) + } } - def combineCases(scrut: Tree, scrutSym: Symbol, cases: List[(List[TreeMaker], Tree)], pt: Type): Tree = { + + // a foldLeft to accumulate the localSubstitution left-to-right, mutating the treemakers in-place for performance + def propagateSubstitution(treeMakers: List[TreeMaker]): List[TreeMaker] = { + var accumSubst: Substitution = EmptySubstitution + treeMakers foreach { maker => + maker incorporateOuterSubstitution accumSubst + accumSubst = maker.substitution + } + treeMakers + } + + // calls propagateSubstitution on the treemakers + def analyzeCases(prevBinder: Symbol, cases: List[(List[TreeMaker], Tree)], pt: Type): List[(List[TreeMaker], Tree)] = { + cases foreach { case (pats, _) => propagateSubstitution(pats) } + doCSE(prevBinder, cases, pt) + } + + def combineCases(scrut: Tree, scrutSym: Symbol, cases0: List[(List[TreeMaker], Tree)], pt: Type): Tree = { + var toHoist = List[Tree]() val matcher = - if (cases nonEmpty) { + if (cases0 nonEmpty) { // when specified, need to propagate pt explicitly (type inferencer can't handle it) val optPt = if (isFullyDefined(pt)) appliedType(matchingMonadType, List(pt)) else NoType + val cases = analyzeCases(scrutSym, cases0, pt) + // map + foldLeft var combinedCases = combineExtractors(cases.head._1, cases.head._2) cases.tail foreach { case (pats, body) => combinedCases = pmgen.typedOrElse(optPt)(combinedCases, combineExtractors(pats, body)) } + toHoist = (for ((treeMakers, _) <- cases; tm <- treeMakers; hoisted <- tm.treesToHoist) yield hoisted).toList + pmgen.fun(scrutSym, combinedCases) } else pmgen.zero - pmgen.runOrElse(scrut, matcher, scrutSym.info, if 
(isFullyDefined(pt)) pt else NoType) + + val expr = pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType) + if (toHoist isEmpty) expr + else Block(toHoist, expr) } + // combineExtractors changes the current substitution's of the tree makers in `treeMakers` + // requires propagateSubstitution(treeMakers) has been called + def combineExtractors(treeMakers: List[TreeMaker], body: Tree): Tree = + treeMakers.foldRight (body) (_ chainBefore _) + + object Substitution { def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to)) // requires sameLength(from, to) @@ -833,11 +1121,14 @@ defined class Foo */ class Substitution(val from: List[Symbol], val to: List[Tree]) { def apply(tree: Tree): Tree = typedSubst(tree, from, to) + + // the substitution that chains `other` before `this` substitution // forall t: Tree. this(other(t)) == (this >> other)(t) def >>(other: Substitution): Substitution = { val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) } new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly } + override def toString = (from zip to) mkString("Substitution(", ", ", ")") } object EmptySubstitution extends Substitution(Nil, Nil) { @@ -845,6 +1136,8 @@ defined class Foo */ override def >>(other: Substitution): Substitution = other } + + def matchingMonadType: Type def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x"): Symbol @@ -855,6 +1148,7 @@ defined class Foo */ trait AbsCodeGen { import CODE.UNIT def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type): Tree def flatMap(a: Tree, b: Tree): Tree + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree def fun(arg: Symbol, body: Tree): Tree def or(f: Tree, as: List[Tree]): Tree def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree @@ -868,6 +1162,8 @@ defined class Foo */ def condOptimized(c: Tree, then: Tree): Tree def condCast(c: Tree, binder: Symbol, expectedTp: Type): Tree def _equals(checker: Tree, binder: Symbol): Tree + def _asInstanceOf(b: Symbol, tp: Type): Tree + def mkZero(tp: Type): Tree } def pmgen: AbsCodeGen @@ -967,6 +1263,22 @@ defined class Foo */ if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX) else gen.mkAsInstanceOf(REF(b), tpX, true, false) } + + // duplicated out of frustration with cast generation + def mkZero(tp: Type): Tree = { + tp.typeSymbol match { + case UnitClass => Literal(Constant()) + case BooleanClass => Literal(Constant(false)) + case FloatClass => Literal(Constant(0.0f)) + case DoubleClass => Literal(Constant(0.0d)) + case ByteClass => Literal(Constant(0.toByte)) + case ShortClass => Literal(Constant(0.toShort)) + case IntClass => Literal(Constant(0)) + case LongClass => Literal(Constant(0L)) + case CharClass => Literal(Constant(0.toChar)) + case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here + } + } } trait MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => @@ -989,6 +1301,8 @@ defined class Foo */ trait MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => // methods in the monad instance -- used directly in translation def flatMap(a: 
Tree, b: Tree): Tree = (a DOT vpmName.flatMap)(b) + def flatMapCond(condi: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = + flatMap(cond(condi, res, nextBinderTp), fun(nextBinder, next)) def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (genTypeApply(thisCase DOT vpmName.orElse, pt)) APPLY (elseCase) } @@ -1004,22 +1318,6 @@ defined class Foo */ override def zero: Tree = REF(zeroSym) override def one(res: Tree, tp: Type = NoType, oneName: Name = vpmName.one): Tree = Apply(genTypeApply(REF(SomeModule), tp), List(res)) - // duplicated out of frustration with cast generation - private def mkZero(tp: Type): Tree = { - tp.typeSymbol match { - case UnitClass => Literal(Constant()) - case BooleanClass => Literal(Constant(false)) - case FloatClass => Literal(Constant(0.0f)) - case DoubleClass => Literal(Constant(0.0d)) - case ByteClass => Literal(Constant(0.toByte)) - case ShortClass => Literal(Constant(0.toShort)) - case IntClass => Literal(Constant(0)) - case LongClass => Literal(Constant(0L)) - case CharClass => Literal(Constant(0.toChar)) - case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here - } - } - /** Inline runOrElse and get rid of Option allocations * @@ -1103,6 +1401,12 @@ defined class Foo */ case _ => println("huh?") (opt DOT vpmName.flatMap)(fun) } + + override def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = + IF (cond) THEN BLOCK( + VAL(nextBinder) === res, + next + ) ELSE zero } def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) @@ -1161,3 +1465,42 @@ defined class Foo */ // var okTree: Tree = null // } // private def c(t: Tree): Tree = noShadowedUntyped(t) + + // def approximateTreeMaker(tm: TreeMaker): List[Test] = tm match { + // case ExtractorTreeMaker(extractor, _, _) => HavocTest + // case FilteredExtractorTreeMaker(extractor, lenGuard, _, _) => HavocTest + // case ProductExtractorTreeMaker(testedBinder, lenGuard, _) => TopTest // TODO: (testedBinder ne null) and lenGuard + // + // // cond = typeTest(prevBinder, nextBinderTp) + // // res = pmgen._asInstanceOf(prevBinder, nextBinderTp) + // case TypeTestTreeMaker(testedBinder, pt, _) => + // + // // cond = typeAndEqualityTest(patBinder, pt) + // // res = pmgen._asInstanceOf(patBinder, nextBinderTp) + // case TypeAndEqualityTestTreeMaker(_, testedBinder, pt, _) => + // + // // cond = pmgen._equals(patTree, prevBinder) + // // res = CODE.REF(prevBinder) + // case EqualityTestTreeMaker(testedBinder, rhs, _) => + // + // case AlternativesTreeMaker(_, alts: *) => + // + // case GuardTreeMaker(guardTree) => + // } + + // // TODO: it's not exactly sound to represent an unapply-call by its symbol... also need to consider the prefix, like the outer-test (can this be captured as the path to this test?) + // type ExtractorRepr = Symbol + // + // // TODO: we're undoing tree-construction that we ourselves performed earlier -- how about not-doing so we don't have to undo? 
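  // (i.e., the Apply(unapp, List(arg)) node that the commented-out traverser below digs the
  //  binder out of was built by this translator in the first place, so the binder could
  //  presumably be recorded when the extractor call is constructed instead of being
  //  recovered by traversal afterwards)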
+ // private def findBinderArgOfApply(extractor: Tree, unappSym: Symbol): Symbol = { + // class CollectTreeTraverser[T](pf: PartialFunction[Tree => T]) extends Traverser { + // val hits = new ListBuffer[T] + // override def traverse(t: Tree) { + // if (pf.isDefinedAt(t)) hits += pf(t) + // super.traverse(t) + // } + // } + // val trav = new CollectTreeTraverser{ case Apply(unapp, List(arg)) if unapp.symbol eq unappSym => arg.symbol} + // trav.traverse(extractor) + // trav.hits.headOption getOrElse NoSymbol + // } diff --git a/test/files/run/virtpatmat_opt_sharing.check b/test/files/run/virtpatmat_opt_sharing.check new file mode 100644 index 0000000000..d00491fd7e --- /dev/null +++ b/test/files/run/virtpatmat_opt_sharing.check @@ -0,0 +1 @@ +1 diff --git a/test/files/run/virtpatmat_opt_sharing.flags b/test/files/run/virtpatmat_opt_sharing.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/run/virtpatmat_opt_sharing.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/run/virtpatmat_opt_sharing.scala b/test/files/run/virtpatmat_opt_sharing.scala new file mode 100644 index 0000000000..119e3050ea --- /dev/null +++ b/test/files/run/virtpatmat_opt_sharing.scala @@ -0,0 +1,10 @@ +object Test extends App { + virtMatch() + def virtMatch() = { + List(1, 3, 4, 7) match { + case 1 :: 3 :: 4 :: 5 :: x => println("nope") + case 1 :: 3 :: 4 :: 6 :: x => println("nope") + case 1 :: 3 :: 4 :: 7 :: x => println(1) + } + } +} \ No newline at end of file -- cgit v1.2.3 From d4182c7f1473c8c831644da1a473e829345ce5a4 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 15 Dec 2011 17:33:57 +0100 Subject: [vpm] emitting switches -- BodyTreeMaker 1) introduce BodyTreeMaker to get rid of special casing for body now each case is a list of TreeMakers rather than a pair of such a list and a tree needed to do this since emitting switches requires access to the untranslated body 2) emitting switches - alternatives are flattened: each alternative block ends with a jump to the next alternative (if there is one) - to avoid stack overflow in typedMatch: detect when translateMatch returns a Match the patch to uncurry would be nicer with an extractor, but that breaks due to a bug in old patmat made trees into dags again -- NPE in erasure tree.duplicate seems to break lambdalift because it does not give fresh symbols (or trees?) 
to the valdefs for the arguments of duplicated functions duplicate enclosing tree, not subtrees improved propagateSubstitution for AlternativesTreeMaker - it now propagates to all its alternatives, so we don't have to do that in chainBefore - by making propagation more regular, a bug in substitution in AlternativesTreeMaker manifested itself it introduced a new binder, unnecessarily, which then was unbound -- now reusing binder of outer pattern having removeSubstOnly in propagateSubstitution unveiled a bug: guard treemaker should substitute move fixerUpper closer to what it fixes up --- .../scala/tools/nsc/backend/icode/GenICode.scala | 1 + .../scala/tools/nsc/transform/UnCurry.scala | 116 +++++--- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 330 ++++++++++++++------- .../scala/tools/nsc/typechecker/Typers.scala | 16 +- test/files/pos/virtpatmat_alts_subst.flags | 1 + test/files/pos/virtpatmat_alts_subst.scala | 6 + test/files/pos/virtpatmat_binding_opt.flags | 1 + test/files/pos/virtpatmat_binding_opt.scala | 11 + test/files/run/virtpatmat_literal.scala | 3 +- 9 files changed, 319 insertions(+), 166 deletions(-) create mode 100644 test/files/pos/virtpatmat_alts_subst.flags create mode 100644 test/files/pos/virtpatmat_alts_subst.scala create mode 100644 test/files/pos/virtpatmat_binding_opt.flags create mode 100644 test/files/pos/virtpatmat_binding_opt.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index e26a0d59e8..3f0a0fac1a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1078,6 +1078,7 @@ abstract class GenICode extends SubComponent { } caseCtx = genLoad(body, tmpCtx, generatedType) + // close the block unless it's already been closed by the body, which closes the block if it ends in a jump (which is emitted to have alternatives share their body) caseCtx.bb.closeWith(JUMP(afterCtx.bb) setPos caze.pos) } ctx1.bb.emitOnly( diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 6ac28f2fe3..90f46206c5 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -290,38 +290,70 @@ abstract class UnCurry extends InfoTransform val idparam = m.paramss.head.head val substParam = new TreeSymSubstituter(List(vparam), List(idparam)) def substTree[T <: Tree](t: T): T = substParam(resetLocalAttrs(t)) - object VirtPatmatOpt { - object Last { - def unapply[T](xs: List[T]) = xs.lastOption - } - // keep this in synch by what's generated by combineCases/runOrElse - object MatcherBlock { - def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree])] = matcher match { // TODO: BUG the unapplySeq version of the case below does not seem to work in virtpatmat?? 
- case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) => Some(zero, x, matchRes, keepGoing, stats) - case _ => None + + // waiting here until we can mix case classes and extractors reliably (i.e., when virtpatmat becomes the default) + // object VirtPatmatOpt { + // object Last { + // def unapply[T](xs: List[T]) = xs.lastOption + // } + // // keep this in synch by what's generated by combineCases/runOrElse + // object MatcherBlock { + // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree])] = matcher match { // TODO: BUG the unapplySeq version of the case below does not seem to work in virtpatmat?? + // case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) => Some(zero, x, matchRes, keepGoing, stats) + // case _ => None + // } + // } + // // TODO: virtpatmat use case: would be nice if could abstract over the repeated pattern more easily + // // case Block(Last(P)) => + // // case P => + // def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree], Tree => Tree)] = matcher match { + // case MatcherBlock(zero, x, matchRes, keepGoing, stats) => Some(zero, x, matchRes, keepGoing, stats, identity[Tree]) + // case Block(outerStats, MatcherBlock(zero, x, matchRes, keepGoing, stats)) => Some(zero, x, matchRes, keepGoing, stats, inner => Block(outerStats, inner)) + // case b => treeBrowser browse b; None + // } + // } + + // TODO: optimize duplication, but make sure ValDef's introduced by wrap are treated correctly + def dupMatch(selector: Tree, cases: List[CaseDef], wrap: Match => Tree = identity) = { + def transformCase(cdef: CaseDef): CaseDef = + CaseDef(cdef.pat, cdef.guard, Literal(Constant(true))) + def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) + + gen.mkUncheckedMatch( + if (cases exists treeInfo.isDefaultCase) Literal(Constant(true)) + else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate) + ) + } + + def dupVirtMatch(zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], wrap: Block => Tree = identity) = { + object dropMatchResAssign extends Transformer { + // override val treeCopy = newStrictTreeCopier // will duplicate below + override def transform(tree: Tree): Tree = tree match { + // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing + case Block(List(matchRes, ass@Assign(keepGoingLhs, falseLit)), zero) if keepGoingLhs.symbol eq keepGoing.symbol => + Block(List(ass), zero) + case _ => + super.transform(tree) } } - // TODO: virtpatmat use case: would be nice if could abstract over the repeated pattern more easily - // case Block(Last(P)) => - // case P => - def unapply(matcher: Tree): Option[(ValDef, ValDef, ValDef, ValDef, List[Tree], Tree => Tree)] = matcher match { - case MatcherBlock(zero, x, matchRes, keepGoing, stats) => Some(zero, x, matchRes, keepGoing, stats, identity[Tree]) - case Block(outerStats, MatcherBlock(zero, x, matchRes, keepGoing, stats)) => Some(zero, x, matchRes, keepGoing, stats, inner => Block(outerStats, inner)) - case b => treeBrowser browse b; None - } + val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList + val idaBlock = wrap(Block( + zero :: + x :: + /* drop matchRes def */ + keepGoing :: + statsNoMatchRes, + NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) 
else matchRes` by `!keepGoing` + )) + substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed } + DefDef(m, (fun.body: @unchecked) match { case Match(selector, cases) => - def transformCase(cdef: CaseDef): CaseDef = - substTree(CaseDef(cdef.pat.duplicate, cdef.guard.duplicate, Literal(Constant(true)))) - def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) - - gen.mkUncheckedMatch( - if (cases exists treeInfo.isDefaultCase) Literal(Constant(true)) - else Match(substTree(selector.duplicate), (cases map transformCase) :+ defaultCase) - ) - // TODO: find a better way to keep this in synch with the code generard by patmatvirtualizer - // TODO: check tgt.tpe.typeSymbol isNonBottomSubclass MatchingStrategyClass + dupMatch(selector, cases) + case Block((vd: ValDef) :: Nil, Match(selector, cases)) => // can't factor this out using an extractor due to bugs in the old pattern matcher + dupMatch(selector, cases, m => Block(List(vd), m)) + // virtpatmat -- TODO: find a better way to keep this in synch with the code generated by patmatvirtualizer case Apply(Apply(TypeApply(Select(tgt, nme.runOrElse), targs), args_scrut), args_pm) if opt.virtPatmat => object noOne extends Transformer { override val treeCopy = newStrictTreeCopier // must duplicate everything @@ -336,28 +368,13 @@ abstract class UnCurry extends InfoTransform } } substTree(Apply(Apply(TypeApply(Select(tgt.duplicate, tgt.tpe.member("isSuccess".toTermName)), targs map (_.duplicate)), args_scrut map (_.duplicate)), args_pm map (noOne.transform))) - // for no-option version of virtpatmat - case VirtPatmatOpt(zero, x, matchRes, keepGoing, stats, addOuter) if opt.virtPatmat => import CODE._ - object dropMatchResAssign extends Transformer { - // override val treeCopy = newStrictTreeCopier // will duplicate below - override def transform(tree: Tree): Tree = tree match { - // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing - case Block(List(matchRes, ass@Assign(keepGoingLhs, falseLit)), zero) if keepGoingLhs.symbol eq keepGoing.symbol => - Block(List(ass), zero) - case _ => - super.transform(tree) - } - } - val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList - val idaBlock = addOuter(Block( - zero :: - x :: - /* drop matchRes def */ - keepGoing :: - statsNoMatchRes, - NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) 
else matchRes` by `!keepGoing` - )) - substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed + // for the optimized version of virtpatmat + case Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _) if opt.virtPatmat => + dupVirtMatch(zero, x, matchRes, keepGoing, stats) + case Block(outerStats, Block((zero: ValDef) :: (x: ValDef) :: (matchRes: ValDef) :: (keepGoing: ValDef) :: stats, _)) if opt.virtPatmat => // can't factor this out using an extractor due to bugs in the old pattern matcher + dupVirtMatch(zero, x, matchRes, keepGoing, stats, m => Block(outerStats, m)) + // case other => + // treeBrowser browse other }) } @@ -542,6 +559,7 @@ abstract class UnCurry extends InfoTransform } case ValDef(_, _, _, rhs) => val sym = tree.symbol + if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit) // a local variable that is mutable and free somewhere later should be lifted // as lambda lifting (coming later) will wrap 'rhs' in an Ref object. if (!sym.owner.isSourceMethod) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 17f2d4f96c..ff59cb15f1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -90,7 +90,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => val scrutSym = freshSym(scrut.pos, scrutType) val okPt = repeatedToSeq(pt) // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental - fixerUpper(context.owner, scrut.pos)(combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt)) + combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, context.owner) } @@ -123,7 +123,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => * */ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) => - (translatePattern(scrutSym, pattern) ++ translateGuard(guard), translateBody(body, pt)) + translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt) } def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = { @@ -274,11 +274,13 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account - def translateBody(body: Tree, matchPt: Type): Tree = atPos(body.pos)(pmgen.one(body, body.tpe, matchPt)) + def translateBody(body: Tree, matchPt: Type): TreeMaker = + BodyTreeMaker(body, matchPt) /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// object 
ExtractorCall { def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args) @@ -643,7 +645,7 @@ defined class Foo */ } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// the making of the trees /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait TreeMakers { @@ -657,13 +659,12 @@ defined class Foo */ protected def localSubstitution: Substitution - private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): TreeMaker = { + private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = { if (currSub ne null) { println("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst)) Thread.dumpStack() } else currSub = outerSubst >> substitution - this } private[this] var currSub: Substitution = null @@ -672,6 +673,17 @@ defined class Foo */ def treesToHoist: List[Tree] = Nil } + case class TrivialTreeMaker(tree: Tree) extends TreeMaker { + val localSubstitution: Substitution = EmptySubstitution + def chainBefore(next: Tree, pt: Type): Tree = tree + } + + case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker { + val localSubstitution: Substitution = EmptySubstitution + def chainBefore(next: Tree, pt: Type): Tree = // assert(next eq EmptyTree) + atPos(body.pos)(substitution(pmgen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here + } + case class SubstOnlyTreeMaker(localSubstitution: Substitution) extends TreeMaker { def chainBefore(next: Tree, pt: Type): Tree = substitution(next) } @@ -680,7 +692,7 @@ defined class Foo */ val nextBinder: Symbol // for CSE (used iff optimizingCodeGen) - // TODO: factor this out into a separate TreeMaker that gets created when reuse is detected -- don't mutate treemakers + // TODO: factor this out -- don't mutate treemakers var reused: Boolean = false def reusedBinders: List[Symbol] = Nil override def treesToHoist: List[Tree] = { import CODE._ @@ -696,7 +708,7 @@ defined class Foo */ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) } - // TODO: in the process of shifting optimized code gen into the treemakers: complete and make it conditional in the same way as is happening in pmgen + // TODO: factor out optimization-specific stuff into codegen abstract class CondTreeMaker extends FreshFunTreeMaker { import CODE._ val cond: Tree val res: Tree @@ -796,39 +808,48 @@ defined class Foo */ override def toString = "ET"+(prevBinder, patTree) } - case class AlternativesTreeMaker(prevBinder: Symbol, altss: List[List[TreeMaker]], pos: Position) extends FreshFunTreeMaker { - val nextBinderTp: Type = prevBinder.info.widen - private def inlineNext(next: Tree) = { - var okToInline = true - var sizeBudget = 20 // yep, totally arbitrary! 
- object travOkToInline extends Traverser { override def traverse(tree: Tree): Unit = if (sizeBudget >= 0) { sizeBudget -= 1; tree match { - case TypeApply(_, _) | Apply(_, _) | Select(_, _) - | Block(_, _) | Assign(_, _) | If(_, _, _) | Typed(_, _) => super.traverse(tree) // these are allowed if their subtrees are - case EmptyTree | This(_) | New(_) | Literal(_) | Ident(_) => // these are always ok - case _ if tree.isType => // these are always ok - case _ => okToInline = false //; println("not inlining: "+ (tree, tree.getClass)) - }}} - travOkToInline.traverse(next) - // println("(okToInline, sizeBudget): "+ (okToInline, sizeBudget)) - okToInline && sizeBudget > 0 // must be strict comparison + case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker { + // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one + val localSubstitution: Substitution = EmptySubstitution + + override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = { + super.incorporateOuterSubstitution(outerSubst) + altss = altss map (alts => propagateSubstitution(alts, substitution)) } + def chainBefore(next: Tree, pt: Type): Tree = { import CODE._ + // next does not contain deftrees, is pretty short + val canDuplicate = { + var okToInline = true + var sizeBudget = 100 / (altss.length max 1) // yep, totally arbitrary! + object travOkToInline extends Traverser { override def traverse(tree: Tree): Unit = if (sizeBudget >= 0) { sizeBudget -= 1; tree match { + case TypeApply(_, _) | Apply(_, _) | Select(_, _) + | Block(_, _) | Assign(_, _) | If(_, _, _) | Typed(_, _) => super.traverse(tree) // these are allowed if their subtrees are + case EmptyTree | This(_) | New(_) | Literal(_) | Ident(_) => // these are always ok + case _ if tree.isType => // these are always ok + case _ => okToInline = false //; println("not inlining: "+ (tree, tree.getClass)) + }}} + travOkToInline.traverse(next) + // println("(okToInline, sizeBudget): "+ (okToInline, sizeBudget)) + okToInline && sizeBudget > 0 // must be strict comparison + } + atPos(pos)( - if (inlineNext(next)) { - altss map (altTreeMakers => - combineExtractors(propagateSubstitution(altTreeMakers), next.duplicate, pt) // don't substitute prevBinder to nextBinder, beta-reduce application to prevBinder - ) reduceLeft pmgen.typedOrElse(pt) + if (canDuplicate) { + altss map {altTreeMakers => + combineExtractors(altTreeMakers :+ TrivialTreeMaker(substitution(next).duplicate), pt) + } reduceLeft pmgen.typedOrElse(pt) } else { - val rest = freshSym(pos, functionType(List(nextBinderTp), inMatchMonad(pt)), "rest") + val rest = freshSym(pos, functionType(List(), inMatchMonad(pt)), "rest") // rest.info.member(nme.apply).withAnnotation(AnnotationInfo(ScalaInlineClass.tpe, Nil, Nil)) // one alternative may still generate multiple trees (e.g., an extractor call + equality test) // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers val combinedAlts = altss map (altTreeMakers => - combineExtractors(propagateSubstitution(altTreeMakers), REF(rest) APPLY (REF(prevBinder)), pt) + combineExtractors(altTreeMakers :+ TrivialTreeMaker(REF(rest) APPLY ()), pt) ) BLOCK( - VAL(rest) === pmgen.fun(nextBinder, substitution(next)), + VAL(rest) === Function(Nil, substitution(next)), combinedAlts reduceLeft pmgen.typedOrElse(pt) ) } @@ 
-838,7 +859,7 @@ defined class Foo */ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker { val localSubstitution: Substitution = EmptySubstitution - def chainBefore(next: Tree, pt: Type): Tree = pmgen.flatMapGuard(guardTree, next) + def chainBefore(next: Tree, pt: Type): Tree = pmgen.flatMapGuard(substitution(guardTree), next) override def toString = "G("+ guardTree +")" } @@ -947,7 +968,7 @@ defined class Foo */ * * intended to be generalised to exhaustivity/reachability checking */ - def doCSE(prevBinder: Symbol, cases: List[(List[TreeMaker], Tree)], pt: Type): List[(List[TreeMaker], Tree)] = { + def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively) val pointsToBound = collection.mutable.HashSet(prevBinder) @@ -1013,10 +1034,11 @@ defined class Foo */ | ProductExtractorTreeMaker(_, Some(_), _) => Havoc case AlternativesTreeMaker(_, _, _) => Havoc // TODO: can do better here case SubstOnlyTreeMaker(_) => Top + case BodyTreeMaker(_, _) => Havoc }, tm) } - val testss = cases.map {case (treeMakers, _) => treeMakers map approximateTreeMaker } + val testss = cases.map { _ map approximateTreeMaker } // interpret: val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]] @@ -1047,7 +1069,7 @@ defined class Foo */ // then, collapse these contiguous sequences of reusing tests // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used) // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable - (testss, cases).zipped map { case (tests, (_, caseBody)) => + testss map { tests => var currDeps = Set[Cond]() val (sharedPrefix, suffix) = tests span { test => (test.cond eq Top) || (for( @@ -1067,63 +1089,186 @@ defined class Foo */ yield ReusingCondTreeMaker(sharedPrefix map (t => (t.treeMaker, t.reuses map (_.treeMaker)))) :: suffix.map(_.treeMaker) } else None - (collapsedTreeMakers getOrElse tests.map(_.treeMaker), // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above) - caseBody) + collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above) } } + // TODO: non-trivial dead-code elimination + // e.g., the following match should compile to a simple instanceof: + // case class Ident(name: String) + // for (Ident(name) <- ts) println(name) + def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { + // do minimal DCE + cases + } - // a foldLeft to accumulate the localSubstitution left-to-right, mutating the treemakers in-place for performance - def propagateSubstitution(treeMakers: List[TreeMaker]): List[TreeMaker] = { - var accumSubst: Substitution = EmptySubstitution + + def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) + + // a foldLeft to accumulate the localSubstitution left-to-right + // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution + def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { + var accumSubst: Substitution = initial treeMakers foreach { maker => maker 
incorporateOuterSubstitution accumSubst accumSubst = maker.substitution } - treeMakers + removeSubstOnly(treeMakers) } - // calls propagateSubstitution on the treemakers - def analyzeCases(prevBinder: Symbol, cases: List[(List[TreeMaker], Tree)], pt: Type): List[(List[TreeMaker], Tree)] = { - cases foreach { case (pats, _) => propagateSubstitution(pats) } - if (optimizingCodeGen) { - doCSE(prevBinder, cases, pt) - } else cases - } + object SwitchablePattern { def unapply(pat: Tree) = pat match { + case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches + case _ => false + }} + + // def isSwitchable(cases: List[(List[TreeMaker], Tree)]): Boolean = { + // def isSwitchableTreeMaker(tm: TreeMaker) = tm match { + // case tm@EqualityTestTreeMaker(_, SwitchablePattern(), _) => true + // case SubstOnlyTreeMaker(_) => true + // case AlternativesTreeMaker(_, altss, _) => altss forall (_.forall(isSwitchableTreeMaker)) + // case _ => false + // } + // } + + def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = if (optimizingCodeGen) { + def unfold(tms: List[TreeMaker], currLabel: Option[Symbol] = None, nextLabel: Option[Symbol] = None): List[CaseDef] = tms match { + // constant + case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil => import CODE._ + @inline + def substedBody = btm.substitution(body) + val labelledBody = currLabel match { + case None => substedBody // currLabel.isEmpty implies nextLabel.isEmpty + case Some(myLabel) => + LabelDef(myLabel, Nil, + nextLabel match { + case None => substedBody + case Some(next) => ID(next) APPLY () + } + ) + } + List(CaseDef(const, EmptyTree, labelledBody)) + + // alternatives + case AlternativesTreeMaker(_, altss, _) :: bodyTm :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty) + val labels = altss map { alts => + Some(freshSym(NoPosition, MethodType(Nil, pt), "$alt$") setFlag (METHOD | LABEL)) + } - def combineCases(scrut: Tree, scrutSym: Symbol, cases0: List[(List[TreeMaker], Tree)], pt: Type): Tree = { - var toHoist = List[Tree]() - val matcher = - if (cases0 nonEmpty) { - // when specified, need to propagate pt explicitly (type inferencer can't handle it) - val optPt = - if (isFullyDefined(pt)) inMatchMonad(pt) - else NoType - - val cases = analyzeCases(scrutSym, cases0, pt) - - // map + foldLeft - var combinedCases = combineExtractors(cases.head._1, cases.head._2, pt) - cases.tail foreach { case (pats, body) => - combinedCases = pmgen.typedOrElse(optPt)(combinedCases, combineExtractors(pats, body, pt)) + val caseDefs = (altss, labels, labels.tail :+ None).zipped.map { case (alts, currLabel, nextLabel) => + unfold(alts :+ bodyTm, currLabel, nextLabel) } - toHoist = (for ((treeMakers, _) <- cases; tm <- treeMakers; hoisted <- tm.treesToHoist) yield hoisted).toList + if (caseDefs exists (_.isEmpty)) Nil + else caseDefs.flatten + + case _ => Nil // failure + } + + val caseDefs = cases map { makers => + removeSubstOnly(makers) match { + // default case (don't move this to unfold, as it may only occur on the top level, not as an alternative -- well, except in degenerate matches) + case (btm@BodyTreeMaker(body, _)) :: Nil => + List(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body))) + case nonTrivialMakers => + unfold(nonTrivialMakers) + } + } - pmgen.fun(scrutSym, combinedCases) - } else pmgen.zero + if (caseDefs exists (_.isEmpty)) None + else { import CODE._ + val matcher 
= BLOCK( + VAL(scrutSym) === scrut, // TODO: type test for switchable type if patterns allow switch but the scrutinee doesn't + Match(REF(scrutSym), caseDefs.flatten) // match on scrutSym, not scrut to avoid duplicating scrut + ) + // matcher filter (tree => tree.tpe == null) foreach println + // treeBrowser browse matcher + Some(matcher) // set type to avoid recursion in typedMatch + } + } else None - val expr = pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType) - if (toHoist isEmpty) expr - else Block(toHoist, expr) + def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = + doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt) + + // calls propagateSubstitution on the treemakers + def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){ + val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them + + emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{ + var toHoist = List[Tree]() + val matcher = + if (casesUnOpt nonEmpty) { + // when specified, need to propagate pt explicitly (type inferencer can't handle it) + val optPt = + if (isFullyDefined(pt)) inMatchMonad(pt) + else NoType + + val cases = + if (optimizingCodeGen) optimizeCases(scrutSym, casesUnOpt, pt) + else casesUnOpt + + val combinedCases = + cases.map(combineExtractors(_, pt)).reduceLeft(pmgen.typedOrElse(optPt)) + + toHoist = (for (treeMakers <- cases; tm <- treeMakers; hoisted <- tm.treesToHoist) yield hoisted).toList + + pmgen.fun(scrutSym, combinedCases) + } else pmgen.zero + + + val expr = pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType) + if (toHoist isEmpty) expr + else Block(toHoist, expr) + } } // combineExtractors changes the current substitution's of the tree makers in `treeMakers` // requires propagateSubstitution(treeMakers) has been called - def combineExtractors(treeMakers: List[TreeMaker], body: Tree, pt: Type): Tree = - treeMakers.foldRight (body) (_.chainBefore(_, pt)) + def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree = + treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt)) + + + + // TODO: do this during tree construction, but that will require tracking the current owner in treemakers + // TODO: assign more fine-grained positions + // fixes symbol nesting, assigns positions + private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser { + currentOwner = origOwner + + override def traverse(t: Tree) { + if (t != EmptyTree && t.pos == NoPosition) { + t.setPos(pos) + } + t match { + case Function(_, _) if t.symbol == NoSymbol => + t.symbol = currentOwner.newValue(t.pos, nme.ANON_FUN_NAME).setFlag(SYNTHETIC).setInfo(NoType) + // println("new symbol for "+ (t, t.symbol.ownerChain)) + case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) => + // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain)) + t.symbol.owner = currentOwner + case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) + // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) + if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree?? 
+ assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner) + d.symbol.lazyAccessor.owner = currentOwner + } + if(d.symbol.moduleClass ne NoSymbol) + d.symbol.moduleClass.owner = currentOwner + d.symbol.owner = currentOwner + // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) => + // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)) + case _ => + } + super.traverse(t) + } + + // override def apply + // println("before fixerupper: "+ xTree) + // currentRun.trackerFactory.snapshot() + // println("after fixerupper") + // currentRun.trackerFactory.snapshot() + } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // substitution @@ -1169,6 +1314,7 @@ defined class Foo */ def fun(arg: Symbol, body: Tree): Tree def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree def zero: Tree + def one(res: Tree, bodyPt: Type, matchPt: Type): Tree def condOptimized(c: Tree, then: Tree): Tree def _equals(checker: Tree, binder: Symbol): Tree def _asInstanceOf(b: Symbol, tp: Type): Tree @@ -1176,6 +1322,7 @@ defined class Foo */ } def pmgen: AbsCodeGen + def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -1391,49 +1538,6 @@ defined class Foo */ } def matchingStrategy: Tree - def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator - } - - - // TODO: do this during tree construction, but that will require tracking the current owner in treemakers - // TODO: assign more fine-grained positions - // fixes symbol nesting, assigns positions - def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser { - currentOwner = origOwner - - override def traverse(t: Tree) { - if (t != EmptyTree && t.pos == NoPosition) { - t.setPos(pos) - } - t match { - case Function(_, _) if t.symbol == NoSymbol => - t.symbol = currentOwner.newValue(t.pos, nme.ANON_FUN_NAME).setFlag(SYNTHETIC).setInfo(NoType) - // println("new symbol for "+ (t, t.symbol.ownerChain)) - case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) => - // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain)) - t.symbol.owner = currentOwner - case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) - // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) - if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree?? 
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner) - d.symbol.lazyAccessor.owner = currentOwner - } - if(d.symbol.moduleClass ne NoSymbol) - d.symbol.moduleClass.owner = currentOwner - - d.symbol.owner = currentOwner - // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) => - // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)) - case _ => - } - super.traverse(t) - } - - // override def apply - // println("before fixerupper: "+ xTree) - // currentRun.trackerFactory.snapshot() - // println("after fixerupper") - // currentRun.trackerFactory.snapshot() } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 341e1bc5ea..f6f783516c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3223,9 +3223,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val owntype = elimAnonymousClass(owntype0) if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype)) - val translated = (new MatchTranslator(this)).translateMatch(selector1, cases1, owntype) - - typed1(translated, mode, WildcardType) setType owntype // TODO: get rid of setType owntype -- it should all typecheck + (new MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match { + case Block(vd :: Nil, tree@Match(selector, cases)) => + val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) + var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt) + val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe)) + if (needAdapt) + cases1 = cases1 map (adaptCase(_, owntype)) + typed(Block(vd :: Nil, treeCopy.Match(tree, selector1, cases1) setType owntype)) + case translated => + // TODO: get rid of setType owntype -- it should all typecheck + // must call typed, not typed1, or we overflow the stack when emitting switches + typed(translated, mode, WildcardType) setType owntype + } } } } diff --git a/test/files/pos/virtpatmat_alts_subst.flags b/test/files/pos/virtpatmat_alts_subst.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/pos/virtpatmat_alts_subst.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/pos/virtpatmat_alts_subst.scala b/test/files/pos/virtpatmat_alts_subst.scala new file mode 100644 index 0000000000..e27c52f9c7 --- /dev/null +++ b/test/files/pos/virtpatmat_alts_subst.scala @@ -0,0 +1,6 @@ +case class Foo(s: String) { + def appliedType(tycon: Any) = + tycon match { + case Foo(sym @ ("NothingClass" | "AnyClass")) => println(sym) + } +} diff --git a/test/files/pos/virtpatmat_binding_opt.flags b/test/files/pos/virtpatmat_binding_opt.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/pos/virtpatmat_binding_opt.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/pos/virtpatmat_binding_opt.scala b/test/files/pos/virtpatmat_binding_opt.scala new file mode 100644 index 0000000000..962e3d7dbe --- /dev/null +++ b/test/files/pos/virtpatmat_binding_opt.scala @@ -0,0 +1,11 @@ +class Test { + def combine = this match { + case that if that eq this => this // just return this + case that: Test2 => + println(that) + this + case _ => error("meh") + } +} + +class Test2 extends Test \ No newline at end of file diff --git a/test/files/run/virtpatmat_literal.scala b/test/files/run/virtpatmat_literal.scala index cb72b1d2a5..5bd6b30791 100644 
--- a/test/files/run/virtpatmat_literal.scala +++ b/test/files/run/virtpatmat_literal.scala @@ -1,8 +1,9 @@ object Test extends App { + val a = 1 1 match { case 2 => println("FAILED") case 1 => println("OK") - case 1 => println("FAILED") + case `a` => println("FAILED") } val one = 1 -- cgit v1.2.3 From 460bbc1276fb4ba83b9bcbdc7f7ba475b352b7c6 Mon Sep 17 00:00:00 2001 From: Szabolcs Berecz Date: Sun, 25 Dec 2011 01:37:02 +0100 Subject: fixes #5104 and related NaN ordering inconsistencies The bug was caused by the inconsistency between j.l.Math.min() and j.l.Double.compareTo() wrt NaN (j.l.Math.min() considers NaN to be less than any other value while j.l.Double.compareTo() says it's greater...) The fix changes Ordering.{FloatOrdering,DoubleOrdering) to base it's results on primitive comparisons and math.{min,max} instead of j.l.{Float,Double}.compareTo() --- src/library/scala/math/Ordering.scala | 40 ++++++++++ test/files/scalacheck/nan-ordering.scala | 130 +++++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+) create mode 100644 test/files/scalacheck/nan-ordering.scala (limited to 'test/files') diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index d007ae3780..8fc74a9d5d 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -262,12 +262,52 @@ object Ordering extends LowPriorityOrderingImplicits { implicit object Long extends LongOrdering trait FloatOrdering extends Ordering[Float] { + outer => + def compare(x: Float, y: Float) = java.lang.Float.compare(x, y) + + override def lteq(x: Float, y: Float): Boolean = x <= y + override def gteq(x: Float, y: Float): Boolean = x >= y + override def lt(x: Float, y: Float): Boolean = x < y + override def gt(x: Float, y: Float): Boolean = x > y + override def equiv(x: Float, y: Float): Boolean = x == y + override def max(x: Float, y: Float): Float = math.max(x, y) + override def min(x: Float, y: Float): Float = math.min(x, y) + + override def reverse: Ordering[Float] = new FloatOrdering { + override def reverse = outer + override def compare(x: Float, y: Float) = outer.compare(y, x) + + override def lteq(x: Float, y: Float): Boolean = outer.lteq(y, x) + override def gteq(x: Float, y: Float): Boolean = outer.gteq(y, x) + override def lt(x: Float, y: Float): Boolean = outer.lt(y, x) + override def gt(x: Float, y: Float): Boolean = outer.gt(y, x) + } } implicit object Float extends FloatOrdering trait DoubleOrdering extends Ordering[Double] { + outer => + def compare(x: Double, y: Double) = java.lang.Double.compare(x, y) + + override def lteq(x: Double, y: Double): Boolean = x <= y + override def gteq(x: Double, y: Double): Boolean = x >= y + override def lt(x: Double, y: Double): Boolean = x < y + override def gt(x: Double, y: Double): Boolean = x > y + override def equiv(x: Double, y: Double): Boolean = x == y + override def max(x: Double, y: Double): Double = math.max(x, y) + override def min(x: Double, y: Double): Double = math.min(x, y) + + override def reverse: Ordering[Double] = new DoubleOrdering { + override def reverse = outer + override def compare(x: Double, y: Double) = outer.compare(y, x) + + override def lteq(x: Double, y: Double): Boolean = outer.lteq(y, x) + override def gteq(x: Double, y: Double): Boolean = outer.gteq(y, x) + override def lt(x: Double, y: Double): Boolean = outer.lt(y, x) + override def gt(x: Double, y: Double): Boolean = outer.gt(y, x) + } } implicit object Double extends DoubleOrdering diff --git 
a/test/files/scalacheck/nan-ordering.scala b/test/files/scalacheck/nan-ordering.scala new file mode 100644 index 0000000000..2094a46e37 --- /dev/null +++ b/test/files/scalacheck/nan-ordering.scala @@ -0,0 +1,130 @@ +import org.scalacheck._ +import Gen._ +import Prop._ + +object Test extends Properties("NaN-Ordering") { + + val specFloats: Gen[Float] = oneOf( + Float.MaxValue, + Float.MinPositiveValue, + Float.MinValue, + Float.NaN, + Float.NegativeInfinity, + Float.PositiveInfinity, + -0.0f, + +0.0f + ) + + property("Float min") = forAll(specFloats, specFloats) { (d1, d2) => { + val mathmin = math.min(d1, d2) + val numericmin = d1 min d2 + mathmin == numericmin || mathmin.isNaN && numericmin.isNaN + } + } + + property("Float max") = forAll(specFloats, specFloats) { (d1, d2) => { + val mathmax = math.max(d1, d2) + val numericmax = d1 max d2 + mathmax == numericmax || mathmax.isNaN && numericmax.isNaN + } + } + + val numFloat = implicitly[Numeric[Float]] + + property("Float lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lt(d1, d2) == d1 < d2 } + + property("Float lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.lteq(d1, d2) == d1 <= d2 } + + property("Float gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gt(d1, d2) == d1 > d2 } + + property("Float gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.gteq(d1, d2) == d1 >= d2 } + + property("Float equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.equiv(d1, d2) == (d1 == d2) } + + property("Float reverse.min") = forAll(specFloats, specFloats) { (d1, d2) => { + val mathmin = math.min(d1, d2) + val numericmin = numFloat.reverse.min(d1, d2) + mathmin == numericmin || mathmin.isNaN && numericmin.isNaN + } + } + + property("Float reverse.max") = forAll(specFloats, specFloats) { (d1, d2) => { + val mathmax = math.max(d1, d2) + val numericmax = numFloat.reverse.max(d1, d2) + mathmax == numericmax || mathmax.isNaN && numericmax.isNaN + } + } + + property("Float reverse.lt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lt(d1, d2) == d2 < d1 } + + property("Float reverse.lteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.lteq(d1, d2) == d2 <= d1 } + + property("Float reverse.gt") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gt(d1, d2) == d2 > d1 } + + property("Float reverse.gteq") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.gteq(d1, d2) == d2 >= d1 } + + property("Float reverse.equiv") = forAll(specFloats, specFloats) { (d1, d2) => numFloat.reverse.equiv(d1, d2) == (d1 == d2) } + + + val specDoubles: Gen[Double] = oneOf( + Double.MaxValue, + Double.MinPositiveValue, + Double.MinValue, + Double.NaN, + Double.NegativeInfinity, + Double.PositiveInfinity, + -0.0, + +0.0 + ) + + // ticket #5104 + property("Double min") = forAll(specDoubles, specDoubles) { (d1, d2) => { + val mathmin = math.min(d1, d2) + val numericmin = d1 min d2 + mathmin == numericmin || mathmin.isNaN && numericmin.isNaN + } + } + + property("Double max") = forAll(specDoubles, specDoubles) { (d1, d2) => { + val mathmax = math.max(d1, d2) + val numericmax = d1 max d2 + mathmax == numericmax || mathmax.isNaN && numericmax.isNaN + } + } + + val numDouble = implicitly[Numeric[Double]] + + property("Double lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lt(d1, d2) == d1 < d2 } + + property("Double lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.lteq(d1, d2) == d1 <= d2 } + + property("Double gt") = forAll(specDoubles, 
specDoubles) { (d1, d2) => numDouble.gt(d1, d2) == d1 > d2 } + + property("Double gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.gteq(d1, d2) == d1 >= d2 } + + property("Double equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.equiv(d1, d2) == (d1 == d2) } + + property("Double reverse.min") = forAll(specDoubles, specDoubles) { (d1, d2) => { + val mathmin = math.min(d1, d2) + val numericmin = numDouble.reverse.min(d1, d2) + mathmin == numericmin || mathmin.isNaN && numericmin.isNaN + } + } + + property("Double reverse.max") = forAll(specDoubles, specDoubles) { (d1, d2) => { + val mathmax = math.max(d1, d2) + val numericmax = numDouble.reverse.max(d1, d2) + mathmax == numericmax || mathmax.isNaN && numericmax.isNaN + } + } + + property("Double reverse.lt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lt(d1, d2) == d2 < d1 } + + property("Double reverse.lteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.lteq(d1, d2) == d2 <= d1 } + + property("Double reverse.gt") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gt(d1, d2) == d2 > d1 } + + property("Double reverse.gteq") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.gteq(d1, d2) == d2 >= d1 } + + property("Double reverse.equiv") = forAll(specDoubles, specDoubles) { (d1, d2) => numDouble.reverse.equiv(d1, d2) == (d1 == d2) } +} -- cgit v1.2.3 From f737e35ddf43599043ab78404c4f9a13e6d02c9b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 26 Dec 2011 06:07:04 -0800 Subject: Fixed regression in lub calculation. Changing NullaryMethodType to be a SimpleTypeProxy because nearly all its operations forward to its result type was it seems not such a good idea, because it also meant that calling .underlying returned the result type rather than the method type. The way this materialized was in subtype checks of refinement types. A lub is calculated for two nullary method types in the course of calculating a refinement, and then the input types are checked against the calculated lub. However in the lub refinement, the nullary method type has become a bare typeref, and so the subtype check failed. Closes SI-5317. This does give me confidence that all the malformed lubs one sees logged under -Ydebug (and there are still many, especially with type constructors) are alerting us to real bugs elsewhere in Types. --- src/compiler/scala/reflect/internal/Types.scala | 26 ++++++++++++++++--------- test/files/pos/t5317.scala | 12 ++++++++++++ 2 files changed, 29 insertions(+), 9 deletions(-) create mode 100644 test/files/pos/t5317.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 2db957410b..47184eee51 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -2153,15 +2153,23 @@ A type's typeSymbol should never be inspected directly. 
override def isJava = true } - case class NullaryMethodType(override val resultType: Type) extends SimpleTypeProxy { - override def underlying = resultType - override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations) - override def paramSectionCount = 0 - override def paramss = Nil - override def params = Nil - override def paramTypes = Nil - override def safeToString = "=> " + resultType - override def kind = "NullaryMethodType" + case class NullaryMethodType(override val resultType: Type) extends Type { + override def isTrivial = resultType.isTrivial && (resultType eq resultType.withoutAnnotations) + override def prefix: Type = resultType.prefix + override def narrow: Type = resultType.narrow + override def finalResultType: Type = resultType.finalResultType + override def termSymbol: Symbol = resultType.termSymbol + override def typeSymbol: Symbol = resultType.typeSymbol + override def parents: List[Type] = resultType.parents + override def decls: Scope = resultType.decls + override def baseTypeSeq: BaseTypeSeq = resultType.baseTypeSeq + override def baseTypeSeqDepth: Int = resultType.baseTypeSeqDepth + override def baseClasses: List[Symbol] = resultType.baseClasses + override def baseType(clazz: Symbol): Type = resultType.baseType(clazz) + override def boundSyms = resultType.boundSyms + override def isVolatile = resultType.isVolatile + override def safeToString: String = "=> "+ resultType + override def kind = "NullaryMethodType" } object NullaryMethodType extends NullaryMethodTypeExtractor diff --git a/test/files/pos/t5317.scala b/test/files/pos/t5317.scala new file mode 100644 index 0000000000..8c9c9d8222 --- /dev/null +++ b/test/files/pos/t5317.scala @@ -0,0 +1,12 @@ +object Test { + trait S { type T; val x: AnyRef } + trait A extends S { type T <: A; val x: A = null } + trait B extends S { type T <: B; val x: B = null } + + val a = new A{} + val b = new B{} + val y = if (true) a else b + + // lub of y should allow for this + println(y.x.x) +} -- cgit v1.2.3 From 935ba9ba3021b518dab8f22c1e5d897865777aab Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 27 Dec 2011 07:00:07 -0800 Subject: Consecutive type application. 
The parser through I think a quirk of history would not allow back to back type applications, like expr[T1, T2][T3, T4] Now it does, meaning the only thing it can: val n0 = Partial[immutable.HashMap][String][Int] ++ Seq(("a", 1)) val n1 = Partial.apply[immutable.HashMap].apply[String].apply[Int] ++ Seq(("a", 1)) assert(n0 == n1) --- .../scala/tools/nsc/ast/parser/Parsers.scala | 7 ++-- test/files/run/type-currying.check | 27 +++++++++++++ test/files/run/type-currying.scala | 45 ++++++++++++++++++++++ 3 files changed, 76 insertions(+), 3 deletions(-) create mode 100644 test/files/run/type-currying.check create mode 100644 test/files/run/type-currying.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index e27d5cacda..00ac3976a9 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1534,9 +1534,10 @@ self => val t1 = stripParens(t) t1 match { case Ident(_) | Select(_, _) => - val tapp = atPos(t1.pos.startOrPoint, in.offset) { - TypeApply(t1, exprTypeArgs()) - } + var tapp: Tree = t1 + while (in.token == LBRACKET) + tapp = atPos(tapp.pos.startOrPoint, in.offset)(TypeApply(tapp, exprTypeArgs())) + simpleExprRest(tapp, true) case _ => t1 diff --git a/test/files/run/type-currying.check b/test/files/run/type-currying.check new file mode 100644 index 0000000000..e5db238ca5 --- /dev/null +++ b/test/files/run/type-currying.check @@ -0,0 +1,27 @@ +Map(abc -> 55) +(a,0) +(b,1) +(c,2) +(d,3) +(e,4) +(f,5) +(g,6) +(h,7) +(i,8) +(j,9) +(k,10) +(l,11) +(m,12) +(n,13) +(o,14) +(p,15) +(q,16) +(r,17) +(s,18) +(t,19) +(u,20) +(v,21) +(w,22) +(x,23) +(y,24) +(z,25) diff --git a/test/files/run/type-currying.scala b/test/files/run/type-currying.scala new file mode 100644 index 0000000000..717e0763a3 --- /dev/null +++ b/test/files/run/type-currying.scala @@ -0,0 +1,45 @@ +import scala.collection.{ mutable, immutable, generic } +import generic.CanBuildFrom + +object Partial { + type KnownContainer[CC[K, V] <: collection.Map[K, V]] = { + def values[V] : KnownValues[CC, V] + def apply[K] : KnownKeys[CC, K] + } + type KnownKeys[CC[K, V] <: collection.Map[K, V], K] = { + def apply[V](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]): CC[K, V] + } + type KnownValues[CC[K, V] <: collection.Map[K, V], V] = { + def apply[K](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]): CC[K, V] + } + + def apply[CC[K, V] <: collection.Map[K, V]] : KnownContainer[CC] = new { + def values[V] : KnownValues[CC, V] = new { + def apply[K](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]) = cbf().result + } + def apply[K] = new { + def apply[V](implicit cbf: CanBuildFrom[_, (K, V), CC[K, V]]) = cbf().result + } + } +} + +object Test { + val m = Partial[immutable.TreeMap] + val m1 = m[String] + val m2 = m[Int][Int] + + val mutableBippy = Partial[mutable.HashMap][String][Int] + mutableBippy("abc") = 55 + + val immutableBippy = Partial[immutable.HashMap].values[Int] + def make[T](xs: T*) = immutableBippy[T] ++ xs.zipWithIndex + + val n0 = Partial[immutable.HashMap][String][Int] ++ Seq(("a", 1)) + val n1 = Partial.apply[immutable.HashMap].apply[String].apply[Int] ++ Seq(("a", 1)) + + def main(args: Array[String]): Unit = { + println(mutableBippy) + make('a' to 'z': _*).toList.sorted foreach println + assert(n0 == n1) + } +} -- cgit v1.2.3 From 33ab1a574af0d5f736ab73c5a18cc6a4cb36cbb0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 27 Dec 2011 22:28:41 -0800 
Subject: Tone down insensible-equality warning. Closes SI-5175. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/neg/checksensible.check | 8 +------- test/files/pos/t5175.flags | 1 + test/files/pos/t5175.scala | 9 +++++++++ 4 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 test/files/pos/t5175.flags create mode 100644 test/files/pos/t5175.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 98f1c96cad..f920f3c135 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1092,7 +1092,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R } // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean - if (nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) { + if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) { if (actual isSubClass receiver) () else if (receiver isSubClass actual) () // warn only if they have no common supertype below Object diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check index d45d16165f..0881205bb4 100644 --- a/test/files/neg/checksensible.check +++ b/test/files/neg/checksensible.check @@ -28,12 +28,6 @@ checksensible.scala:27: error: comparing values of types Int and Unit using `==' checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false 1 == "abc" ^ -checksensible.scala:32: error: String and Int are unrelated: they will most likely never compare equal - "abc" == 1 // warns because the lub of String and Int is Any - ^ -checksensible.scala:33: error: Some[Int] and Int are unrelated: they will most likely never compare equal - Some(1) == 1 // as above - ^ checksensible.scala:38: error: comparing a fresh object using `==' will always yield false new AnyRef == 1 ^ @@ -100,4 +94,4 @@ checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true while ((c = in.read) != -1) ^ -34 errors found +32 errors found diff --git a/test/files/pos/t5175.flags b/test/files/pos/t5175.flags new file mode 100644 index 0000000000..e8fb65d50c --- /dev/null +++ b/test/files/pos/t5175.flags @@ -0,0 +1 @@ +-Xfatal-warnings \ No newline at end of file diff --git a/test/files/pos/t5175.scala b/test/files/pos/t5175.scala new file mode 100644 index 0000000000..e15cc3affd --- /dev/null +++ b/test/files/pos/t5175.scala @@ -0,0 +1,9 @@ +object Test { + def ==(p: Phase): Int = 0 + + def foo { + ==(new Phase()) + } +} + +class Phase -- cgit v1.2.3 From d05881e42e661986286ac15c2a8c32f651b0101e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 27 Dec 2011 15:54:32 -0800 Subject: repl power mode improvements. Implemented great suggestion from moors. More imports in power mode, including the contents of treedsl. Also, another swing at overcoming the mismatched global singletons problem, this time taking advantage of dependent method types. Amazingly, it seems to work. Continuing in the quest to create a useful compiler hacking environment, there is now an implicit from Symbol which allows you to pretend a Symbol takes type parameters, and the result is the applied type based on the manifests of the type arguments and the type constructor of the symbol. 
Examples: // magic with manifests scala> val tp = ArrayClass[scala.util.Random] tp: $r.global.Type = Array[scala.util.Random] // evidence scala> tp.memberType(Array_apply) res0: $r.global.Type = (i: Int)scala.util.Random // treedsl scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) m: $r.treedsl.global.Match = 10 match { case 5 => false case _ => true } // typed is in scope scala> typed(m).tpe res1: $r.treedsl.global.Type = Boolean --- .../scala/reflect/internal/Definitions.scala | 24 +++++ src/compiler/scala/reflect/internal/Types.scala | 4 + .../scala/tools/nsc/interpreter/ILoop.scala | 4 +- .../scala/tools/nsc/interpreter/Power.scala | 111 ++++++--------------- .../scala/tools/nsc/interpreter/ReplVals.scala | 75 ++++++++++++-- src/compiler/scala/tools/reflect/Mock.scala | 3 +- test/files/run/repl-power.check | 22 +++- test/files/run/repl-power.scala | 4 + 8 files changed, 149 insertions(+), 98 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 15f89e1382..fe20613c22 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -386,6 +386,30 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val NoneModule: Symbol = getModule("scala.None") lazy val SomeModule: Symbol = getModule("scala.Some") + /** Note: don't use this manifest/type function for anything important, + * as it is incomplete. Would love to have things like existential types + * working, but very unfortunately the manifests just stuff the relevant + * information into the toString method. + */ + def manifestToType(m: OptManifest[_]): Type = m match { + case x: AnyValManifest[_] => + getClassIfDefined("scala." + x).tpe + case m: ClassManifest[_] => + val name = m.erasure.getName + if (name endsWith nme.MODULE_SUFFIX_STRING) + getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING).tpe + else { + val sym = getClassIfDefined(name) + val args = m.typeArguments + + if (sym eq NoSymbol) NoType + else if (args.isEmpty) sym.tpe + else appliedType(sym.typeConstructor, args map manifestToType) + } + case _ => + NoType + } + // The given symbol represents either String.+ or StringAdd.+ def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+ def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 47184eee51..38f51b1459 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -5487,6 +5487,10 @@ A type's typeSymbol should never be inspected directly. 
case _ => t } + def elimRefinement(t: Type) = t match { + case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) + case _ => t + } /** A collector that tests for existential types appearing at given variance in a type */ class ContainsVariantExistentialCollector(v: Int) extends TypeCollector(false) { diff --git a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala index 391d5ab8ee..3ddbffa75e 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ILoop.scala @@ -51,7 +51,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) intp.reporter.printMessage(msg) def isAsync = !settings.Yreplsync.value - lazy val power = Power(this) + lazy val power = new Power(intp, new StdReplVals(this)) // TODO // object opt extends AestheticSettings @@ -253,6 +253,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) /** Power user commands */ lazy val powerCommands: List[LoopCommand] = List( nullary("dump", "displays a view of the interpreter's internal state", dumpCommand), + nullary("vals", "gives information about the power mode repl vals", valsCommand), cmd("phase", "", "set the implicit phase for power commands", phaseCommand), cmd("wrap", "", "name of method to wrap around each repl line", wrapCommand) withLongHelp (""" |:wrap @@ -283,6 +284,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) history.asStrings takeRight 30 foreach echo in.redrawLine() } + private def valsCommand(): Result = power.valsDescription private val typeTransforms = List( "scala.collection.immutable." -> "immutable.", diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index ac7c2b1ecc..82a466a7e5 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -15,54 +15,12 @@ import scala.io.Codec import java.net.{ URL, MalformedURLException } import io.{ Path } -trait SharesGlobal { - type GlobalType <: Global - val global: GlobalType - - // This business gets really old: - // - // found : power.intp.global.Symbol - // required: global.Symbol - // - // Have tried many ways to cast it aside, this is the current winner. - // Todo: figure out a way to abstract over all the type members. - type AnySymbol = Global#Symbol - type AnyType = Global#Type - type AnyName = Global#Name - type AnyTree = Global#Tree - - type Symbol = global.Symbol - type Type = global.Type - type Name = global.Name - type Tree = global.Tree - - implicit def upDependentSymbol(x: AnySymbol): Symbol = x.asInstanceOf[Symbol] - implicit def upDependentType(x: AnyType): Type = x.asInstanceOf[Type] - implicit def upDependentName(x: AnyName): Name = x.asInstanceOf[Name] - implicit def upDependentTree(x: AnyTree): Tree = x.asInstanceOf[Tree] -} - -object Power { - def apply(intp: IMain): Power = apply(null, intp) - def apply(repl: ILoop): Power = apply(repl, repl.intp) - def apply(repl: ILoop, intp: IMain): Power = - new Power(repl, intp) { - type GlobalType = intp.global.type - final val global: intp.global.type = intp.global - } -} - /** A class for methods to be injected into the intp in power mode. 
*/ -abstract class Power( - val repl: ILoop, - val intp: IMain -) extends SharesGlobal { - import intp.{ - beQuietDuring, typeOfExpression, interpret, parse - } - import global._ - import definitions.{ getClassIfDefined, getModuleIfDefined } +class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: ReplValsImpl) { + import intp.{ beQuietDuring, typeOfExpression, interpret, parse } + import intp.global._ + import definitions.{ manifestToType, getClassIfDefined, getModuleIfDefined } abstract class SymSlurper { def isKeep(sym: Symbol): Boolean @@ -130,11 +88,11 @@ abstract class Power( private def customInit = replProps.powerInitCode.option flatMap (f => io.File(f).safeSlurp()) def banner = customBanner getOrElse """ - |** Power User mode enabled - BEEP BOOP SPIZ ** + |** Power User mode enabled - BEEP WHIR GYVE ** |** :phase has been set to 'typer'. ** |** scala.tools.nsc._ has been imported ** - |** global._ and definitions._ also imported ** - |** Try :help, vals., power. ** + |** global._, definitions._ also imported ** + |** Try :help, :vals, power. ** """.stripMargin.trim private def initImports = List( @@ -142,8 +100,9 @@ abstract class Power( "scala.collection.JavaConverters._", "intp.global.{ error => _, _ }", "definitions.{ getClass => _, _ }", - "power.Implicits._", - "power.rutil._" + "power.rutil._", + "replImplicits._", + "treedsl.CODE._" ) def init = customInit match { @@ -155,12 +114,23 @@ abstract class Power( */ def unleash(): Unit = beQuietDuring { // First we create the ReplVals instance and bind it to $r - intp.bind("$r", new ReplVals(repl)) + intp.bind("$r", replVals) // Then we import everything from $r. intp interpret ("import " + intp.pathToTerm("$r") + "._") // And whatever else there is to do. init.lines foreach (intp interpret _) } + def valsDescription: String = { + def to_str(m: Symbol) = "%12s %s".format( + m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") + + ( rutil.info[ReplValsImpl].declares + filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) + sortBy (_.decodedName) + map to_str + mkString ("Name and type of values imported into the repl in power mode.\n\n", "\n", "") + ) + } trait LowPriorityInternalInfo { implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None) @@ -180,25 +150,6 @@ abstract class Power( private def symbol = symbol_ private def name = name_ - // Would love to have stuff like existential types working, - // but very unfortunately those manifests just stuff the relevant - // information into the toString method. Boo. - private def manifestToType(m: Manifest[_]): Type = m match { - case x: AnyValManifest[_] => - getClassIfDefined("scala." 
+ x).tpe - case _ => - val name = m.erasure.getName - if (name endsWith nme.MODULE_SUFFIX_STRING) - getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING).tpe - else { - val sym = getClassIfDefined(name) - val args = m.typeArguments - - if (args.isEmpty) sym.tpe - else typeRef(NoPrefix, sym, args map manifestToType) - } - } - def symbol_ : Symbol = getClassIfDefined(erasure.getName) def tpe_ : Type = manifestToType(man) def name_ : Name = symbol.name @@ -208,9 +159,10 @@ abstract class Power( def owner = symbol.owner def owners = symbol.ownerChain drop 1 def defn = symbol.defString + def decls = symbol.info.decls - def declares = members filter (_.owner == symbol) - def inherits = members filterNot (_.owner == symbol) + def declares = decls.toList + def inherits = members filterNot (declares contains _) def types = members filter (_.name.isTypeName) def methods = members filter (_.isMethod) def overrides = declares filter (_.isOverride) @@ -233,8 +185,8 @@ abstract class Power( def whoHas(name: String) = bts filter (_.decls exists (_.name.toString == name)) def <:<[U: Manifest](other: U) = tpe <:< InternalInfo[U].tpe - def lub[U: Manifest](other: U) = global.lub(List(tpe, InternalInfo[U].tpe)) - def glb[U: Manifest](other: U) = global.glb(List(tpe, InternalInfo[U].tpe)) + def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, InternalInfo[U].tpe)) + def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, InternalInfo[U].tpe)) def shortClass = erasure.getName split "[$.]" last override def toString = value match { @@ -337,7 +289,7 @@ abstract class Power( def pp() { intp prettyPrint slurp() } } - protected trait Implicits1 { + trait Implicits1 { // fallback implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) = new SinglePrettifierClass[T](x) @@ -367,7 +319,6 @@ abstract class Power( implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in) implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec) } - object Implicits extends Implicits2 { } trait ReplUtilities { def module[T: Manifest] = getModuleIfDefined(manifest[T].erasure.getName stripSuffix nme.MODULE_SUFFIX_STRING) @@ -396,11 +347,7 @@ abstract class Power( } lazy val rutil: ReplUtilities = new ReplUtilities { } - - lazy val phased: Phased = new Phased with SharesGlobal { - type GlobalType = Power.this.global.type - final val global: Power.this.global.type = Power.this.global - } + lazy val phased: Phased = new { val global: intp.global.type = intp.global } with Phased { } def context(code: String) = analyzer.rootContext(unit(code)) def source(code: String) = new BatchSourceFile("", code) diff --git a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala index 2f2489b242..6e5dec4205 100644 --- a/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala +++ b/src/compiler/scala/tools/nsc/interpreter/ReplVals.scala @@ -6,15 +6,68 @@ package scala.tools.nsc package interpreter -final class ReplVals(r: ILoop) { - lazy val repl = r - lazy val intp = r.intp - lazy val power = r.power - lazy val reader = r.in - lazy val vals = this - lazy val global = intp.global - lazy val isettings = intp.isettings - lazy val completion = reader.completion - lazy val history = reader.history - lazy val phased = power.phased +/** A class which the repl utilizes to expose predefined objects. 
+ * The base implementation is empty; the standard repl implementation + * is StdReplVals. + */ +abstract class ReplVals { } + +class StdReplVals(final val r: ILoop) extends ReplVals { + final lazy val repl = r + final lazy val intp = r.intp + final lazy val power = r.power + final lazy val reader = r.in + final lazy val vals = this + final lazy val global: intp.global.type = intp.global + final lazy val isettings = intp.isettings + final lazy val completion = reader.completion + final lazy val history = reader.history + final lazy val phased = power.phased + final lazy val analyzer = global.analyzer + + final lazy val treedsl = new { val global: intp.global.type = intp.global } with ast.TreeDSL { } + final lazy val typer = analyzer.newTyper( + analyzer.rootContext( + power.unit("").asInstanceOf[analyzer.global.CompilationUnit] + ) + ) + + final lazy val replImplicits = new power.Implicits2 { + import intp.global._ + + private val manifestFn = ReplVals.mkManifestToType[intp.global.type](global) + implicit def mkManifestToType(sym: Symbol) = manifestFn(sym) + } + + def typed[T <: analyzer.global.Tree](tree: T): T = typer.typed(tree).asInstanceOf[T] +} + +object ReplVals { + /** Latest attempt to work around the challenge of foo.global.Type + * not being seen as the same type as bar.global.Type even though + * the globals are the same. Dependent method types to the rescue. + */ + def mkManifestToType[T <: Global](global: T) = { + import global._ + import definitions._ + + /** We can't use definitions.manifestToType directly because we're passing + * it to map and the compiler refuses to perform eta expansion on a method + * with a dependent return type. (Can this be relaxed?) To get around this + * I have this forwarder which widens the type and then cast the result back + * to the dependent type. + */ + def manifestToType(m: OptManifest[_]): Global#Type = + definitions.manifestToType(m) + + class AppliedTypeFromManifests(sym: Symbol) { + def apply[M](implicit m1: Manifest[M]): Type = + appliedType(sym.typeConstructor, List(m1) map (x => manifestToType(x).asInstanceOf[Type])) + + def apply[M1, M2](implicit m1: Manifest[M1], m2: Manifest[M2]): Type = + appliedType(sym.typeConstructor, List(m1, m2) map (x => manifestToType(x).asInstanceOf[Type])) + } + + (sym: Symbol) => new AppliedTypeFromManifests(sym) + } } diff --git a/src/compiler/scala/tools/reflect/Mock.scala b/src/compiler/scala/tools/reflect/Mock.scala index 5301816b4b..52c052b8a2 100644 --- a/src/compiler/scala/tools/reflect/Mock.scala +++ b/src/compiler/scala/tools/reflect/Mock.scala @@ -25,7 +25,8 @@ trait Mock extends (Invoked => AnyRef) { def newInvocationHandler() = new InvocationHandler { def invoke(proxy: AnyRef, method: Method, args: Array[AnyRef]) = - mock(Invoked(proxy, method, args)) + try { mock(Invoked(proxy, method, args)) } + catch { case _: NoClassDefFoundError => sys.exit(1) } } } diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check index 38e7532133..1e7b6f0cd8 100644 --- a/test/files/run/repl-power.check +++ b/test/files/run/repl-power.check @@ -2,15 +2,31 @@ Type in expressions to have them evaluated. Type :help for more information. scala> :power -** Power User mode enabled - BEEP BOOP SPIZ ** +** Power User mode enabled - BEEP WHIR GYVE ** ** :phase has been set to 'typer'. ** ** scala.tools.nsc._ has been imported ** -** global._ and definitions._ also imported ** -** Try :help, vals., power. ** +** global._, definitions._ also imported ** +** Try :help, :vals, power. 
** scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." res0: $r.global.emptyValDef.type = private val _ = _ +scala> val tp = ArrayClass[scala.util.Random] // magic with manifests +tp: $r.global.Type = Array[scala.util.Random] + +scala> tp.memberType(Array_apply) // evidence +res1: $r.global.Type = (i: Int)scala.util.Random + +scala> val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl +m: $r.treedsl.global.Match = +10 match { + case 5 => false + case _ => true +} + +scala> typed(m).tpe // typed is in scope +res2: $r.treedsl.global.Type = Boolean + scala> diff --git a/test/files/run/repl-power.scala b/test/files/run/repl-power.scala index 9f70ac4b68..27da3df106 100644 --- a/test/files/run/repl-power.scala +++ b/test/files/run/repl-power.scala @@ -5,6 +5,10 @@ object Test extends ReplTest { :power // guarding against "error: reference to global is ambiguous" global.emptyValDef // "it is imported twice in the same scope by ..." +val tp = ArrayClass[scala.util.Random] // magic with manifests +tp.memberType(Array_apply) // evidence +val m = LIT(10) MATCH (CASE(LIT(5)) ==> FALSE, DEFAULT ==> TRUE) // treedsl +typed(m).tpe // typed is in scope """.trim } -- cgit v1.2.3 From ec418508d5551071f0b1cec2f74e60ce31c2332a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 28 Dec 2011 09:13:38 -0800 Subject: Fixing slightly damaged test. --- test/files/run/treePrint.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'test/files') diff --git a/test/files/run/treePrint.scala b/test/files/run/treePrint.scala index 745c2150c2..e0332a705f 100644 --- a/test/files/run/treePrint.scala +++ b/test/files/run/treePrint.scala @@ -35,7 +35,7 @@ object Test { settings.Ycompacttrees.value = true val intp = new IMain(settings, new PrintWriter(new NullOutputStream)) - val power = Power(intp) + val power = new Power(intp, new ReplVals { }) intp.interpret("""def initialize = "Have to interpret something or we get errors." """) power trees code foreach println } -- cgit v1.2.3 From 82c793a438c7bd802daf96c8b2012f54fbd737ba Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 23 Dec 2011 03:52:03 -0800 Subject: More performance work. Custom versions of collection methods which operate on 2 or 3 collections. Eliminated most users of zip/zipped. Cleaned up the kinds checking code somewhat. Reduced the number of silent typechecks being performed at named argument sites. 
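The "custom versions" mentioned above are lockstep list helpers such as map2, foreach2, foreach3, forall3 and corresponds3, which the hunks below call in place of zip/zipped. The new Collections.scala that defines them is only partially reproduced at the end of this patch, so the Scala sketch below shows the general shape of such helpers: the signatures mirror the call sites visible in the diffs, but the object name and the method bodies are illustrative, not the exact code from the commit.

import scala.annotation.tailrec
import scala.collection.mutable.ListBuffer

// Illustrative sketch only: lockstep traversal of two or three lists
// without allocating the intermediate tuples that zip/zipped create.
object LockstepHelpersSketch {
  // Like (xs, ys).zipped.map(f), but builds the result list directly
  // and stops at the end of the shorter input.
  def map2[A, B, C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
    val buf = new ListBuffer[C]
    var xs1 = xs
    var ys1 = ys
    while (xs1.nonEmpty && ys1.nonEmpty) {
      buf += f(xs1.head, ys1.head)
      xs1 = xs1.tail
      ys1 = ys1.tail
    }
    buf.toList
  }

  // Like (xs, ys).zipped.foreach(f), purely for side effects.
  def foreach2[A, B](xs: List[A], ys: List[B])(f: (A, B) => Unit): Unit = {
    var xs1 = xs
    var ys1 = ys
    while (xs1.nonEmpty && ys1.nonEmpty) {
      f(xs1.head, ys1.head)
      xs1 = xs1.tail
      ys1 = ys1.tail
    }
  }

  // True if all three lists have the same length and f holds for every
  // aligned triple; mirrors the corresponds3 being moved out of Types.scala.
  @tailrec def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C], f: (A, B, C) => Boolean): Boolean =
    if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty
    else xs2.nonEmpty && xs3.nonEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail, f)
}

Call sites then read map2(tparams, args)((tparam, arg) => ...) rather than (tparams, args).zipped map (...), as seen throughout the Types.scala and Kinds.scala hunks that follow.
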
--- src/compiler/scala/reflect/internal/Kinds.scala | 223 ++++++++++++++++ .../scala/reflect/internal/SymbolTable.scala | 2 + src/compiler/scala/reflect/internal/TreeInfo.scala | 64 +++-- src/compiler/scala/reflect/internal/Types.scala | 297 ++++++--------------- .../scala/reflect/internal/util/Collections.scala | 138 ++++++++++ src/compiler/scala/tools/nsc/ast/Trees.scala | 13 +- .../scala/tools/nsc/symtab/SymbolTable.scala | 3 +- .../scala/tools/nsc/transform/LiftCode.scala | 2 +- .../tools/nsc/transform/SpecializeTypes.scala | 16 +- .../scala/tools/nsc/transform/UnCurry.scala | 8 +- .../scala/tools/nsc/typechecker/Infer.scala | 80 ++---- .../tools/nsc/typechecker/NamesDefaults.scala | 243 ++++++++--------- .../scala/tools/nsc/typechecker/RefChecks.scala | 10 +- .../tools/nsc/typechecker/SuperAccessors.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 25 +- src/compiler/scala/tools/util/EditDistance.scala | 38 ++- src/library/scala/collection/LinearSeqLike.scala | 6 + test/files/neg/names-defaults-neg.check | 15 +- 18 files changed, 705 insertions(+), 480 deletions(-) create mode 100644 src/compiler/scala/reflect/internal/Kinds.scala create mode 100644 src/compiler/scala/reflect/internal/util/Collections.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala new file mode 100644 index 0000000000..15fcb5f94d --- /dev/null +++ b/src/compiler/scala/reflect/internal/Kinds.scala @@ -0,0 +1,223 @@ +/* NSC -- new scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.reflect +package internal + +import scala.collection.{ mutable, immutable } +import scala.tools.util.StringOps.{ countAsString, countElementsAsString } + +trait Kinds { + self: SymbolTable => + + import definitions._ + + private type SymPair = ((Symbol, Symbol)) // ((Argument, Parameter)) + + case class KindErrors( + arity: List[SymPair] = Nil, + variance: List[SymPair] = Nil, + strictness: List[SymPair] = Nil + ) { + def isEmpty = arity.isEmpty && variance.isEmpty && strictness.isEmpty + + def arityError(syms: SymPair) = copy(arity = arity :+ syms) + def varianceError(syms: SymPair) = copy(variance = variance :+ syms) + def strictnessError(syms: SymPair) = copy(strictness = strictness :+ syms) + + def ++(errs: KindErrors) = KindErrors( + arity ++ errs.arity, + variance ++ errs.variance, + strictness ++ errs.strictness + ) + // @M TODO this method is duplicated all over the place (varianceString) + private def varStr(s: Symbol): String = + if (s.isCovariant) "covariant" + else if (s.isContravariant) "contravariant" + else "invariant"; + + private def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else { + if((a0 eq b0) || (a0.owner eq b0.owner)) "" + else { + var a = a0; var b = b0 + while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner} + if (a.locationString ne "") " (" + a.locationString.trim + ")" else "" + } + } + private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String = + f(a+qualify(a,p), p+qualify(p,a)) + + private def strictnessMessage(a: Symbol, p: Symbol) = + kindMessage(a, p)("%s's bounds %s are stricter than %s's declared bounds %s".format( + _, a.info, _, p.info)) + + private def varianceMessage(a: Symbol, p: Symbol) = + kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p))) + + private def arityMessage(a: Symbol, p: Symbol) = + kindMessage(a, p)("%s has %s, but %s has 
%s".format( + _, countElementsAsString(a.typeParams.length, "type parameter"), + _, countAsString(p.typeParams.length)) + ) + + def errorMessage(targ: Type, tparam: Symbol): String = ( + (targ+"'s type parameters do not match "+tparam+"'s expected parameters: ") + + (arity map { case (a, p) => arityMessage(a, p) } mkString ", ") + + (variance map { case (a, p) => varianceMessage(a, p) } mkString ", ") + + (strictness map { case (a, p) => strictnessMessage(a, p) } mkString ", ") + ) + } + val NoKindErrors = KindErrors(Nil, Nil, Nil) + + // TODO: this desperately needs to be cleaned up + // plan: split into kind inference and subkinding + // every Type has a (cached) Kind + def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean = + checkKindBounds0(tparams, targs, pre, owner, false).isEmpty + + /** Check whether `sym1`'s variance conforms to `sym2`'s variance. + * + * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal. + */ + private def variancesMatch(sym1: Symbol, sym2: Symbol) = ( + sym2.variance==0 + || sym1.variance==sym2.variance + ) + + /** Check well-kindedness of type application (assumes arities are already checked) -- @M + * + * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1 + * (checked one type member at a time -- in that case, prefix is the name of the type alias) + * + * Type application is just like value application: it's "contravariant" in the sense that + * the type parameters of the supplied type arguments must conform to the type parameters of + * the required type parameters: + * - their bounds must be less strict + * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters) + * - @M TODO: are these conditions correct,sufficient&necessary? + * + * e.g. 
class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since + * List's type parameter is also covariant and its bounds are weaker than <: Int + */ + def checkKindBounds0( + tparams: List[Symbol], + targs: List[Type], + pre: Type, + owner: Symbol, + explainErrors: Boolean + ): List[(Type, Symbol, KindErrors)] = { + + // instantiate type params that come from outside the abstract type we're currently checking + def transform(tp: Type, clazz: Symbol): Type = + tp.asSeenFrom(pre, clazz) + def transformedBounds(p: Symbol, o: Symbol) = + transform(p.info.instantiateTypeParams(tparams, targs).bounds, o) + + // check that the type parameters hkargs to a higher-kinded type conform to the + // expected params hkparams + def checkKindBoundsHK( + hkargs: List[Symbol], + arg: Symbol, + param: Symbol, + paramowner: Symbol, + underHKParams: List[Symbol], + withHKArgs: List[Symbol] + ): KindErrors = { + + var kindErrors: KindErrors = NoKindErrors + def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs) + // @M sometimes hkargs != arg.typeParams, the symbol and the type may + // have very different type parameters + val hkparams = param.typeParams + def kindCheck(cond: Boolean, f: KindErrors => KindErrors) { + if (!cond) + kindErrors = f(kindErrors) + } + + if (settings.debug.value) { + log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner) + log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner) + log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) + } + + if (!sameLength(hkargs, hkparams)) { + // Any and Nothing are kind-overloaded + if (arg == AnyClass || arg == NothingClass) NoKindErrors + // shortcut: always set error, whether explainTypesOrNot + else return kindErrors.arityError(arg -> param) + } + else foreach2(hkargs, hkparams) { (hkarg, hkparam) => + if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind * + kindCheck(variancesMatch(hkarg, hkparam), _ varianceError (hkarg -> hkparam)) + // instantiateTypeParams(tparams, targs) + // higher-order bounds, may contain references to type arguments + // substSym(hkparams, hkargs) + // these types are going to be compared as types of kind * + // + // Their arguments use different symbols, but are + // conceptually the same. Could also replace the types by + // polytypes, but can't just strip the symbols, as ordering + // is lost then. 
+ val declaredBounds = transformedBounds(hkparam, paramowner) + val declaredBoundsInst = bindHKParams(declaredBounds) + val argumentBounds = transform(hkarg.info.bounds, owner) + + kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam)) + + debuglog( + "checkKindBoundsHK base case: " + hkparam + + " declared bounds: " + declaredBounds + + " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" + + "checkKindBoundsHK base case: "+ hkarg + + " has bounds: " + argumentBounds + ) + } + else { + debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg) + kindErrors ++= checkKindBoundsHK( + hkarg.typeParams, + hkarg, + hkparam, + paramowner, + underHKParams ++ hkparam.typeParams, + withHKArgs ++ hkarg.typeParams + ) + } + if (!explainErrors && !kindErrors.isEmpty) + return kindErrors + } + if (explainErrors) kindErrors + else NoKindErrors + } + + if (settings.debug.value && (tparams.nonEmpty || targs.nonEmpty)) log( + "checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + + owner + ", " + explainErrors + ")" + ) + + flatMap2(tparams, targs) { (tparam, targ) => + // Prevent WildcardType from causing kind errors, as typevars may be higher-order + if (targ == WildcardType) Nil else { + // force symbol load for #4205 + targ.typeSymbolDirect.info + // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!! + val tparamsHO = targ.typeParams + if (targ.isHigherKinded || tparam.typeParams.nonEmpty) { + // NOTE: *not* targ.typeSymbol, which normalizes + val kindErrors = checkKindBoundsHK( + tparamsHO, targ.typeSymbolDirect, tparam, + tparam.owner, tparam.typeParams, tparamsHO + ) + if (kindErrors.isEmpty) Nil else { + if (explainErrors) List((targ, tparam, kindErrors)) + // Return as soon as an error is seen if there's nothing to explain. 
+ else return List((NoType, NoSymbol, NoKindErrors)) + } + } + else Nil + } + } + } +} \ No newline at end of file diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala index 29ac5fe539..5be69e06ad 100644 --- a/src/compiler/scala/reflect/internal/SymbolTable.scala +++ b/src/compiler/scala/reflect/internal/SymbolTable.scala @@ -10,9 +10,11 @@ import scala.collection.{ mutable, immutable } import util._ abstract class SymbolTable extends api.Universe + with Collections with Names with Symbols with Types + with Kinds with Scopes with Definitions with Constants diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala index 1dc93a7add..14bf36fb47 100644 --- a/src/compiler/scala/reflect/internal/TreeInfo.scala +++ b/src/compiler/scala/reflect/internal/TreeInfo.scala @@ -107,7 +107,15 @@ abstract class TreeInfo { @deprecated("Use isExprSafeToInline instead", "2.10.0") def isPureExpr(tree: Tree) = isExprSafeToInline(tree) - def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = { + def zipMethodParamsAndArgs(params: List[Symbol], args: List[Tree]): List[(Symbol, Tree)] = + mapMethodParamsAndArgs(params, args)((param, arg) => ((param, arg))) + + def mapMethodParamsAndArgs[R](params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => R): List[R] = { + val b = List.newBuilder[R] + foreachMethodParamAndArg(params, args)((param, arg) => b += f(param, arg)) + b.result + } + def foreachMethodParamAndArg(params: List[Symbol], args: List[Tree])(f: (Symbol, Tree) => Unit): Boolean = { val plen = params.length val alen = args.length def fail() = { @@ -116,27 +124,29 @@ abstract class TreeInfo { " params = " + params + "\n" + " args = " + args + "\n" ) - params zip args + false } - if (plen == alen) params zip args - else if (params.isEmpty) fail + if (plen == alen) foreach2(params, args)(f) + else if (params.isEmpty) return fail else if (isVarArgsList(params)) { val plenInit = plen - 1 if (alen == plenInit) { if (alen == 0) Nil // avoid calling mismatched zip - else params.init zip args + else foreach2(params.init, args)(f) } - else if (alen < plenInit) fail + else if (alen < plenInit) return fail else { - val front = params.init zip (args take plenInit) - val back = args drop plenInit map (a => (params.last, a)) - front ++ back + foreach2(params.init, args take plenInit)(f) + val remainingArgs = args drop plenInit + foreach2(List.fill(remainingArgs.size)(params.last), remainingArgs)(f) } } - else fail - } + else return fail + true + } + /** * Selects the correct parameter list when there are nested applications. * Given Apply(fn, args), args might correspond to any of fn.symbol's parameter @@ -144,22 +154,28 @@ abstract class TreeInfo { * applies: for instance Apply(fn @ Apply(Apply(_, _), _), args) implies args * correspond to the third parameter list. * + * The argument fn is the function part of the apply node being considered. + * * Also accounts for varargs. */ + private def applyMethodParameters(fn: Tree): List[Symbol] = { + val depth = applyDepth(fn) + // There could be applies which go beyond the parameter list(s), + // being applied to the result of the method call. + // !!! Note that this still doesn't seem correct, although it should + // be closer than what it replaced. 
+ if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) + else if (fn.symbol.paramss.isEmpty) Nil + else fn.symbol.paramss.last + } + def zipMethodParamsAndArgs(t: Tree): List[(Symbol, Tree)] = t match { - case Apply(fn, args) => - val depth = applyDepth(fn) - // There could be applies which go beyond the parameter list(s), - // being applied to the result of the method call. - // !!! Note that this still doesn't seem correct, although it should - // be closer than what it replaced. - val params = ( - if (depth < fn.symbol.paramss.size) fn.symbol.paramss(depth) - else if (fn.symbol.paramss.isEmpty) Nil - else fn.symbol.paramss.last - ) - zipMethodParamsAndArgs(params, args) - case _ => Nil + case Apply(fn, args) => zipMethodParamsAndArgs(applyMethodParameters(fn), args) + case _ => Nil + } + def foreachMethodParamAndArg(t: Tree)(f: (Symbol, Tree) => Unit): Unit = t match { + case Apply(fn, args) => foreachMethodParamAndArg(applyMethodParameters(fn), args)(f) + case _ => } /** Is symbol potentially a getter of a variable? diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index d7caebbb0a..690f9b7204 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -578,7 +578,7 @@ trait Types extends api.Types { self: SymbolTable => * T.asSeenFrom(ThisType(C), D) (where D is owner of m) * = Int */ - def asSeenFrom(pre: Type, clazz: Symbol): Type = + def asSeenFrom(pre: Type, clazz: Symbol): Type = { if (isTrivial || phase.erasedTypes && pre.typeSymbol != ArrayClass) this else { // scala.tools.nsc.util.trace.when(pre.isInstanceOf[ExistentialType])("X "+this+".asSeenfrom("+pre+","+clazz+" = ") { @@ -594,6 +594,7 @@ trait Types extends api.Types { self: SymbolTable => stopTimer(asSeenFromNanos, start) result } + } /** The info of `sym`, seen as a member of this type. * @@ -1623,29 +1624,40 @@ trait Types extends api.Types { self: SymbolTable => // (this can happen only for erroneous programs). } + private object enterRefs extends TypeMap { + private var tparam: Symbol = _ + + def apply(tp: Type): Type = { + tp match { + case TypeRef(_, sym, args) if args.nonEmpty => + if (settings.debug.value && !sameLength(sym.info.typeParams, args)) + debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args) + + foreach2(sym.info.typeParams, args) { (tparam1, arg) => + if (arg contains tparam) { + addRef(NonExpansive, tparam, tparam1) + if (arg.typeSymbol != tparam) + addRef(Expansive, tparam, tparam1) + } + } + case _ => + } + mapOver(tp) + } + def enter(tparam0: Symbol, parent: Type) { + this.tparam = tparam0 + this(parent) + } + } + /** Compute initial (one-step) references and set state to `Initializing`. 
*/ private def computeRefs() { refs = Array(Map(), Map()) - for (tparam <- typeSymbol.typeParams) { - val enterRefs = new TypeMap { - def apply(tp: Type): Type = { - tp match { - case TypeRef(_, sym, args) if args.nonEmpty => - if (settings.debug.value && !sameLength(sym.info.typeParams, args)) - debugwarn("Mismatched zip in computeRefs(): " + sym.info.typeParams + ", " + args) - - for ((tparam1, arg) <- sym.info.typeParams zip args; if arg contains tparam) { - addRef(NonExpansive, tparam, tparam1) - if (arg.typeSymbol != tparam) - addRef(Expansive, tparam, tparam1) - } - case _ => - } - mapOver(tp) - } + typeSymbol.typeParams foreach { tparam => + parents foreach { p => + enterRefs.enter(tparam, p) } - for (p <- parents) enterRefs(p) } state = Initializing } @@ -3592,9 +3604,9 @@ A type's typeSymbol should never be inspected directly. // val containsContravariantExistentialCollector = new ContainsVariantExistentialCollector(-1) def typeParamsToExistentials(clazz: Symbol, tparams: List[Symbol]): List[Symbol] = { - val eparams = for ((tparam, i) <- tparams.zipWithIndex) yield { - clazz.newExistential(clazz.pos, newTypeName("?"+i)).setInfo(tparam.info.bounds) - } + val eparams = mapWithIndex(tparams)((tparam, i) => + clazz.newExistential(clazz.pos, newTypeName("?"+i)) setInfo tparam.info.bounds) + eparams map (_ substInfo (tparams, eparams)) } @@ -4425,8 +4437,7 @@ A type's typeSymbol should never be inspected directly. case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) => assert(sym1 == sym2) pre1 =:= pre2 && - ((args1, args2, sym1.typeParams).zipped forall { - (arg1, arg2, tparam) => + forall3(args1, args2, sym1.typeParams) { (arg1, arg2, tparam) => //if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG if (tparam.variance == 0) arg1 =:= arg2 else if (arg1.isInstanceOf[TypeVar]) @@ -4436,7 +4447,7 @@ A type's typeSymbol should never be inspected directly. // also: think what happens if there are embedded typevars? if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1 else true - }) + } case (et: ExistentialType, _) => et.withTypeVars(isConsistent(_, tp2)) case (_, et: ExistentialType) => @@ -4959,19 +4970,11 @@ A type's typeSymbol should never be inspected directly. // --> thus, cannot be subtypes (Any/Nothing has already been checked) })) - /** True if all three arguments have the same number of elements and - * the function is true for all the triples. - */ - @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C], f: (A, B, C) => Boolean): Boolean = { - if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty - else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail, f) - } - def isSubArg(t1: Type, t2: Type, variance: Int) = (variance > 0 || t2 <:< t1) && (variance < 0 || t1 <:< t2) def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = - corresponds3(tps1, tps2, tparams map (_.variance), isSubArg) + corresponds3(tps1, tps2, tparams map (_.variance))(isSubArg) def differentOrNone(tp1: Type, tp2: Type) = if (tp1 eq tp2) NoType else tp1 @@ -5343,7 +5346,6 @@ A type's typeSymbol should never be inspected directly. 
def solve(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Int], upper: Boolean, depth: Int): Boolean = { - val config = tvars zip (tparams zip variances) def solveOne(tvar: TypeVar, tparam: Symbol, variance: Int) { if (tvar.constr.inst == NoType) { @@ -5352,15 +5354,17 @@ A type's typeSymbol should never be inspected directly. val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound)) var cyclic = bound contains tparam - for ((tvar2, (tparam2, variance2)) <- config) { - if (tparam2 != tparam && - ((bound contains tparam2) || - up && (tparam2.info.bounds.lo =:= tparam.tpe) || - !up && (tparam2.info.bounds.hi =:= tparam.tpe))) { + foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => { + val ok = (tparam2 != tparam) && ( + (bound contains tparam2) + || up && (tparam2.info.bounds.lo =:= tparam.tpe) + || !up && (tparam2.info.bounds.hi =:= tparam.tpe) + ) + if (ok) { if (tvar2.constr.inst eq null) cyclic = true solveOne(tvar2, tparam2, variance2) } - } + }) if (!cyclic) { if (up) { if (bound.typeSymbol != AnyClass) @@ -5399,9 +5403,7 @@ A type's typeSymbol should never be inspected directly. } // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info))) - for ((tvar, (tparam, variance)) <- config) - solveOne(tvar, tparam, variance) - + foreach3(tvars, tparams, variances)(solveOne) tvars forall (tvar => tvar.constr.isWithinBounds(tvar.constr.inst)) } @@ -5684,8 +5686,8 @@ A type's typeSymbol should never be inspected directly. case List() => NothingClass.tpe case List(t) => t case ts @ PolyType(tparams, _) :: _ => - val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map - ((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth))) + val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => + tparam.cloneSymbol.setInfo(glb(bounds, depth))) PolyType(tparams1, lub0(matchingInstTypes(ts, tparams1))) case ts @ MethodType(params, _) :: rest => MethodType(params, lub0(matchingRestypes(ts, params map (_.tpe)))) @@ -5724,7 +5726,7 @@ A type's typeSymbol should never be inspected directly. if (syms contains NoSymbol) NoSymbol else { val symtypes = - (narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType)) + map2(narrowts, syms)((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType)) if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(lub(symtypes, decr(depth))) else if (symtypes.tail forall (symtypes.head =:=)) @@ -5829,8 +5831,8 @@ A type's typeSymbol should never be inspected directly. case List() => AnyClass.tpe case List(t) => t case ts @ PolyType(tparams, _) :: _ => - val tparams1 = (tparams, matchingBounds(ts, tparams).transpose).zipped map - ((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth))) + val tparams1 = map2(tparams, matchingBounds(ts, tparams).transpose)((tparam, bounds) => + tparam.cloneSymbol.setInfo(lub(bounds, depth))) PolyType(tparams1, glbNorm(matchingInstTypes(ts, tparams1), depth)) case ts @ MethodType(params, _) :: rest => MethodType(params, glbNorm(matchingRestypes(ts, params map (_.tpe)), depth)) @@ -5961,38 +5963,39 @@ A type's typeSymbol should never be inspected directly. 
else if (args exists (arg => isValueClass(arg.typeSymbol))) Some(ObjectClass.tpe) else Some(typeRef(pre, sym, List(lub(args)))) } - } else { - val args = (sym.typeParams, argss.transpose).zipped map { (tparam, as) => - if (depth == 0) { - if (tparam.variance == variance) { - // Take the intersection of the upper bounds of the type parameters - // rather than falling all the way back to "Any", otherwise we end up not - // conforming to bounds. - val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass) - if (bounds0.isEmpty) AnyClass.tpe - else intersectionType(bounds0) - } - else if (tparam.variance == -variance) NothingClass.tpe - else NoType + } + else { + val args = map2(sym.typeParams, argss.transpose) { (tparam, as) => + if (depth == 0) { + if (tparam.variance == variance) { + // Take the intersection of the upper bounds of the type parameters + // rather than falling all the way back to "Any", otherwise we end up not + // conforming to bounds. + val bounds0 = sym.typeParams map (_.info.bounds.hi) filterNot (_.typeSymbol == AnyClass) + if (bounds0.isEmpty) AnyClass.tpe + else intersectionType(bounds0) } + else if (tparam.variance == -variance) NothingClass.tpe + else NoType + } + else { + if (tparam.variance == variance) lub(as, decr(depth)) + else if (tparam.variance == -variance) glb(as, decr(depth)) else { - if (tparam.variance == variance) lub(as, decr(depth)) - else if (tparam.variance == -variance) glb(as, decr(depth)) - else { - val l = lub(as, decr(depth)) - val g = glb(as, decr(depth)) - if (l <:< g) l - else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we - // just err on the conservative side, i.e. with a bound that is too high. - // if(!(tparam.info.bounds contains tparam)){ //@M can't deal with f-bounds, see #2251 - - val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l) - capturedParams += qvar - qvar.tpe - } + val l = lub(as, decr(depth)) + val g = glb(as, decr(depth)) + if (l <:< g) l + else { // Martin: I removed this, because incomplete. Not sure there is a good way to fix it. For the moment we + // just err on the conservative side, i.e. with a bound that is too high. + // if(!(tparam.info.bounds contains tparam)) //@M can't deal with f-bounds, see #2251 + + val qvar = commonOwner(as) freshExistential "" setInfo TypeBounds(g, l) + capturedParams += qvar + qvar.tpe } } } + } if (args contains NoType) None else Some(existentialAbstraction(capturedParams.toList, typeRef(pre, sym, args))) } @@ -6077,148 +6080,6 @@ A type's typeSymbol should never be inspected directly. 
throw new NoCommonType(tps) } - - // TODO: this desperately needs to be cleaned up - // plan: split into kind inference and subkinding - // every Type has a (cached) Kind - def kindsConform(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): Boolean = - checkKindBounds0(tparams, targs, pre, owner, false).isEmpty - - /** Check well-kindedness of type application (assumes arities are already checked) -- @M - * - * This check is also performed when abstract type members become concrete (aka a "type alias") -- then tparams.length==1 - * (checked one type member at a time -- in that case, prefix is the name of the type alias) - * - * Type application is just like value application: it's "contravariant" in the sense that - * the type parameters of the supplied type arguments must conform to the type parameters of - * the required type parameters: - * - their bounds must be less strict - * - variances must match (here, variances are absolute, the variance of a type parameter does not influence the variance of its higher-order parameters) - * - @M TODO: are these conditions correct,sufficient&necessary? - * - * e.g. class Iterable[t, m[+x <: t]] --> the application Iterable[Int, List] is okay, since - * List's type parameter is also covariant and its bounds are weaker than <: Int - */ - def checkKindBounds0(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol, explainErrors: Boolean): List[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])] = { - var error = false - - def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // instantiate type params that come from outside the abstract type we're currently checking - def transformedBounds(p: Symbol, o: Symbol) = transform(p.info.instantiateTypeParams(tparams, targs).bounds, o) - - /** Check whether `sym1`'s variance conforms to `sym2`'s variance. - * - * If `sym2` is invariant, `sym1`'s variance is irrelevant. Otherwise they must be equal. 
- */ - def variancesMatch(sym1: Symbol, sym2: Symbol): Boolean = (sym2.variance==0 || sym1.variance==sym2.variance) - - // check that the type parameters hkargs to a higher-kinded type conform to the expected params hkparams - def checkKindBoundsHK( - hkargs: List[Symbol], - arg: Symbol, - param: Symbol, - paramowner: Symbol, - underHKParams: List[Symbol], - withHKArgs: List[Symbol] - ): (List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)]) = { - - def bindHKParams(tp: Type) = tp.substSym(underHKParams, withHKArgs) - // @M sometimes hkargs != arg.typeParams, the symbol and the type may have very different type parameters - val hkparams = param.typeParams - - if (settings.debug.value) { - log("checkKindBoundsHK expected: "+ param +" with params "+ hkparams +" by definition in "+ paramowner) - log("checkKindBoundsHK supplied: "+ arg +" with params "+ hkargs +" from "+ owner) - log("checkKindBoundsHK under params: "+ underHKParams +" with args "+ withHKArgs) - } - - if (!sameLength(hkargs, hkparams)) { - if (arg == AnyClass || arg == NothingClass) (Nil, Nil, Nil) // Any and Nothing are kind-overloaded - else {error = true; (List((arg, param)), Nil, Nil) } // shortcut: always set error, whether explainTypesOrNot - } - else { - val _arityMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null - val _varianceMismatches = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null - val _stricterBounds = if (explainErrors) new ListBuffer[(Symbol, Symbol)] else null - - def varianceMismatch(a: Symbol, p: Symbol) { if(explainErrors) _varianceMismatches += ((a, p)) else error = true} - def stricterBound(a: Symbol, p: Symbol) { if(explainErrors) _stricterBounds += ((a, p)) else error = true } - def arityMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _arityMismatches ++= as } - def varianceMismatches(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _varianceMismatches ++= as } - def stricterBounds(as: Iterable[(Symbol, Symbol)]) { if(explainErrors) _stricterBounds ++= as } - - for ((hkarg, hkparam) <- hkargs zip hkparams) { - if (hkparam.typeParams.isEmpty && hkarg.typeParams.isEmpty) { // base-case: kind * - if (!variancesMatch(hkarg, hkparam)) - varianceMismatch(hkarg, hkparam) - - // instantiateTypeParams(tparams, targs) --> higher-order bounds may contain references to type arguments - // substSym(hkparams, hkargs) --> these types are going to be compared as types of kind * - // --> their arguments use different symbols, but are conceptually the same - // (could also replace the types by polytypes, but can't just strip the symbols, as ordering is lost then) - val declaredBounds = transformedBounds(hkparam, paramowner) - val declaredBoundsInst = bindHKParams(declaredBounds) - val argumentBounds = transform(hkarg.info.bounds, owner) - if (!(declaredBoundsInst <:< argumentBounds)) - stricterBound(hkarg, hkparam) - - debuglog( - "checkKindBoundsHK base case: " + hkparam + - " declared bounds: " + declaredBounds + - " after instantiating earlier hkparams: " + declaredBoundsInst + "\n" + - "checkKindBoundsHK base case: "+ hkarg + - " has bounds: " + argumentBounds - ) - } - else { - debuglog("checkKindBoundsHK recursing to compare params of "+ hkparam +" with "+ hkarg) - val (am, vm, sb) = checkKindBoundsHK( - hkarg.typeParams, - hkarg, - hkparam, - paramowner, - underHKParams ++ hkparam.typeParams, - withHKArgs ++ hkarg.typeParams - ) - arityMismatches(am) - varianceMismatches(vm) - stricterBounds(sb) - } - if (!explainErrors && error) return 
(Nil, Nil, Nil) // stop as soon as we encountered an error - } - if (!explainErrors) (Nil, Nil, Nil) - else (_arityMismatches.toList, _varianceMismatches.toList, _stricterBounds.toList) - } - } - - val errors = new ListBuffer[(Type, Symbol, List[(Symbol, Symbol)], List[(Symbol, Symbol)], List[(Symbol, Symbol)])] - if (settings.debug.value &&(tparams.nonEmpty || targs.nonEmpty)) - log("checkKindBounds0(" + tparams + ", " + targs + ", " + pre + ", " + owner + ", " + explainErrors + ")") - - for { - (tparam, targ) <- tparams zip targs - // Prevent WildcardType from causing kind errors, as typevars may be higher-order - if (targ != WildcardType) && (targ.isHigherKinded || tparam.typeParams.nonEmpty) - } { - // @M must use the typeParams of the *type* targ, not of the *symbol* of targ!! - targ.typeSymbolDirect.info // force symbol load for #4205 - val tparamsHO = targ.typeParams - - val (arityMismatches, varianceMismatches, stricterBounds) = ( - // NOTE: *not* targ.typeSymbol, which normalizes - checkKindBoundsHK(tparamsHO, targ.typeSymbolDirect, tparam, tparam.owner, tparam.typeParams, tparamsHO) - ) - if (explainErrors) { - if (arityMismatches.nonEmpty || varianceMismatches.nonEmpty || stricterBounds.nonEmpty) { - errors += ((targ, tparam, arityMismatches, varianceMismatches, stricterBounds)) - } - } - else if (error) - return List((NoType, NoSymbol, Nil, Nil, Nil)) - } - - errors.toList - } - // Errors and Diagnostics ----------------------------------------------------- /** A throwable signalling a type error */ diff --git a/src/compiler/scala/reflect/internal/util/Collections.scala b/src/compiler/scala/reflect/internal/util/Collections.scala new file mode 100644 index 0000000000..28a17c7821 --- /dev/null +++ b/src/compiler/scala/reflect/internal/util/Collections.scala @@ -0,0 +1,138 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.reflect.internal.util + +import scala.collection.{ mutable, immutable } +import scala.annotation.tailrec +import mutable.ListBuffer + +/** Profiler driven changes. + */ +trait Collections { + /** True if all three arguments have the same number of elements and + * the function is true for all the triples. 
+ */ + @tailrec final def corresponds3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C]) + (f: (A, B, C) => Boolean): Boolean = ( + if (xs1.isEmpty) xs2.isEmpty && xs3.isEmpty + else !xs2.isEmpty && !xs3.isEmpty && f(xs1.head, xs2.head, xs3.head) && corresponds3(xs1.tail, xs2.tail, xs3.tail)(f) + ) + + final def map2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => C): List[C] = { + val lb = new ListBuffer[C] + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + lb += f(ys1.head, ys2.head) + ys1 = ys1.tail + ys2 = ys2.tail + } + lb.toList + } + final def map3[A, B, C, D](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => D): List[D] = { + if (xs1.isEmpty || xs2.isEmpty || xs3.isEmpty) Nil + else f(xs1.head, xs2.head, xs3.head) :: map3(xs1.tail, xs2.tail, xs3.tail)(f) + } + final def flatMap2[A, B, C](xs1: List[A], xs2: List[B])(f: (A, B) => List[C]): List[C] = { + val lb = new ListBuffer[C] + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + lb ++= f(ys1.head, ys2.head) + ys1 = ys1.tail + ys2 = ys2.tail + } + lb.toList + } + + final def mapWithIndex[A, B](xs: List[A])(f: (A, Int) => B): List[B] = { + val lb = new ListBuffer[B] + var index = 0 + var ys = xs + while (!ys.isEmpty) { + lb += f(ys.head, index) + ys = ys.tail + index += 1 + } + lb.toList + } + final def collectMap2[A, B, C](xs1: List[A], xs2: List[B])(p: (A, B) => Boolean): Map[A, B] = { + if (xs1.isEmpty || xs2.isEmpty) + return Map() + + val buf = immutable.Map.newBuilder[A, B] + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + val x1 = ys1.head + val x2 = ys2.head + if (p(x1, x2)) + buf += ((x1, x2)) + + ys1 = ys1.tail + ys2 = ys2.tail + } + buf.result + } + final def foreach2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Unit): Unit = { + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + f(ys1.head, ys2.head) + ys1 = ys1.tail + ys2 = ys2.tail + } + } + final def foreach3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Unit): Unit = { + var ys1 = xs1 + var ys2 = xs2 + var ys3 = xs3 + while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) { + f(ys1.head, ys2.head, ys3.head) + ys1 = ys1.tail + ys2 = ys2.tail + ys3 = ys3.tail + } + } + final def exists2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + if (f(ys1.head, ys2.head)) + return true + + ys1 = ys1.tail + ys2 = ys2.tail + } + false + } + final def forall2[A, B](xs1: List[A], xs2: List[B])(f: (A, B) => Boolean): Boolean = { + var ys1 = xs1 + var ys2 = xs2 + while (!ys1.isEmpty && !ys2.isEmpty) { + if (!f(ys1.head, ys2.head)) + return false + + ys1 = ys1.tail + ys2 = ys2.tail + } + true + } + final def forall3[A, B, C](xs1: List[A], xs2: List[B], xs3: List[C])(f: (A, B, C) => Boolean): Boolean = { + var ys1 = xs1 + var ys2 = xs2 + var ys3 = xs3 + while (!ys1.isEmpty && !ys2.isEmpty && !ys3.isEmpty) { + if (!f(ys1.head, ys2.head, ys3.head)) + return false + + ys1 = ys1.tail + ys2 = ys2.tail + ys3 = ys3.tail + } + true + } +} diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index f3eaff8db0..30ee7fc885 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -77,16 +77,17 @@ trait Trees extends reflect.internal.Trees { self: Global => }}) val (edefs, rest) = body span treeInfo.isEarlyDef val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef - val 
(lvdefs, gvdefs) = evdefs map { + val gvdefs = evdefs map { case vdef @ ValDef(mods, name, tpt, rhs) => - val fld = treeCopy.ValDef( + treeCopy.ValDef( vdef.duplicate, mods, name, atPos(focusPos(vdef.pos)) { TypeTree() setOriginal tpt setPos focusPos(tpt.pos) }, // atPos in case EmptyTree) - val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs) - (local, fld) - } unzip - + } + val lvdefs = evdefs map { + case vdef @ ValDef(mods, name, tpt, rhs) => + treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs) + } val constrs = { if (constrMods hasFlag TRAIT) { if (body forall treeInfo.isInterfaceMember) List() diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala index 9fbf649525..a47bfda8c1 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package symtab import ast.{Trees, TreePrinters, DocComments} - import util._ -abstract class SymbolTable extends reflect.internal.SymbolTable +abstract class SymbolTable extends reflect.internal.SymbolTable \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala index 9404f0f699..720509644b 100644 --- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala +++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala @@ -202,7 +202,7 @@ abstract class LiftCode extends Transform with TypingTransformers { /** A method call with a by-name parameter represents escape. */ case Apply(fn, args) if fn.symbol.paramss.nonEmpty => traverse(fn) - for ((param, arg) <- treeInfo.zipMethodParamsAndArgs(tree)) { + treeInfo.foreachMethodParamAndArg(tree) { (param, arg) => if (param.tpe != null && isByNameParamType(param.tpe)) withEscaping(traverse(arg)) else diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 9c4889eba9..bbe803a3fb 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -102,7 +102,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def fromSpecialization(sym: Symbol, args: List[Type]): TypeEnv = { ifDebug(assert(sym.info.typeParams.length == args.length, sym + " args: " + args)) - emptyEnv ++ (sym.info.typeParams zip args filter (kv => isSpecialized(kv._1))) + emptyEnv ++ collectMap2(sym.info.typeParams, args)((k, v) => isSpecialized(k)) } /** Does typeenv `t1` include `t2`? All type variables in `t1` @@ -255,7 +255,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val pre1 = this(pre) // when searching for a specialized class, take care to map all // type parameters that are subtypes of AnyRef to AnyRef - val args1 = (args zip sym.typeParams) map { + val args1 = map2(args, sym.typeParams) { case (tp, orig) if isSpecializedAnyRefSubtype(tp, orig) => AnyRefClass.tpe case (tp, _) => tp } @@ -341,7 +341,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case set :: sets => for (x <- set ; xs <- loop(sets)) yield x :: xs } // zip the keys with each permutation to create a TypeEnv - loop(keys map concreteTypes) map (keys zip _ toMap) + loop(keys map concreteTypes) map (xss => Map(keys zip xss: _*)) } /** Does the given 'sym' need to be specialized in the environment 'env'? 
@@ -445,7 +445,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def produceTypeParameters(syms: List[Symbol], nowner: Symbol, env: TypeEnv) = { val cloned = for (s <- syms) yield if (!env.contains(s)) s.cloneSymbol(nowner) else env(s).typeSymbol // log("producing type params: " + cloned.map(t => (t, t.tpe.bounds.hi))) - for ((orig, cln) <- syms zip cloned) { + foreach2(syms, cloned) { (orig, cln) => cln.removeAnnotation(SpecializedClass) if (env.contains(orig)) cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefClass.tpe)) @@ -889,7 +889,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ def needsSpecialOverride(overriding: Symbol): (Symbol, TypeEnv) = { def checkOverriddenTParams(overridden: Symbol) { - for ((baseTvar, derivedTvar) <- overridden.info.typeParams.zip(overriding.info.typeParams)) { + foreach2(overridden.info.typeParams, overriding.info.typeParams) { (baseTvar, derivedTvar) => val missing = concreteTypes(baseTvar).toSet -- concreteTypes(derivedTvar).toSet if (missing.nonEmpty) { reporter.error(derivedTvar.pos, @@ -1391,9 +1391,9 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed) if (!symbol.isPackageClass) (new CollectMethodBodies)(tree) - val parents1 = currentOwner.info.parents.zipWithIndex.map { - case (tpe, idx) => TypeTree(tpe) setPos parents(idx).pos - } + val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) => + TypeTree(tpe) setPos parent.pos) + treeCopy.Template(tree, parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ , self, diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 90f46206c5..13516037f5 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -461,7 +461,7 @@ abstract class UnCurry extends InfoTransform val args1 = if (isVarArgTypes(formals)) transformVarargs(formals.last.typeArgs.head) else args - (formals, args1).zipped map { (formal, arg) => + map2(formals, args1) { (formal, arg) => if (!isByNameParamType(formal)) { arg } else if (isByNameRef(arg)) { @@ -771,7 +771,7 @@ abstract class UnCurry extends InfoTransform case p => p.symbol.tpe } val forwresult = dd.symbol.tpe.finalResultType - val forwformsyms = (forwformals, flatparams).zipped map ((tp, oldparam) => + val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) => currentClass.newValueParameter(oldparam.symbol.pos, oldparam.name).setInfo(tp) ) def mono = MethodType(forwformsyms, forwresult) @@ -789,7 +789,7 @@ abstract class UnCurry extends InfoTransform // create the tree val forwtree = theTyper.typedPos(dd.pos) { - val locals = (forwsym ARGS, flatparams).zipped map { + val locals = map2(forwsym ARGS, flatparams) { case (_, fp) if !rpsymbols(fp.symbol) => null case (argsym, fp) => Block(Nil, @@ -799,7 +799,7 @@ abstract class UnCurry extends InfoTransform ) ) } - val seqargs = (locals, forwsym ARGS).zipped map { + val seqargs = map2(locals, forwsym ARGS) { case (null, argsym) => Ident(argsym) case (l, _) => l } diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 67fa67b0f3..2bd307e31a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -9,7 +9,6 @@ package typechecker import scala.collection.{ mutable, immutable } import scala.collection.mutable.ListBuffer import scala.util.control.ControlThrowable -import scala.tools.util.StringOps.{ countAsString, countElementsAsString } import symtab.Flags._ import scala.annotation.tailrec @@ -459,13 +458,14 @@ trait Infer { } val tvars = tparams map freshVar if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt)) - (tparams, tvars).zipped map ((tparam, tvar) => + map2(tparams, tvars)((tparam, tvar) => instantiateToBound(tvar, varianceInTypes(formals)(tparam))) else tvars map (tvar => WildcardType) } object AdjustedTypeArgs { + val Result = collection.mutable.LinkedHashMap type Result = collection.mutable.LinkedHashMap[Symbol, Option[Type]] def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists( @@ -508,24 +508,27 @@ trait Infer { * type parameters that are inferred as `scala.Nothing` and that are not covariant in restpe are taken to be undetermined */ def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = { - @inline def keep(targ: Type, tparam: Symbol) = ( - targ.typeSymbol != NothingClass // definitely not retracting, it's not Nothing! - || (!restpe.isWildcard && (varianceInType(restpe)(tparam) & COVARIANT) != 0)) // occured covariantly --> don't retract - - @inline def adjusted(targ: Type, tvar: TypeVar) = - if (targ.typeSymbol == RepeatedParamClass) - targ.baseType(SeqClass) - else if (targ.typeSymbol == JavaRepeatedParamClass) - targ.baseType(ArrayClass) - // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat - else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) - targ // this infers Foo.type instead of "object Foo" (see also widenIfNecessary) - else - targ.widen + val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]] + + foreach3(tparams, tvars, targs) { (tparam, tvar, targ) => + val retract = ( + targ.typeSymbol == NothingClass // only retract Nothings + && (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences + ) - (tparams, tvars, targs).zipped.map { (tparam, tvar, targ) => - tparam -> (if(keep(targ, tparam)) Some(adjusted(targ, tvar)) else None) - }(collection.breakOut) + // checks opt.virtPatmat directly so one need not run under -Xexperimental to use virtpatmat + buf += ((tparam, + if (retract) None + else Some( + if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass) + else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass) + // this infers Foo.type instead of "object Foo" (see also widenIfNecessary) + else if (targ.typeSymbol.isModuleClass || ((opt.experimental || opt.virtPatmat) && tvar.constr.avoidWiden)) targ + else targ.widen + ) + )) + } + buf.result } /** Return inferred type arguments, given type parameters, formal parameters, @@ -584,7 +587,7 @@ trait Infer { if (!isFullyDefined(tvar)) tvar.constr.inst = NoType // Then define remaining type variables from argument types. 
- (argtpes, formals).zipped map { (argtpe, formal) => + map2(argtpes, formals) { (argtpe, formal) => val tp1 = argtpe.deconst.instantiateTypeParams(tparams, tvars) val pt1 = formal.instantiateTypeParams(tparams, tvars) @@ -756,7 +759,8 @@ trait Infer { typesCompatible(reorderArgs(argtpes1, argPos)) ) } - } else { + } + else { // not enough arguments, check if applicable using defaults val missing = missingParams[Type](argtpes0, params, { case NamedType(name, _) => Some(name) @@ -994,39 +998,13 @@ trait Infer { } } - def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = { - // @M TODO this method is duplicated all over the place (varianceString) - def varStr(s: Symbol): String = - if (s.isCovariant) "covariant" - else if (s.isContravariant) "contravariant" - else "invariant"; - - def qualify(a0: Symbol, b0: Symbol): String = if (a0.toString != b0.toString) "" else { - if((a0 eq b0) || (a0.owner eq b0.owner)) "" - else { - var a = a0; var b = b0 - while (a.owner.name == b.owner.name) { a = a.owner; b = b.owner} - if (a.locationString ne "") " (" + a.locationString.trim + ")" else "" - } + checkKindBounds0(tparams, targs, pre, owner, true) map { + case (targ, tparam, kindErrors) => + kindErrors.errorMessage(targ, tparam) } - - val errors = checkKindBounds0(tparams, targs, pre, owner, true) - val errorMessages = new ListBuffer[String] - errors foreach {case (targ, tparam, arityMismatches, varianceMismatches, stricterBounds) => errorMessages += - (targ+"'s type parameters do not match "+tparam+"'s expected parameters: "+ - (for ((a, p) <- arityMismatches) - yield a+qualify(a,p)+ " has "+countElementsAsString(a.typeParams.length, "type parameter")+", but "+ - p+qualify(p,a)+" has "+countAsString(p.typeParams.length)).toList.mkString(", ") + - (for ((a, p) <- varianceMismatches) - yield a+qualify(a,p)+ " is "+varStr(a)+", but "+ - p+qualify(p,a)+" is declared "+varStr(p)).toList.mkString(", ") + - (for ((a, p) <- stricterBounds) - yield a+qualify(a,p)+"'s bounds "+a.info+" are stricter than "+ - p+qualify(p,a)+"'s declared bounds "+p.info).toList.mkString(", ")) - } - errorMessages.toList } + /** Substitute free type variables `undetparams` of polymorphic argument * expression `tree`, given two prototypes `strictPt`, and `lenientPt`. 
* `strictPt` is the first attempt prototype where type parameters diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 8611fafe52..a8dfea02ec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -260,7 +260,7 @@ trait NamesDefaults { self: Analyzer => */ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = { val context = blockTyper.context - val symPs = (args, paramTypes).zipped map ((arg, tpe) => { + val symPs = map2(args, paramTypes)((arg, tpe) => { val byName = isByNameParamType(tpe) val (argTpe, repeated) = if (isScalaRepeatedParamType(tpe)) arg match { @@ -276,7 +276,7 @@ trait NamesDefaults { self: Analyzer => s.setInfo(valType) (context.scope.enter(s), byName, repeated) }) - (symPs, args).zipped map { + map2(symPs, args) { case ((sym, byName, repeated), arg) => val body = if (byName) { @@ -326,13 +326,15 @@ trait NamesDefaults { self: Analyzer => reorderArgsInv(formals, argPos), blockTyper) // refArgs: definition-site order again - val refArgs = (reorderArgs(valDefs, argPos), formals).zipped map ((vDef, tpe) => { + val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => { val ref = gen.mkAttributedRef(vDef.symbol) atPos(vDef.pos.focus) { // for by-name parameters, the local value is a nullary function returning the argument - if (isByNameParamType(tpe)) Apply(ref, List()) - else if (isScalaRepeatedParamType(tpe)) Typed(ref, Ident(tpnme.WILDCARD_STAR)) - else ref + tpe.typeSymbol match { + case ByNameParamClass => Apply(ref, Nil) + case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR)) + case _ => ref + } } }) // cannot call blockTyper.typedBlock here, because the method expr might be partially applied only @@ -340,7 +342,7 @@ trait NamesDefaults { self: Analyzer => res.setPos(res.pos.makeTransparent) val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos) context.namedApplyBlockInfo = - Some((block, NamedApplyInfo(qual, targs, vargss ::: List(refArgs), blockTyper))) + Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper))) block } } @@ -430,6 +432,80 @@ trait NamesDefaults { self: Analyzer => } } else NoSymbol } + + private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = { + val savedParams = context.extractUndetparams() + val savedReporting = context.reportAmbiguousErrors + + context.reportAmbiguousErrors = false + try fn(savedParams) + finally { + context.reportAmbiguousErrors = savedReporting + //@M note that we don't get here when an ambiguity was detected (during the computation of res), + // as errorTree throws an exception + context.undetparams = savedParams + } + } + + /** Fast path for ambiguous assignment check. + */ + private def isNameInScope(context: Context, name: Name) = ( + context.enclosingContextChain exists (ctx => + (ctx.scope.lookupEntry(name) != null) + || (ctx.owner.rawInfo.member(name) != NoSymbol) + ) + ) + + /** A full type check is very expensive; let's make sure there's a name + * somewhere which could potentially be ambiguous before we go that route. + */ + private def isAmbiguousAssignment(typer: Typer, param: Symbol, arg: Tree) = { + import typer.context + isNameInScope(context, param.name) && { + // for named arguments, check whether the assignment expression would + // typecheck. if it does, report an ambiguous error. 
+ val paramtpe = param.tpe.cloneInfo(param) + // replace type parameters by wildcard. in the below example we need to + // typecheck (x = 1) with wildcard (not T) so that it succeeds. + // def f[T](x: T) = x + // var x = 0 + // f(x = 1) << "x = 1" typechecks with expected type WildcardType + savingUndeterminedTParams(context) { udp => + val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) { + override def apply(tp: Type): Type = super.apply(tp match { + case TypeRef(_, ByNameParamClass, x :: Nil) => x + case _ => tp + }) + } + // This throws an exception which is caught in `tryTypedApply` (as it + // uses `silent`) - unfortunately, tryTypedApply recovers from the + // exception if you use errorTree(arg, ...) and conforms is allowed as + // a view (see tryImplicit in Implicits) because it tries to produce a + // new qualifier (if the old one was P, the new one will be + // conforms.apply(P)), and if that works, it pretends nothing happened. + // + // To make sure tryTypedApply fails, we would like to pass EmptyTree + // instead of arg, but can't do that because eventually setType(ErrorType) + // is called, and EmptyTree can only be typed NoType. Thus we need to + // disable conforms as a view... + try typer.silent(_.typed(arg, subst(paramtpe))) match { + case t: Tree => !t.isErroneous + case _ => false + } + catch { + // `silent` only catches and returns TypeErrors which are not + // CyclicReferences. Fix for #3685 + case cr @ CyclicReference(sym, _) => + (sym.name == param.name) && sym.accessedOrSelf.isVariable && { + context.error(sym.pos, + "variable definition needs type because '%s' is used as a named argument in its body.".format(sym.name)) + typer.infer.setError(arg) + true + } + } + } + } + } /** * Removes name assignments from args. Additionally, returns an array mapping @@ -439,71 +515,38 @@ trait NamesDefaults { self: Analyzer => * after named ones. */ def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = { - import typer.infer.errorTree - - // maps indicies from (order written by user) to (order of definition) - val argPos = (new Array[Int](args.length)) map (x => -1) + import typer.context + // maps indices from (order written by user) to (order of definition) + val argPos = Array.fill(args.length)(-1) var positionalAllowed = true - val namelessArgs = for ((arg, index) <- (args.zipWithIndex)) yield arg match { - case a @ AssignOrNamedArg(Ident(name), rhs) => - val (pos, newName) = paramPos(params, name) - newName.foreach(n => { - typer.context.unit.deprecationWarning(arg.pos, "the parameter name "+ name +" has been deprecated. Use "+ n +" instead.") - }) - if (pos == -1) { - if (positionalAllowed) { - argPos(index) = index - // prevent isNamed from being true when calling doTypedApply recursively, - // treat the arg as an assignment of type Unit - Assign(a.lhs, rhs).setPos(arg.pos) - } else { - errorTree(arg, "unknown parameter name: "+ name) - } - } else if (argPos contains pos) { - errorTree(arg, "parameter specified twice: "+ name) - } else { - // for named arguments, check whether the assignment expression would - // typecheck. if it does, report an ambiguous error. - val param = params(pos) - val paramtpe = params(pos).tpe.cloneInfo(param) - // replace type parameters by wildcard. in the below example we need to - // typecheck (x = 1) with wildcard (not T) so that it succeeds. 
- // def f[T](x: T) = x - // var x = 0 - // f(x = 1) << "x = 1" typechecks with expected type WildcardType - val udp = typer.context.extractUndetparams() - val subst = new SubstTypeMap(udp, udp map (_ => WildcardType)) { - override def apply(tp: Type): Type = tp match { - case TypeRef(_, ByNameParamClass, List(arg)) => super.apply(arg) - case _ => super.apply(tp) + val namelessArgs = mapWithIndex(args) { (arg, index) => + def fail(msg: String) = typer.infer.errorTree(arg, msg) + arg match { + case arg @ AssignOrNamedArg(Ident(name), rhs) => + def matchesName(param: Symbol) = !param.isSynthetic && ( + (param.name == name) || (param.deprecatedParamName match { + case Some(`name`) => + context.unit.deprecationWarning(arg.pos, + "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.") + true + case _ => false + }) + ) + val pos = params indexWhere matchesName + if (pos == -1) { + if (positionalAllowed) { + argPos(index) = index + // prevent isNamed from being true when calling doTypedApply recursively, + // treat the arg as an assignment of type Unit + Assign(arg.lhs, rhs) setPos arg.pos } + else fail("unknown parameter name: " + name) } - val reportAmbiguousErrors = typer.context.reportAmbiguousErrors - typer.context.reportAmbiguousErrors = false - - var variableNameClash = false - val typedAssign = try { - typer.silent(_.typed(arg, subst(paramtpe))) - } catch { - // `silent` only catches and returns TypeErrors which are not - // CyclicReferences. Fix for #3685 - case cr @ CyclicReference(sym, info) if sym.name == param.name => - if (sym.isVariable || sym.isGetter && sym.accessed.isVariable) { - // named arg not allowed - variableNameClash = true - typer.context.error(sym.pos, - "%s definition needs %s because '%s' is used as a named argument in its body.".format( - "variable", // "method" - "type", // "result type" - sym.name - ) - ) - typer.infer.setError(arg) - } - else cr - } - - def applyNamedArg = { + else if (argPos contains pos) + fail("parameter specified twice: " + name) + else if (isAmbiguousAssignment(typer, params(pos), arg)) + fail("reference to " + name + " is ambiguous; it is both a method parameter and a variable in scope.") + else { // if the named argument is on the original parameter // position, positional after named is allowed. if (index != pos) @@ -511,63 +554,13 @@ trait NamesDefaults { self: Analyzer => argPos(index) = pos rhs } - - val res = typedAssign match { - case _: TypeError => applyNamedArg - - case t: Tree => - if (t.isErroneous && !variableNameClash) { - applyNamedArg - } else if (t.isErroneous) { - t // name clash with variable. error was already reported above. - } else { - // This throws an exception which is caught in `tryTypedApply` (as it - // uses `silent`) - unfortunately, tryTypedApply recovers from the - // exception if you use errorTree(arg, ...) and conforms is allowed as - // a view (see tryImplicit in Implicits) because it tries to produce a - // new qualifier (if the old one was P, the new one will be - // conforms.apply(P)), and if that works, it pretends nothing happened. - // - // To make sure tryTypedApply fails, we would like to pass EmptyTree - // instead of arg, but can't do that because eventually setType(ErrorType) - // is called, and EmptyTree can only be typed NoType. Thus we need to - // disable conforms as a view... 
- errorTree(arg, "reference to "+ name +" is ambiguous; it is both, a parameter\n"+ - "name of the method and the name of a variable currently in scope.") - } - } - - typer.context.reportAmbiguousErrors = reportAmbiguousErrors - //@M note that we don't get here when an ambiguity was detected (during the computation of res), - // as errorTree throws an exception - typer.context.undetparams = udp - res - } - case _ => - argPos(index) = index - if (positionalAllowed) arg - else errorTree(arg, "positional after named argument.") - } - (namelessArgs, argPos) - } - - /** - * Returns - * - the position of the parameter named `name` - * - optionally, if `name` is @deprecatedName, the new name - */ - def paramPos(params: List[Symbol], name: Name): (Int, Option[Name]) = { - var i = 0 - var rest = params - while (!rest.isEmpty) { - val p = rest.head - if (!p.isSynthetic) { - if (p.name == name) return (i, None) - if (p.deprecatedParamName == Some(name)) return (i, Some(p.name)) + case _ => + argPos(index) = index + if (positionalAllowed) arg + else fail("positional after named argument.") } - i += 1 - rest = rest.tail } - (-1, None) + + (namelessArgs, argPos) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 59a1a254c6..ace38bb4cb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -410,8 +410,6 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R } } - - def checkOverrideTypes() { if (other.isAliasType) { //if (!member.typeParams.isEmpty) (1.5) @MAT @@ -420,14 +418,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R // overrideError("may not override parameterized type"); // @M: substSym - if( !(sameLength(member.typeParams, other.typeParams) && (self.memberType(member).substSym(member.typeParams, other.typeParams) =:= self.memberType(other))) ) // (1.6) + if( !(sameLength(member.typeParams, other.typeParams) && (memberTp.substSym(member.typeParams, other.typeParams) =:= otherTp)) ) // (1.6) overrideTypeError(); - } else if (other.isAbstractType) { + } + else if (other.isAbstractType) { //if (!member.typeParams.isEmpty) // (1.7) @MAT // overrideError("may not be parameterized"); - - val memberTp = self.memberType(member) val otherTp = self.memberInfo(other) + if (!(otherTp.bounds containsType memberTp)) { // (1.7.1) overrideTypeError(); // todo: do an explaintypes with bounds here explainTypes(_.bounds containsType _, otherTp, memberTp) diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index c9991614e4..a0ef2f5e2e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -43,7 +43,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT } private def transformArgs(params: List[Symbol], args: List[Tree]) = { - treeInfo.zipMethodParamsAndArgs(params, args) map { case (param, arg) => + treeInfo.mapMethodParamsAndArgs(params, args) { (param, arg) => if (isByNameParamType(param.tpe)) withInvalidOwner { checkPackedConforms(transform(arg), param.tpe.typeArgs.head) } else transform(arg) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a4c00e9f89..6b6b905e16 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1207,7 +1207,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (preSuperVals.isEmpty && preSuperStats.nonEmpty) debugwarn("Wanted to zip empty presuper val list with " + preSuperStats) else - (preSuperStats, preSuperVals).zipped map { case (ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe } + map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe) case _ => if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments") @@ -1959,7 +1959,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (argpts.lengthCompare(numVparams) != 0) errorTree(fun, "wrong number of parameters; expected = " + argpts.length) else { - val vparamSyms = (fun.vparams, argpts).zipped map { (vparam, argpt) => + val vparamSyms = map2(fun.vparams, argpts) { (vparam, argpt) => if (vparam.tpt.isEmpty) { vparam.tpt.tpe = if (isFullyDefined(argpt)) argpt @@ -2195,15 +2195,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = { def isLowerBounded(tparam: Symbol) = !tparam.info.bounds.lo.typeSymbol.isBottomClass - (formals, args).zipped exists { + exists2(formals, args) { case (formal, Function(vparams, _)) => (vparams exists (_.tpt.isEmpty)) && vparams.length <= MaxFunctionArity && (formal baseType FunctionClass(vparams.length) match { case TypeRef(_, _, formalargs) => - (formalargs, vparams).zipped.exists ((formalarg, vparam) => - vparam.tpt.isEmpty && (tparams exists (formalarg contains))) && - (tparams forall isLowerBounded) + ( exists2(formalargs, vparams)((formal, vparam) => + vparam.tpt.isEmpty && (tparams exists formal.contains)) + && (tparams forall isLowerBounded) + ) case _ => false }) @@ -2460,7 +2461,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } else { assert(!inPatternMode(mode)) // this case cannot arise for patterns val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt) - val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) => + val strictTargs = map2(lenientTargs, tparams)((targ, tparam) => if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK var remainingParams = paramTypes def typedArgToPoly(arg: Tree, formal: Type): Tree = { //TR TODO: cleanup @@ -2477,7 +2478,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } arg1 } - val args1 = (args, formals).zipped map typedArgToPoly + val args1 = map2(args, formals)(typedArgToPoly) if (args1 exists (_.tpe.isError)) errTree else { debuglog("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug @@ -2926,7 +2927,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { override val typeParams = tparams map (_.symbol) val typeSkolems = typeParams map (_.newTypeSkolem setInfo this) // Replace the symbols - def substitute() = (tparams, typeSkolems).zipped map (_ setSymbol _) + def substitute() = map2(tparams, typeSkolems)(_ setSymbol _) override def complete(sym: Symbol) { // The info of a skolem is the skolemized info of the // actual type parameter of the skolem @@ -3972,7 +3973,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } val argtypes = args1 map (_.tpe) - (args, 
tparams).zipped foreach { (arg, tparam) => arg match { + foreach2(args, tparams)((arg, tparam) => arg match { // note: can't use args1 in selector, because Bind's got replaced case Bind(_, _) => if (arg.symbol.isAbstractType) @@ -3981,7 +3982,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { lub(List(arg.symbol.info.bounds.lo, tparam.info.bounds.lo.subst(tparams, argtypes))), glb(List(arg.symbol.info.bounds.hi, tparam.info.bounds.hi.subst(tparams, argtypes)))) case _ => - }} + }) val original = treeCopy.AppliedTypeTree(tree, tpt1, args1) val result = TypeTree(appliedType(tpt1.tpe, argtypes)) setOriginal original if(tpt1.tpe.isInstanceOf[PolyType]) // did the type application (performed by appliedType) involve an unchecked beta-reduction? @@ -4079,7 +4080,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case UnApply(fun, args) => val fun1 = typed(fun) val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe), args.length) - val args1 = (args, tpes).zipped map typedPattern + val args1 = map2(args, tpes)(typedPattern) treeCopy.UnApply(tree, fun1, args1) setType pt case ArrayValue(elemtpt, elems) => diff --git a/src/compiler/scala/tools/util/EditDistance.scala b/src/compiler/scala/tools/util/EditDistance.scala index b705a1eac4..5067dce384 100644 --- a/src/compiler/scala/tools/util/EditDistance.scala +++ b/src/compiler/scala/tools/util/EditDistance.scala @@ -30,23 +30,37 @@ object EditDistance { if (m == 0) return n val d = Array.ofDim[Int](n + 1, m + 1) - 0 to n foreach (x => d(x)(0) = x) - 0 to m foreach (x => d(0)(x) = x) + var i = 0 + val max = math.max(m, n) + while (i <= max) { + if (i <= n) + d(i)(0) = i + if (i <= m) + d(0)(i) = i + i += 1 + } + i = 1 - for (i <- 1 to n ; s_i = s(i - 1) ; j <- 1 to m) { - val t_j = t(j - 1) - val cost = if (s_i == t_j) 0 else 1 + while (i <= n) { + val s_i = s(i - 1) + var j = 1 + while (j <= m) { + val t_j = t(j - 1) + val cost = if (s_i == t_j) 0 else 1 - val c1 = d(i - 1)(j) + 1 - val c2 = d(i)(j - 1) + 1 - val c3 = d(i - 1)(j - 1) + cost + val c1 = d(i - 1)(j) + 1 + val c2 = d(i)(j - 1) + 1 + val c3 = d(i - 1)(j - 1) + cost - d(i)(j) = c1 min c2 min c3 + d(i)(j) = c1 min c2 min c3 - if (transpositions) { - if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1)) - d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost) + if (transpositions) { + if (i > 1 && j > 1 && s(i - 1) == t(j - 2) && s(i - 2) == t(j - 1)) + d(i)(j) = d(i)(j) min (d(i - 2)(j - 2) + cost) + } + j += 1 } + i += 1 } d(n)(m) diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index 75c1edac66..ceb980ff80 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -13,6 +13,7 @@ import generic._ import mutable.ListBuffer import immutable.List import scala.util.control.Breaks._ +import annotation.tailrec /** A template trait for linear sequences of type `LinearSeq[A]`. 
* @@ -69,4 +70,9 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr xs } } + + @tailrec override final def corresponds[B](that: GenSeq[B])(p: (A,B) => Boolean): Boolean = { + if (this.isEmpty) that.isEmpty + else that.nonEmpty && p(head, that.head) && (tail corresponds that.tail)(p) + } } diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index 03e44f745d..01ef54e0ea 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -10,8 +10,7 @@ names-defaults-neg.scala:5: error: type mismatch; names-defaults-neg.scala:8: error: positional after named argument. test1(b = "(*", 23) ^ -names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both, a parameter -name of the method and the name of a variable currently in scope. +names-defaults-neg.scala:13: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. test2(x = 1) ^ names-defaults-neg.scala:15: error: not found: value c @@ -26,8 +25,7 @@ names-defaults-neg.scala:17: error: not found: value m names-defaults-neg.scala:18: error: not found: value m test7 { m = 1 } // no named arguments in argument block ^ -names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both, a parameter -name of the method and the name of a variable currently in scope. +names-defaults-neg.scala:19: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. test8(x = 1) ^ names-defaults-neg.scala:22: error: parameter specified twice: a @@ -118,8 +116,7 @@ names-defaults-neg.scala:93: error: parameter specified twice: b names-defaults-neg.scala:98: error: unknown parameter name: m f3818(y = 1, m = 1) ^ -names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both, a parameter -name of the method and the name of a variable currently in scope. +names-defaults-neg.scala:131: error: reference to var2 is ambiguous; it is both a method parameter and a variable in scope. delay(var2 = 40) ^ names-defaults-neg.scala:134: error: missing parameter type for expanded function ((x$1) => a = x$1) @@ -146,15 +143,13 @@ names-defaults-neg.scala:164: error: variable definition needs type because 'x' names-defaults-neg.scala:167: error: variable definition needs type because 'x' is used as a named argument in its body. def u6 { var x = u.f(x = "32") } ^ -names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both, a parameter -name of the method and the name of a variable currently in scope. +names-defaults-neg.scala:170: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. def u9 { var x: Int = u.f(x = 1) } ^ names-defaults-neg.scala:177: error: variable definition needs type because 'x' is used as a named argument in its body. class u15 { var x = u.f(x = 1) } ^ -names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both, a parameter -name of the method and the name of a variable currently in scope. +names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a method parameter and a variable in scope. class u18 { var x: Int = u.f(x = 1) } ^ one warning found -- cgit v1.2.3 From 008a781f49feb567833e9347ff9d293defeafe6d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Dec 2011 23:02:44 -0800 Subject: More uniformity for the parser. 
Fixing consecutive type application made it more obvious there was another missing bit of the parser, type application following function application. This should (and now does) work:

  object F { def apply[T] = List[T]() }
  def g() = F
  g()[String]

---
 src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 8 ++++----
 test/files/run/type-currying.scala | 13 +++++++++++++
 2 files changed, 17 insertions(+), 4 deletions(-)

(limited to 'test/files')

diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 00ac3976a9..db97dd3475 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1533,12 +1533,12 @@ self => case LBRACKET => val t1 = stripParens(t) t1 match { - case Ident(_) | Select(_, _) => - var tapp: Tree = t1 + case Ident(_) | Select(_, _) | Apply(_, _) => + var app: Tree = t1 while (in.token == LBRACKET) - tapp = atPos(tapp.pos.startOrPoint, in.offset)(TypeApply(tapp, exprTypeArgs())) + app = atPos(app.pos.startOrPoint, in.offset)(TypeApply(app, exprTypeArgs())) - simpleExprRest(tapp, true) + simpleExprRest(app, true) case _ => t1 }
diff --git a/test/files/run/type-currying.scala b/test/files/run/type-currying.scala
index 717e0763a3..f9764c64f0 100644
--- a/test/files/run/type-currying.scala
+++ b/test/files/run/type-currying.scala
@@ -43,3 +43,16 @@ object Test { assert(n0 == n1) } } + +class A { + object Foo { + def apply[T] = Bar + } + object Bar { + def apply() = Foo + } + + def f() = Foo + def g = f()[Int]()[String]() + def h = Foo[Foo.type]()[Foo.type]() +}
-- cgit v1.2.3

From 451e1dc2da16c1bb5a7a59488865df9294eeaf3e Mon Sep 17 00:00:00 2001
From: Paul Phillips
Date: Mon, 2 Jan 2012 06:41:31 -0800
Subject: Added -Xlog-implicit-conversions.

A new command line option prints a message whenever the compiler inserts an implicit conversion. Implicit parameters are not under consideration here, since the primary motivation is to make it easy to inspect your code for unintentional conversions, which can have dramatic performance implications.
class A { def f(xs: Array[Byte]) = xs.size def g(xs: Array[Byte]) = xs.length } % scalac -Xlog-implicit-conversions logImplicits.scala logImplicits.scala:2: applied implicit conversion from xs.type to ?{val size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps[Byte] def f(xs: Array[Byte]) = xs.size ^ --- .../scala/tools/nsc/CompilationUnits.scala | 3 +++ src/compiler/scala/tools/nsc/CompileServer.scala | 4 ++-- src/compiler/scala/tools/nsc/Driver.scala | 8 +++---- src/compiler/scala/tools/nsc/Global.scala | 2 +- src/compiler/scala/tools/nsc/ScalaDoc.scala | 8 +++---- src/compiler/scala/tools/nsc/doc/DocFactory.scala | 2 +- .../scala/tools/nsc/interactive/REPL.scala | 4 ++-- .../scala/tools/nsc/reporters/Reporter.scala | 23 ++++++++++++++------ .../scala/tools/nsc/reporters/ReporterTimer.scala | 2 -- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/nsc/typechecker/Typers.scala | 14 ++++++++++-- test/files/neg/logImplicits.check | 19 ++++++++++++++++ test/files/neg/logImplicits.flags | 1 + test/files/neg/logImplicits.scala | 25 ++++++++++++++++++++++ 14 files changed, 91 insertions(+), 25 deletions(-) create mode 100644 test/files/neg/logImplicits.check create mode 100644 test/files/neg/logImplicits.flags create mode 100644 test/files/neg/logImplicits.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 470207fd35..940d115b2f 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -74,6 +74,9 @@ trait CompilationUnits { self: Global => * It is empty up to phase 'icode'. */ val icode: LinkedHashSet[icodes.IClass] = new LinkedHashSet + + def echo(pos: Position, msg: String) = + reporter.echo(pos, msg) def error(pos: Position, msg: String) = reporter.error(pos, msg) diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index b10ac78ac7..6393ade146 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -136,9 +136,9 @@ class StandardCompileServer extends SocketServer { } if (command.shouldStopWithInfo) - reporter.info(null, command.getInfoMessage(newGlobal(newSettings, reporter)), true) + reporter.echo(command.getInfoMessage(newGlobal(newSettings, reporter))) else if (command.files.isEmpty) - reporter.info(null, command.usageMsg, true) + reporter.echo(command.usageMsg) else { if (isCompilerReusable) { info("[Reusing existing Global instance.]") diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala index db95c1442b..0c52954a0b 100644 --- a/src/compiler/scala/tools/nsc/Driver.scala +++ b/src/compiler/scala/tools/nsc/Driver.scala @@ -24,8 +24,8 @@ abstract class Driver { protected def doCompile(compiler: Global) { if (command.files.isEmpty) { - reporter.info(null, command.usageMsg, true) - reporter.info(null, compiler.pluginOptionsHelp, true) + reporter.echo(command.usageMsg) + reporter.echo(compiler.pluginOptionsHelp) } else { val run = new compiler.Run() run compile command.files @@ -40,14 +40,14 @@ abstract class Driver { settings = command.settings if (settings.version.value) { - reporter.info(null, versionMsg, true) + reporter.echo(versionMsg) } else if (processSettingsHook()) { val compiler = newCompiler() try { if (reporter.hasErrors) reporter.flush() else if (command.shouldStopWithInfo) - 
reporter.info(null, command.getInfoMessage(compiler), true) + reporter.echo(command.getInfoMessage(compiler)) else doCompile(compiler) } catch { diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 2dd32e355b..c388a62644 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -159,7 +159,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb // nearly every trait really must go. For now using globalError. def error(msg: String) = globalError(msg) def globalError(msg: String) = reporter.error(NoPosition, msg) - def inform(msg: String) = reporter.info(NoPosition, msg, true) + def inform(msg: String) = reporter.echo(msg) def warning(msg: String) = if (opt.fatalWarnings) globalError(msg) else reporter.warning(NoPosition, msg) diff --git a/src/compiler/scala/tools/nsc/ScalaDoc.scala b/src/compiler/scala/tools/nsc/ScalaDoc.scala index a9330b053b..4fa2cc71e5 100644 --- a/src/compiler/scala/tools/nsc/ScalaDoc.scala +++ b/src/compiler/scala/tools/nsc/ScalaDoc.scala @@ -30,17 +30,17 @@ class ScalaDoc { def hasFiles = command.files.nonEmpty || docSettings.uncompilableFiles.nonEmpty if (docSettings.version.value) - reporter.info(null, versionMsg, true) + reporter.echo(versionMsg) else if (docSettings.Xhelp.value) - reporter.info(null, command.xusageMsg, true) + reporter.echo(command.xusageMsg) else if (docSettings.Yhelp.value) - reporter.info(null, command.yusageMsg, true) + reporter.echo(command.yusageMsg) else if (docSettings.showPlugins.value) reporter.warning(null, "Plugins are not available when using Scaladoc") else if (docSettings.showPhases.value) reporter.warning(null, "Phases are restricted when using Scaladoc") else if (docSettings.help.value || !hasFiles) - reporter.info(null, command.usageMsg, true) + reporter.echo(command.usageMsg) else try { if (docSettings.target.value == "msil") msilLibPath foreach (x => docSettings.assemrefs.value += (pathSeparator + x)) diff --git a/src/compiler/scala/tools/nsc/doc/DocFactory.scala b/src/compiler/scala/tools/nsc/doc/DocFactory.scala index 5a510803ed..9a025b0d14 100644 --- a/src/compiler/scala/tools/nsc/doc/DocFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/DocFactory.scala @@ -96,7 +96,7 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor val documentError: PartialFunction[Throwable, Unit] = { case NoCompilerRunException => - reporter.info(NoPosition, "No documentation generated with unsucessful compiler run", false) + reporter.info(null, "No documentation generated with unsucessful compiler run", false) case _: ClassNotFoundException => () } diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index 81d4faa36e..1d78cc6e1c 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -37,7 +37,7 @@ object REPL { reporter = new ConsoleReporter(settings) val command = new CompilerCommand(args.toList, settings) if (command.settings.version.value) - reporter.info(null, versionMsg, true) + reporter.echo(versionMsg) else { try { object compiler extends Global(command.settings, reporter) { @@ -48,7 +48,7 @@ object REPL { return } if (command.shouldStopWithInfo) { - reporter.info(null, command.getInfoMessage(compiler), true) + reporter.echo(command.getInfoMessage(compiler)) } else { run(compiler) } diff --git 
a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 12306606e4..f19a285d7c 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -47,14 +47,23 @@ abstract class Reporter { finally incompleteHandler = saved } - var cancelled = false - def hasErrors = ERROR.count > 0 || cancelled - def hasWarnings = WARNING.count > 0 + var cancelled = false + def hasErrors = ERROR.count > 0 || cancelled + def hasWarnings = WARNING.count > 0 - def info(pos: Position, msg: String, force: Boolean) { info0(pos, msg, INFO, force) } - def warning(pos: Position, msg: String ) { withoutTruncating(info0(pos, msg, WARNING, false)) } - def error(pos: Position, msg: String ) { withoutTruncating(info0(pos, msg, ERROR, false)) } - def incompleteInputError(pos: Position, msg: String ) { + /** For sending a message which should not be labeled as a warning/error, + * but also shouldn't require -verbose to be visible. + */ + def echo(msg: String): Unit = info(NoPosition, msg, true) + def echo(pos: Position, msg: String): Unit = info(pos, msg, true) + + /** Informational messages, suppressed unless -verbose or force=true. */ + def info(pos: Position, msg: String, force: Boolean): Unit = info0(pos, msg, INFO, force) + + /** Warnings and errors. */ + def warning(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, WARNING, false)) + def error(pos: Position, msg: String): Unit = withoutTruncating(info0(pos, msg, ERROR, false)) + def incompleteInputError(pos: Position, msg: String): Unit = { if (incompleteHandled) incompleteHandler(pos, msg) else error(pos, msg) } diff --git a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala index 800af55861..f55d0684c8 100644 --- a/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala +++ b/src/compiler/scala/tools/nsc/reporters/ReporterTimer.scala @@ -13,8 +13,6 @@ import scala.tools.util.AbstractTimer * timings. 
*/ class ReporterTimer(reporter: Reporter) extends AbstractTimer { - def issue(msg: String, duration: Long) = reporter.info(null, "[" + msg + " in " + duration + "ms]", false) - } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 7fcfb6fc6d..a712f4cba2 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -67,6 +67,7 @@ trait ScalaSettings extends AbsScalaSettings val future = BooleanSetting ("-Xfuture", "Turn on future language features.") val genPhaseGraph = StringSetting ("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "") val XlogImplicits = BooleanSetting ("-Xlog-implicits", "Show more detail on why some implicits are not applicable.") + val logImplicitConv = BooleanSetting ("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.") val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None) val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.") val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9991836344..a7ad140fc1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -969,7 +969,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { return typed(Select(tree, meth), mode, pt) } if (coercion != EmptyTree) { - debuglog("inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe) + def msg = "inferred view from " + tree.tpe + " to " + pt + " = " + coercion + ":" + coercion.tpe + if (settings.logImplicitConv.value) + unit.echo(tree.pos, msg) + + debuglog(msg) return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed( new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt) } @@ -1056,7 +1060,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } inferView(qual, qual.tpe, searchTemplate, true) match { case EmptyTree => qual - case coercion => typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual)))) + case coercion => + if (settings.logImplicitConv.value) + unit.echo(qual.pos, + "applied implicit conversion from %s to %s = %s".format( + qual.tpe, searchTemplate, coercion.symbol.defString)) + + typedQualifier(atPos(qual.pos)(new ApplyImplicitView(coercion, List(qual)))) } } else qual diff --git a/test/files/neg/logImplicits.check b/test/files/neg/logImplicits.check new file mode 100644 index 0000000000..d98422dacb --- /dev/null +++ b/test/files/neg/logImplicits.check @@ -0,0 +1,19 @@ +logImplicits.scala:2: applied implicit conversion from xs.type to ?{val size: ?} = implicit def byteArrayOps(xs: Array[Byte]): scala.collection.mutable.ArrayOps[Byte] + def f(xs: Array[Byte]) = xs.size + ^ +logImplicits.scala:7: applied implicit conversion from String("abc") to ?{val map: ?} = implicit def augmentString(x: String): scala.collection.immutable.StringOps + def f = "abc" map (_ + 1) + ^ +logImplicits.scala:15: inferred view from String("abc") to Int = C.this.convert:(p: String("abc"))Int + math.max(122, x: Int) + ^ 
+logImplicits.scala:19: applied implicit conversion from Int(1) to ?{val ->: ?} = implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] + def f = (1 -> 2) + "c" + ^ +logImplicits.scala:19: applied implicit conversion from (Int, Int) to ?{val +: ?} = implicit def any2stringadd(x: Any): scala.runtime.StringAdd + def f = (1 -> 2) + "c" + ^ +logImplicits.scala:22: error: class Un needs to be abstract, since method unimplemented is not defined +class Un { + ^ +one error found diff --git a/test/files/neg/logImplicits.flags b/test/files/neg/logImplicits.flags new file mode 100644 index 0000000000..97e5ae94ef --- /dev/null +++ b/test/files/neg/logImplicits.flags @@ -0,0 +1 @@ +-Xlog-implicit-conversions \ No newline at end of file diff --git a/test/files/neg/logImplicits.scala b/test/files/neg/logImplicits.scala new file mode 100644 index 0000000000..fb5dd8a025 --- /dev/null +++ b/test/files/neg/logImplicits.scala @@ -0,0 +1,25 @@ +class A { + def f(xs: Array[Byte]) = xs.size + def g(xs: Array[Byte]) = xs.length +} + +class B { + def f = "abc" map (_ + 1) +} + +object C { + final val x = "abc" + + implicit def convert(p: x.type): Int = 123 + + math.max(122, x: Int) +} + +class D { + def f = (1 -> 2) + "c" +} + +class Un { + // forcing post-typer failure, since we're only interested in the output from the above + def unimplemented: Int +} \ No newline at end of file -- cgit v1.2.3 From fafbffc2950aa3f25f91575786093e044f9af549 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 4 Jan 2012 07:05:19 +0100 Subject: Closes SI-5354. The reason why the test case compiled without error is pretty devious: When checking the `Foo.x' implicit, a CyclicReference error occurs which causes the alternative to be discarded. Why a CylicReference? Because the inferencer tries to decide whether the owner of `z` is a subclass of the owner od `x`. To do this, it computed the info of the owner of `z1`, which is not complete because no result type for `f1` was given. Hence a CyclicReference error. The fix is twofold: (1) We make isNonBottomSubClass smarter so that it always returns false if the symbol in question is not a type; hence the info need not be computed. (2) It's dubious to swallow CyclicReference errors anywhere, but I deemed it too risky to propagate them. But at least the CyclicReference is now logged if -Ylog-implicit is true. This hopefully spares future maintainers the same detective work I had to go through when digging this out. --- src/compiler/scala/reflect/internal/Symbols.scala | 15 +++++++++------ src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 9 ++++++++- test/files/neg/t5354.check | 7 +++++++ test/files/neg/t5354.scala | 15 +++++++++++++++ 4 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 test/files/neg/t5354.check create mode 100644 test/files/neg/t5354.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 0c57f0c43a..e629b0ed43 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1244,12 +1244,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ final def isNestedIn(that: Symbol): Boolean = owner == that || owner != NoSymbol && (owner isNestedIn that) - - /** Is this class symbol a subclass of that symbol? 
*/ - final def isNonBottomSubClass(that: Symbol): Boolean = ( - (this eq that) || this.isError || that.isError || - info.baseTypeIndex(that) >= 0 - ) + + /** Is this class symbol a subclass of that symbol, + * and is this class symbol also different from Null or Nothing? */ + def isNonBottomSubClass(that: Symbol): Boolean = false /** Overridden in NullClass and NothingClass for custom behavior. */ @@ -2226,6 +2224,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => super.info_=(tp) } + final override def isNonBottomSubClass(that: Symbol): Boolean = ( + (this eq that) || this.isError || that.isError || + info.baseTypeIndex(that) >= 0 + ) + override def reset(completer: Type) { super.reset(completer) tpePeriod = NoPeriod diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index d54cb248cf..77dde88a80 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -816,7 +816,14 @@ trait Implicits { val newPending = undoLog undo { is filterNot (alt => alt == i || { try improves(i, alt) - catch { case e: CyclicReference => true } + catch { + case e: CyclicReference => + if (printInfers) { + println(i+" discarded because cyclic reference occurred") + e.printStackTrace() + } + true + } }) } rankImplicits(newPending, i :: acc) diff --git a/test/files/neg/t5354.check b/test/files/neg/t5354.check new file mode 100644 index 0000000000..e47cecb5fe --- /dev/null +++ b/test/files/neg/t5354.check @@ -0,0 +1,7 @@ +t5354.scala:9: error: ambiguous implicit values: + both method x123 in package foo of type => foo.Bippy + and method z of type => foo.Bippy + match expected type foo.Bippy + implicitly[Bippy] + ^ +one error found diff --git a/test/files/neg/t5354.scala b/test/files/neg/t5354.scala new file mode 100644 index 0000000000..99b5650155 --- /dev/null +++ b/test/files/neg/t5354.scala @@ -0,0 +1,15 @@ +package object foo { + implicit def x123: Bippy = new Bippy("x") +} +package foo { + class Bippy(override val toString: String){ } + class Dingus { + def f1 = { + implicit def z: Bippy = new Bippy("z") + implicitly[Bippy] + } + } + object Test extends App { + println(new Dingus().f1) + } +} -- cgit v1.2.3 From 6975b4888da9b56e2c06d45da8f483f2e33a102b Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 4 Jan 2012 07:05:19 +0100 Subject: Closes SI-5354. The reason why the test case compiled without error is pretty devious: When checking the `Foo.x' implicit, a CyclicReference error occurs which causes the alternative to be discarded. Why a CylicReference? Because the inferencer tries to decide whether the owner of `z` is a subclass of the owner od `x`. To do this, it computed the info of the owner of `z1`, which is not complete because no result type for `f1` was given. Hence a CyclicReference error. The fix is twofold: (1) We make isNonBottomSubClass smarter so that it always returns false if the symbol in question is not a type; hence the info need not be computed. (2) It's dubious to swallow CyclicReference errors anywhere, but I deemed it too risky to propagate them. But at least the CyclicReference is now logged if -Ylog-implicit is true. This hopefully spares future maintainers the same detective work I had to go through when digging this out. 
--- src/compiler/scala/reflect/internal/Symbols.scala | 15 +++++++++------ src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 9 ++++++++- test/files/neg/t5354.check | 7 +++++++ test/files/neg/t5354.scala | 15 +++++++++++++++ 4 files changed, 39 insertions(+), 7 deletions(-) create mode 100644 test/files/neg/t5354.check create mode 100644 test/files/neg/t5354.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 0c57f0c43a..e629b0ed43 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1244,12 +1244,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ final def isNestedIn(that: Symbol): Boolean = owner == that || owner != NoSymbol && (owner isNestedIn that) - - /** Is this class symbol a subclass of that symbol? */ - final def isNonBottomSubClass(that: Symbol): Boolean = ( - (this eq that) || this.isError || that.isError || - info.baseTypeIndex(that) >= 0 - ) + + /** Is this class symbol a subclass of that symbol, + * and is this class symbol also different from Null or Nothing? */ + def isNonBottomSubClass(that: Symbol): Boolean = false /** Overridden in NullClass and NothingClass for custom behavior. */ @@ -2226,6 +2224,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => super.info_=(tp) } + final override def isNonBottomSubClass(that: Symbol): Boolean = ( + (this eq that) || this.isError || that.isError || + info.baseTypeIndex(that) >= 0 + ) + override def reset(completer: Type) { super.reset(completer) tpePeriod = NoPeriod diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index d54cb248cf..77dde88a80 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -816,7 +816,14 @@ trait Implicits { val newPending = undoLog undo { is filterNot (alt => alt == i || { try improves(i, alt) - catch { case e: CyclicReference => true } + catch { + case e: CyclicReference => + if (printInfers) { + println(i+" discarded because cyclic reference occurred") + e.printStackTrace() + } + true + } }) } rankImplicits(newPending, i :: acc) diff --git a/test/files/neg/t5354.check b/test/files/neg/t5354.check new file mode 100644 index 0000000000..e47cecb5fe --- /dev/null +++ b/test/files/neg/t5354.check @@ -0,0 +1,7 @@ +t5354.scala:9: error: ambiguous implicit values: + both method x123 in package foo of type => foo.Bippy + and method z of type => foo.Bippy + match expected type foo.Bippy + implicitly[Bippy] + ^ +one error found diff --git a/test/files/neg/t5354.scala b/test/files/neg/t5354.scala new file mode 100644 index 0000000000..99b5650155 --- /dev/null +++ b/test/files/neg/t5354.scala @@ -0,0 +1,15 @@ +package object foo { + implicit def x123: Bippy = new Bippy("x") +} +package foo { + class Bippy(override val toString: String){ } + class Dingus { + def f1 = { + implicit def z: Bippy = new Bippy("z") + implicitly[Bippy] + } + } + object Test extends App { + println(new Dingus().f1) + } +} -- cgit v1.2.3 From be46e487134305edae065de00582928c120bcfbb Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 4 Jan 2012 23:47:41 -0800 Subject: Fix for NoSuchMethod in cleanup. Don't assume that just because someone is calling x.toInt and x <: java.lang.Number, that it's a boxed primitive. Closes SI-5356. 
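The failing shape, distilled from the regression test added below (t5356.scala): a structural call to toInt whose receiver is a java.lang.Number subclass that is not one of the boxed primitives, so it has to go through the reflective path rather than the BoxesRunTime fast path.

    object Test {
      def f(x: { def toInt: Int }) = x.toInt   // reflective call, rewritten in cleanup

      def main(args: Array[String]): Unit = {
        f(1)          // boxed Int: fine before and after
        f(BigInt(1))  // extends java.lang.Number, but BoxesRunTime cannot unbox it,
                      // so the old shortcut failed at runtime instead of dispatching reflectively
      }
    }
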
--- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 12 +++++++++++- test/files/run/t5356.check | 6 ++++++ test/files/run/t5356.scala | 12 ++++++++++++ 3 files changed, 29 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t5356.check create mode 100644 test/files/run/t5356.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 575fe8f295..f04867b889 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -275,7 +275,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL { /* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */ val testForNumber: Tree => Tree = { - qual1 => (qual1 IS_OBJ BoxedNumberClass.tpe) OR (qual1 IS_OBJ BoxedCharacterClass.tpe) + // Can't shortcut on BoxedNumber because BoxesRunTime + // is unforgiving of other Numbers showing up. + qual1 => ( + (qual1 IS_OBJ BoxedIntClass.tpe) + OR (qual1 IS_OBJ BoxedLongClass.tpe) + OR (qual1 IS_OBJ BoxedDoubleClass.tpe) + OR (qual1 IS_OBJ BoxedFloatClass.tpe) + OR (qual1 IS_OBJ BoxedByteClass.tpe) + OR (qual1 IS_OBJ BoxedShortClass.tpe) + OR (qual1 IS_OBJ BoxedCharacterClass.tpe) + ) } val testForBoolean: Tree => Tree = { qual1 => (qual1 IS_OBJ BoxedBooleanClass.tpe) diff --git a/test/files/run/t5356.check b/test/files/run/t5356.check new file mode 100644 index 0000000000..21c4aef07b --- /dev/null +++ b/test/files/run/t5356.check @@ -0,0 +1,6 @@ +1 scala.runtime.RichInt +1 scala.runtime.RichInt +1 scala.math.BigInt +1 scala.runtime.RichDouble +1 scala.runtime.RichFloat +1 diff --git a/test/files/run/t5356.scala b/test/files/run/t5356.scala new file mode 100644 index 0000000000..f7696c6088 --- /dev/null +++ b/test/files/run/t5356.scala @@ -0,0 +1,12 @@ +object Test { + def f(x: { def toInt: Int }) = println(x.toInt + " " + x.getClass.getName) + + def main(args: Array[String]): Unit = { + f(1) + f(1.toInt) + f(BigInt(1)) + f(1d) + f(1f) + println((1: { def toInt: Int }).toInt) + } +} -- cgit v1.2.3 From fe94bc7a144921f6c3dcbedbedd2c5c884a77bbd Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 11:38:44 -0800 Subject: Don't mark mixed in methods as bridges. Sometime during the signature-related chaos before 2.9.1, genjvm was modified to pin ACC_BRIDGE onto mixed-in methods. This isn't necessary to suppress the signature (which has already happened at that point) and has deleterious effects since many tools ignore bridge methods. Review by @odersky. 
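Concretely (a slightly adapted version of the new run test below; the object name is just for the sketch): a method mixed in from a trait should surface as an ordinary method of the subclass, because reflection-based tools routinely filter out anything flagged as a bridge.

    trait Foo {
      def getFoo() = "foo"
    }
    class Sub extends Foo {
      def getBar() = "bar"
    }

    object BridgeCheck {
      def main(args: Array[String]): Unit = {
        val ms = classOf[Sub].getDeclaredMethods
        // with ACC_BRIDGE pinned on the mixin forwarder, getFoo() was
        // effectively invisible to such tools
        assert(ms forall (m => !m.isBridge), ms mkString " ")
      }
    }
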
--- src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 2 +- test/files/run/mixin-bridge-methods.scala | 14 ++++++++++++++ 2 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 test/files/run/mixin-bridge-methods.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala index ff98537907..241163885d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala @@ -1901,7 +1901,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with if (sym.isInterface) ACC_INTERFACE else 0, if (finalFlag) ACC_FINAL else 0, if (sym.isStaticMember) ACC_STATIC else 0, - if (sym.isBridge || sym.hasFlag(Flags.MIXEDIN) && sym.isMethod) ACC_BRIDGE else 0, + if (sym.isBridge) ACC_BRIDGE else 0, if (sym.isClass && !sym.isInterface) ACC_SUPER else 0, if (sym.isVarargsMethod) ACC_VARARGS else 0 ) diff --git a/test/files/run/mixin-bridge-methods.scala b/test/files/run/mixin-bridge-methods.scala new file mode 100644 index 0000000000..e0340ebb12 --- /dev/null +++ b/test/files/run/mixin-bridge-methods.scala @@ -0,0 +1,14 @@ +trait Foo { + def getFoo() = "foo" +} + +class Sub extends Foo { + def getBar() = "bar" +} + +object Test { + def main(args: Array[String]): Unit = { + val ms = classOf[Sub].getDeclaredMethods + assert(ms forall (x => !x.isBridge), ms mkString " ") + } +} -- cgit v1.2.3 From 97020945efb0b9d7eebebedce757548349c94537 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 08:41:09 -0800 Subject: Moved Origins into scala.reflect.internal.util. It's too handy and I can't reach it from key classes whose calls I want to profile. --- .../scala/reflect/internal/util/Origins.scala | 107 +++++++++++++++++++++ src/compiler/scala/tools/nsc/util/Origins.scala | 107 --------------------- test/files/run/origins.scala | 2 +- 3 files changed, 108 insertions(+), 108 deletions(-) create mode 100644 src/compiler/scala/reflect/internal/util/Origins.scala delete mode 100644 src/compiler/scala/tools/nsc/util/Origins.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/util/Origins.scala b/src/compiler/scala/reflect/internal/util/Origins.scala new file mode 100644 index 0000000000..b9985c8f50 --- /dev/null +++ b/src/compiler/scala/reflect/internal/util/Origins.scala @@ -0,0 +1,107 @@ +/* NSC -- new scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.reflect +package internal.util + +import NameTransformer._ +import scala.collection.{ mutable, immutable } +import Origins._ + +/** A debugging class for logging from whence a method is being called. + * Say you wanted to discover who was calling phase_= in SymbolTable. + * You could do this: + * + * {{{ + * private lazy val origins = Origins[SymbolTable]("phase_=") + * // Commented out original enclosed for contrast + * // final def phase_=(p: Phase): Unit = { + * final def phase_=(p: Phase): Unit = origins { + * }}} + * + * And that's it. When the JVM exits it would issue a report something like this: + {{{ + >> Origins scala.tools.nsc.symtab.SymbolTable.phase_= logged 145585 calls from 51 distinguished sources. 
+ + 71114 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:862) + 16584 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:757) + 15411 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:869) + 11507 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:770) + 10285 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:864) + 6860 scala.tools.nsc.transform.SpecializeTypes.specializedTypeVars(SpecializeTypes.scala:304) + ... + }}} + * + */ +abstract class Origins { + type Rep + def newRep(xs: StackSlice): Rep + def repString(rep: Rep): String + def originClass: String + + private var _tag: String = null + def tag: String = _tag + def setTag(tag: String): this.type = { + _tag = tag + this + } + + private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0 + private def add(xs: Rep) = origins(xs) += 1 + private def total = origins.values.foldLeft(0L)(_ + _) + + // We find the right line by dropping any from around here and any + // from the method's origin class. + private def dropStackElement(cn: String) = + (cn startsWith OriginsName) || (cn startsWith originClass) + + // Create a stack and whittle it down to the interesting part. + private def readStack(): Array[StackTraceElement] = + (new Throwable).getStackTrace dropWhile (el => dropStackElement(el.getClassName)) + + def apply[T](body: => T): T = { + add(newRep(readStack())) + body + } + def clear() = origins.clear() + def show() = { + println("\n>> Origins %s.%s logged %s calls from %s distinguished sources.\n".format(originClass, tag, total, origins.keys.size)) + origins.toList sortBy (-_._2) foreach { + case (k, v) => println("%7s %s".format(v, repString(k))) + } + } + def purge() = { + show() + clear() + } +} + +object Origins { + private type StackSlice = Array[StackTraceElement] + private val OriginsName = classOf[Origins].getName + private val counters = new mutable.HashSet[Origins] + + { + // Console.println("\nOrigins loaded: registering shutdown hook to display results.") + sys.addShutdownHook(counters foreach (_.purge())) + } + + def apply[T: Manifest](tag: String): Origins = apply(tag, manifest[T].erasure) + def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz)) + def apply(tag: String, orElse: => Origins): Origins = { + counters find (_.tag == tag) getOrElse { + val res = orElse setTag tag + counters += res + res + } + } + + class OneLine(clazz: Class[_]) extends Origins { + type Rep = StackTraceElement + val originClass = clazz.getName stripSuffix MODULE_SUFFIX_STRING + def newRep(xs: StackSlice): Rep = xs(0) + def repString(rep: Rep) = " " + rep + } +} diff --git a/src/compiler/scala/tools/nsc/util/Origins.scala b/src/compiler/scala/tools/nsc/util/Origins.scala deleted file mode 100644 index f8ba34ae3c..0000000000 --- a/src/compiler/scala/tools/nsc/util/Origins.scala +++ /dev/null @@ -1,107 +0,0 @@ -/* NSC -- new scala compiler - * Copyright 2005-2011 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools.nsc -package util - -import scala.reflect.NameTransformer._ - -/** A debugging class for logging from whence a method is being called. - * Say you wanted to discover who was calling phase_= in SymbolTable. - * You could do this: - * - * {{{ - * private lazy val origins = Origins[SymbolTable]("phase_=") - * // Commented out original enclosed for contrast - * // final def phase_=(p: Phase): Unit = { - * final def phase_=(p: Phase): Unit = origins { - * }}} - * - * And that's it. 
When the JVM exits it would issue a report something like this: - {{{ - >> Origins scala.tools.nsc.symtab.SymbolTable.phase_= logged 145585 calls from 51 distinguished sources. - - 71114 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:862) - 16584 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:757) - 15411 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:869) - 11507 scala.tools.nsc.symtab.Symbols$Symbol.rawInfo(Symbols.scala:770) - 10285 scala.tools.nsc.symtab.Symbols$Symbol.unsafeTypeParams(Symbols.scala:864) - 6860 scala.tools.nsc.transform.SpecializeTypes.specializedTypeVars(SpecializeTypes.scala:304) - ... - }}} - * - */ - -import scala.collection.{ mutable, immutable } -import Origins._ - -abstract class Origins { - type Rep - def newRep(xs: StackSlice): Rep - def repString(rep: Rep): String - def originClass: String - - private var _tag: String = null - def tag: String = _tag - def setTag(tag: String): this.type = { - _tag = tag - this - } - - private val origins = new mutable.HashMap[Rep, Int] withDefaultValue 0 - private def add(xs: Rep) = origins(xs) += 1 - private def total = origins.values.foldLeft(0L)(_ + _) - - // We find the right line by dropping any from around here and any - // from the method's origin class. - private def dropStackElement(cn: String) = - (cn startsWith OriginsName) || (cn startsWith originClass) - - // Create a stack and whittle it down to the interesting part. - private def readStack(): Array[StackTraceElement] = - (new Throwable).getStackTrace dropWhile (el => dropStackElement(el.getClassName)) - - def apply[T](body: => T): T = { - add(newRep(readStack())) - body - } - def clear() = origins.clear() - def show() = { - println("\n>> Origins %s.%s logged %s calls from %s distinguished sources.\n".format(originClass, tag, total, origins.keys.size)) - origins.toList sortBy (-_._2) foreach { - case (k, v) => println("%7s %s".format(v, repString(k))) - } - } - def purge() = { - show() - clear() - } -} - -object Origins { - private type StackSlice = Array[StackTraceElement] - private val OriginsName = classOf[Origins].getName - private val counters = new mutable.HashSet[Origins] - - { - // Console.println("\nOrigins loaded: registering shutdown hook to display results.") - sys.addShutdownHook(counters foreach (_.purge())) - } - - def apply[T: Manifest](tag: String): Origins = apply(tag, manifest[T].erasure) - def apply(tag: String, clazz: Class[_]): Origins = apply(tag, new OneLine(clazz)) - def apply(tag: String, orElse: => Origins): Origins = { - counters find (_.tag == tag) getOrElse { - returning(orElse setTag tag)(counters += _) - } - } - - class OneLine(clazz: Class[_]) extends Origins { - type Rep = StackTraceElement - val originClass = clazz.getName stripSuffix MODULE_SUFFIX_STRING - def newRep(xs: StackSlice): Rep = xs(0) - def repString(rep: Rep) = " " + rep - } -} diff --git a/test/files/run/origins.scala b/test/files/run/origins.scala index ab873bca89..9dc6071c7b 100644 --- a/test/files/run/origins.scala +++ b/test/files/run/origins.scala @@ -1,4 +1,4 @@ -import scala.tools.nsc.util.Origins +import scala.reflect.internal.util.Origins package goxbox { object Socks { -- cgit v1.2.3 From 97f20afa4d35e629d0926a2e2cc0f20fd7f32d33 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 08:59:27 -0800 Subject: Optimization in Constructors. Reworked some old code which was far too expensive for the job it was performing. 
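The expensive check in question guards against SI-1960, where a constructor parameter that needs a field clashes with a getter inherited from a trait, roughly as follows (reconstructed from the updated t1960 check file; the body of TBase here is hypothetical):

    trait TBase { def p: Int = 3; def f(x: Int) = x }       // inherited method named p
    class Aclass(p: Int) extends TBase { def g() { f(p) } } // using p forces a field
    // error: parameter 'p' requires field but conflicts with method p in trait TBase

The rework replaces a map built up front from every trait parent's nonPrivateMembers with a single nonPrivateMember lookup per parameter accessor that is actually kept.
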
--- .../scala/tools/nsc/transform/Constructors.scala | 30 ++++++++-------------- test/files/neg/t1960.check | 2 +- 2 files changed, 12 insertions(+), 20 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 342c298e1d..e03f329577 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -254,26 +254,18 @@ abstract class Constructors extends Transform with ast.TreeDSL { for ((accSym, accBody) <- outerAccessors) if (mustbeKept(accSym)) accessTraverser.traverse(accBody) - // Conflicting symbol list from parents: see bug #1960. - // It would be better to mangle the constructor parameter name since - // it can only be used internally, but I think we need more robust name - // mangling before we introduce more of it. - val parentSymbols = Map((for { - p <- impl.parents - if p.symbol.isTrait - sym <- p.symbol.info.nonPrivateMembers - if sym.isGetter && !sym.isOuterField - } yield sym.name -> p): _*) - // Initialize all parameters fields that must be kept. - val paramInits = - for (acc <- paramAccessors if mustbeKept(acc)) yield { - if (parentSymbols contains acc.name) - unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s in '%s'".format( - acc.name, acc.name, parentSymbols(acc.name))) - - copyParam(acc, parameter(acc)) - } + val paramInits = paramAccessors filter mustbeKept map { acc => + // Check for conflicting symbol amongst parents: see bug #1960. + // It would be better to mangle the constructor parameter name since + // it can only be used internally, but I think we need more robust name + // mangling before we introduce more of it. + val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait) + if (conflict ne NoSymbol) + unit.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString)) + + copyParam(acc, parameter(acc)) + } /** Return a single list of statements, merging the generic class constructor with the * specialized stats. The original statements are retyped in the current class, and diff --git a/test/files/neg/t1960.check b/test/files/neg/t1960.check index dabf53f126..5238141c4e 100644 --- a/test/files/neg/t1960.check +++ b/test/files/neg/t1960.check @@ -1,4 +1,4 @@ -t1960.scala:5: error: parameter 'p' requires field but conflicts with p in 'TBase' +t1960.scala:5: error: parameter 'p' requires field but conflicts with method p in trait TBase class Aclass (p: Int) extends TBase { def g() { f(p) } } ^ one error found -- cgit v1.2.3 From bf05808e4e9d9aaaa3f08673ce8fc2e521861764 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 12:27:47 -0800 Subject: Optimization in refchecks. Making the inherited java vararg check cheaper. 
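In code, the saving comes from the new nonPrivateDecls helper (added to Types.scala in this commit), which only filters the symbols declared in the ancestor itself instead of computing its full member set:

    /** A list of all non-private members defined or declared in this type. */
    def nonPrivateDecls: List[Symbol] = decls filter (x => !x.isPrivate) toList

    def isJavaVarargsAncestor(clazz: Symbol) = (
         clazz.isClass
      && clazz.isJavaDefined
      && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)   // was: nonPrivateMembers
    )
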
--- src/compiler/scala/reflect/internal/Types.scala | 3 +++ src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/pos/t1459/App.scala | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 844d6b79e3..998eae0cc1 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -499,6 +499,9 @@ trait Types extends api.Types { self: SymbolTable => * Alternatives of overloaded symbol appear in the order they are declared. */ def decl(name: Name): Symbol = findDecl(name, 0) + + /** A list of all non-private members defined or declared in this type. */ + def nonPrivateDecls: List[Symbol] = decls filter (x => !x.isPrivate) toList /** The non-private defined or declared members with name `name` in this type; * an OverloadedSymbol if several exist, NoSymbol if none exist. diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index ace38bb4cb..f9689e4b14 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -154,7 +154,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R def isJavaVarargsAncestor(clazz: Symbol) = ( clazz.isClass && clazz.isJavaDefined - && (clazz.info.nonPrivateMembers exists isJavaVarArgsMethod) + && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod) ) /** Add bridges for vararg methods that extend Java vararg methods diff --git a/test/files/pos/t1459/App.scala b/test/files/pos/t1459/App.scala index 651b285b17..36e5022e94 100755 --- a/test/files/pos/t1459/App.scala +++ b/test/files/pos/t1459/App.scala @@ -1,7 +1,7 @@ package foo import base._ -object App extends Application { +object App extends scala.App { class Concrete extends AbstractBase { override def doStuff(params:java.lang.String*): Unit = println("doStuff invoked") } -- cgit v1.2.3 From cdd4aac9819bc6bb2872a503a1bb2542fcfb6230 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 15:01:07 -0800 Subject: Fix issue with higher-order type params. I think I found an issue underlying more than one bit of sketchy behavior amongst CC[_] and friends. Plus, I managed to initialize TypeConstraints with the bounds of the originating type parameter. I feel like that should cause something nifty to happen somewhere, but I have seen neither confetti nor lasers in greater quantities than I usually do. Will keep my remaining eye out. Closes SI-5359, review by @moors. 
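The crash this addresses, in the shape of the new pos test (t5359.scala below): pattern matching on a case class that carries a higher-kinded type parameter, from inside a trait that itself abstracts over a type constructor.

    object test {
      trait Step[F[_]] {
        this match {                 // used to crash: typeConstructor inapplicable
          case S1() =>
        }
      }
      case class S1[F[_]]() extends Step[F]

      (null: Step[Option]) match {   // this variant already compiled
        case S1() =>
      }
    }
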
--- src/compiler/scala/reflect/internal/Symbols.scala | 2 +- src/compiler/scala/reflect/internal/Types.scala | 45 +++++++++++++++++------ test/files/pos/t5359.scala | 17 +++++++++ 3 files changed, 51 insertions(+), 13 deletions(-) create mode 100644 test/files/pos/t5359.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 91b63f76e4..6ee061392c 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -409,7 +409,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isError = hasFlag(IS_ERROR) final def isErroneous = isError || isInitialized && tpe.isErroneous final def isTypeParameterOrSkolem = isType && hasFlag(PARAM) - final def isHigherOrderTypeParameter = owner.isTypeParameterOrSkolem + final def isHigherOrderTypeParameter = (this ne NoSymbol) && owner.isTypeParameterOrSkolem final def isTypeSkolem = isSkolem && hasFlag(PARAM) // a type symbol bound by an existential type, for instance the T in // List[T] forSome { type T } diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 5b4dc4b4c6..6b5ba05c6d 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -2451,9 +2451,29 @@ A type's typeSymbol should never be inspected directly. */ def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr)) def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List()) - // TODO why not initialise TypeConstraint with bounds of tparam? - // @PP: I tried that, didn't work out so well for me. - def apply(tparam: Symbol) = new TypeVar(tparam.tpeHK, new TypeConstraint, List(), tparam.typeParams) + // See pos/tcpoly_infer_implicit_tuple_wrapper for the test which + // fails if I initialize the type constraint with the type parameter + // bounds. It seems that in that instance it interferes with the + // inference. Thus, the isHigherOrderTypeParameter condition. + def apply(tparam: Symbol) = { + val constr = ( + if (tparam.isAbstractType && tparam.typeParams.nonEmpty) { + // Force the info of a higher-order tparam's parameters. + // Otherwise things don't end well. See SI-5359. + val info = tparam.info + if (info.bounds exists (t => t.typeSymbol.isHigherOrderTypeParameter)) { + log("TVar(" + tparam + ") receives empty constraint due to higher order type parameter in bounds " + info.bounds) + new TypeConstraint + } + else { + log("TVar(" + tparam + ") constraint initialized with bounds " + info.bounds) + new TypeConstraint(info.bounds) + } + } + else new TypeConstraint + ) + new TypeVar(tparam.tpeHK, constr, Nil, tparam.typeParams) + } def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) = new TypeVar(origin, constr, args, params) } @@ -2495,10 +2515,9 @@ A type's typeSymbol should never be inspected directly. 
override val typeArgs: List[Type], override val params: List[Symbol] ) extends Type { - private val numArgs = typeArgs.length // params are needed to keep track of variance (see mapOverArgs in SubstMap) - assert(typeArgs.isEmpty || sameLength(typeArgs, params)) - // var tid = { tidCount += 1; tidCount } //DEBUG + assert(typeArgs.isEmpty || sameLength(typeArgs, params), + "%s / params=%s / args=%s".format(origin, params, typeArgs)) /** The constraint associated with the variable */ var constr = constr0 @@ -2740,11 +2759,12 @@ A type's typeSymbol should never be inspected directly. override def isVolatile = origin.isVolatile private def levelString = if (settings.explaintypes.value) level else "" - override def safeToString = constr.inst match { - case null => "" - case NoType => "?" + levelString + origin + typeArgsString(this) - case x => "" + x - } + override def safeToString = ( + if (constr eq null) "TVar<%s,constr=null>".format(origin) + else if (constr.inst eq null) "TVar<%s,constr.inst=null>".format(origin) + else if (constr.inst eq NoType) "?" + levelString + origin + typeArgsString(this) + else "" + constr.inst + ) override def kind = "TypeVar" def cloneInternal = { @@ -3248,6 +3268,7 @@ A type's typeSymbol should never be inspected directly. */ class TypeConstraint(lo0: List[Type], hi0: List[Type], numlo0: Type, numhi0: Type, avoidWidening0: Boolean = false) { def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType) + def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi)) def this() = this(List(), List()) private var lobounds = lo0 @@ -4159,7 +4180,7 @@ A type's typeSymbol should never be inspected directly. case WildcardType => TypeVar(tp, new TypeConstraint) case BoundedWildcardType(bounds) => - TypeVar(tp, new TypeConstraint(List(bounds.lo), List(bounds.hi))) + TypeVar(tp, new TypeConstraint(bounds)) case _ => mapOver(tp) } diff --git a/test/files/pos/t5359.scala b/test/files/pos/t5359.scala new file mode 100644 index 0000000000..c22b2b1c76 --- /dev/null +++ b/test/files/pos/t5359.scala @@ -0,0 +1,17 @@ +// /scala/trac/5359/a.scala +// Thu Jan 5 13:31:05 PST 2012 + +object test { + trait Step[F[_]] { + // crash: typeConstructor inapplicable for + this match { + case S1() => + } + } + case class S1[F[_]]() extends Step[F] + + // okay + (null: Step[Option]) match { + case S1() => + } +} -- cgit v1.2.3 From 3192048a4bfb59966f93bb87a3c4f6b7ccfc80b2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 00:24:59 -0800 Subject: Error message improvement. % scalac files/neg/t5357.scala files/neg/t5357.scala:5: error: '=>' expected but ':' found. case A: N => 1 ^ one error found That's uggo! Now it says: % scalac files/neg/t5357.scala files/neg/t5357.scala:5: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.) 
case A: N => 1 ^ one error found --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 13 +++++++++---- test/files/neg/t5357.check | 4 ++++ test/files/neg/t5357.scala | 9 +++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/t5357.check create mode 100644 test/files/neg/t5357.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 5881821ab3..d7bfcfc314 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1745,11 +1745,16 @@ self => * }}} */ def pattern1(): Tree = pattern2() match { - case p @ Ident(name) if treeInfo.isVarPattern(p) && in.token == COLON => - atPos(p.pos.startOrPoint, in.skipToken()) { Typed(p, compoundType()) } - case p => - p + case p @ Ident(name) if in.token == COLON => + if (treeInfo.isVarPattern(p)) + atPos(p.pos.startOrPoint, in.skipToken())(Typed(p, compoundType())) + else { + syntaxError(in.offset, "Pattern variables must start with a lower-case letter. (SLS 8.1.1.)") + p + } + case p => p } + /** {{{ * Pattern2 ::= varid [ @ Pattern3 ] * | Pattern3 diff --git a/test/files/neg/t5357.check b/test/files/neg/t5357.check new file mode 100644 index 0000000000..3385559071 --- /dev/null +++ b/test/files/neg/t5357.check @@ -0,0 +1,4 @@ +t5357.scala:5: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.) + case A: N => 1 + ^ +one error found diff --git a/test/files/neg/t5357.scala b/test/files/neg/t5357.scala new file mode 100644 index 0000000000..369a5568a4 --- /dev/null +++ b/test/files/neg/t5357.scala @@ -0,0 +1,9 @@ +trait M + +case class N() extends M { + def mytest(x: M) = x match { + case A: N => 1 + case _ => 0 + } +} + -- cgit v1.2.3 From dd14b6a9b8b3355fae847f7fc8c1fc7d41babaa5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 6 Jan 2012 07:29:20 -0800 Subject: TypeVar tracing. % scala -Dscalac.debug.tvar scala> class Foo[CC[X] <: Traversable[X]] { def bar[T](xs: CC[T]) = xs.head } defined class Foo scala> new Foo bar List(1,2,3) [ create] ?CC ( In Foo[CC[X] <: Traversable[X]] ) [ create] ?CC ( In Foo[CC[X] <: Traversable[X]] ) [ setInst] Nothing ( In Foo[CC[X] <: Traversable[X]], CC=Nothing ) [ create] ?CC ( In Foo[CC[X] <: Traversable[X]] ) [ create] ?T ( In Foo[CC[X] <: Traversable[X]]#bar[T] ) [ create] ?A ( In List#apply[A] ) [ create] ?A ( In List#apply[A] ) [ setInst] Int ( In List#apply[A], A=Int ) [ create] ?CC ( In Foo[CC[X] <: Traversable[X]] ) [ create] ?T ( In Foo[CC[X] <: Traversable[X]]#bar[T] ) [ create] ?CC ( In Foo[CC[X] <: Traversable[X]] ) [ applyArgs] ?CC ( In Foo[CC[X] <: Traversable[X]], apply args ?T to CC ) [ setInst] List ( In Foo[CC[X] <: Traversable[X]], CC=List ) [ setInst] Int ( In Foo[CC[X] <: Traversable[X]]#bar[T], T=Int ) res0: Int = 1 Also, I gave TypeVar some polymorphism. Review by @moors. 
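The transcript above can also be reproduced outside the REPL; a minimal sketch (file and object names hypothetical), compiled with the same -Dscalac.debug.tvar property so the compiler prints the create/applyArgs/setInst events while it solves CC and T:

    // compile with: scalac -Dscalac.debug.tvar TVarTrace.scala
    class Foo[CC[X] <: Traversable[X]] {
      def bar[T](xs: CC[T]) = xs.head
    }

    object TVarTrace {
      def main(args: Array[String]): Unit =
        println(new Foo bar List(1, 2, 3))   // inferring CC=List and T=Int is what
                                             // produces the traced events
    }
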
--- .../scala/reflect/internal/Importers.scala | 2 +- src/compiler/scala/reflect/internal/Types.scala | 245 ++++++++++++++------- test/files/neg/names-defaults-neg.check | 2 +- 3 files changed, 163 insertions(+), 86 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala index 6683778671..38f808cef9 100644 --- a/src/compiler/scala/reflect/internal/Importers.scala +++ b/src/compiler/scala/reflect/internal/Importers.scala @@ -167,7 +167,7 @@ trait Importers { self: SymbolTable => case from.AntiPolyType(pre, targs) => AntiPolyType(importType(pre), targs map importType) case x: from.TypeVar => - new TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol) + TypeVar(importType(x.origin), importTypeConstraint(x.constr0), x.typeArgs map importType, x.params map importSymbol) case from.NotNullType(tpe) => NotNullType(importType(tpe)) case from.AnnotatedType(annots, tpe, selfsym) => diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 6b5ba05c6d..b26c78677d 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -87,6 +87,7 @@ trait Types extends api.Types { self: SymbolTable => private final def decr(depth: Int) = if (depth == AnyDepth) AnyDepth else depth - 1 private final val printLubs = sys.props contains "scalac.debug.lub" + private final val traceTypeVars = sys.props contains "scalac.debug.tvar" /** In case anyone wants to turn off lub verification without reverting anything. */ private final val verifyLubs = true @@ -2431,51 +2432,56 @@ A type's typeSymbol should never be inspected directly. // but pattern-matching returned the original constr0 (a bug) // now, pattern-matching returns the most recent constr object TypeVar { - // encapsulate suspension so we can automatically link the suspension of cloned - // typevars to their original if this turns out to be necessary -/* - def Suspension = new Suspension - class Suspension { - private val suspended = mutable.HashSet[TypeVar]() - def suspend(tv: TypeVar): Unit = { - tv.suspended = true - suspended += tv - } - def resumeAll(): Unit = { - for (tv <- suspended) { - tv.suspended = false + @inline final def trace[T](action: String, msg: => String)(value: T): T = { + if (traceTypeVars) { + val s = msg match { + case "" => "" + case str => "( " + str + " )" } - suspended.clear() + Console.err.println("[%10s] %-25s%s".format(action, value, s)) } + value } -*/ - def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr)) - def apply(origin: Type, constr: TypeConstraint) = new TypeVar(origin, constr, List(), List()) - // See pos/tcpoly_infer_implicit_tuple_wrapper for the test which - // fails if I initialize the type constraint with the type parameter - // bounds. It seems that in that instance it interferes with the - // inference. Thus, the isHigherOrderTypeParameter condition. - def apply(tparam: Symbol) = { - val constr = ( - if (tparam.isAbstractType && tparam.typeParams.nonEmpty) { - // Force the info of a higher-order tparam's parameters. - // Otherwise things don't end well. See SI-5359. 
- val info = tparam.info - if (info.bounds exists (t => t.typeSymbol.isHigherOrderTypeParameter)) { - log("TVar(" + tparam + ") receives empty constraint due to higher order type parameter in bounds " + info.bounds) - new TypeConstraint - } - else { - log("TVar(" + tparam + ") constraint initialized with bounds " + info.bounds) - new TypeConstraint(info.bounds) - } - } + + /** Create a new TypeConstraint based on the given symbol. + */ + private def deriveConstraint(tparam: Symbol): TypeConstraint = { + // Force the info of a higher-order tparam's parameters. + // Otherwise things don't end well. See SI-5359. However + // we can't force all info, so we have to discriminate + // carefully. + val isHigher = tparam.isAbstractType && tparam.typeParams.nonEmpty + // See pos/tcpoly_infer_implicit_tuple_wrapper for the test which + // fails if I initialize the type constraint with the type parameter + // bounds. It seems that in that instance it interferes with the + // inference. Thus, the isHigherOrderTypeParameter condition. + val isExclude = isHigher && tparam.info.bounds.exists(_.typeSymbol.isHigherOrderTypeParameter) + + def message = "" + tparam.name + " in " + tparam.owner + ( + if (isExclude) ", empty due to higher order type parameter in bounds" + else "" + ) + /*TypeVar.trace[TypeConstraint]("constr", message)*/( + if (isHigher && !isExclude) new TypeConstraint(tparam.info.bounds) else new TypeConstraint ) - new TypeVar(tparam.tpeHK, constr, Nil, tparam.typeParams) } - def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]) = - new TypeVar(origin, constr, args, params) + def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr)) + def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil) + def apply(tparam: Symbol): TypeVar = apply(tparam.tpeHK, deriveConstraint(tparam), Nil, tparam.typeParams) + + /** This is the only place TypeVars should be instantiated. + */ + def apply(origin: Type, constr: TypeConstraint, args: List[Type], params: List[Symbol]): TypeVar = { + val tv = ( + if (args.isEmpty && params.isEmpty) new TypeVar(origin, constr) + else if (args.size == params.size) new AppliedTypeVar(origin, constr, params zip args) + else if (args.isEmpty) new HKTypeVar(origin, constr, params) + else throw new TypeError("Invalid TypeVar construction: " + ((origin, constr, args, params))) + ) + + trace("create", "In " + tv.originLocation)(tv) + } } // TODO: I don't really know why this happens -- maybe because @@ -2502,22 +2508,53 @@ A type's typeSymbol should never be inspected directly. tp.typeSymbol ) + /** Precondition: params.nonEmpty. (args.nonEmpty enforced structurally.) + */ + class HKTypeVar( + _origin: Type, + _constr: TypeConstraint, + override val params: List[Symbol] + ) extends TypeVar(_origin, _constr) { + + require(params.nonEmpty, this) + override def isHigherKinded = true + override protected def typeVarString = params.map(_.name).mkString("[", ", ", "]=>" + originName) + } + + /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.) 
+ */ + class AppliedTypeVar( + _origin: Type, + _constr: TypeConstraint, + zippedArgs: List[(Symbol, Type)] + ) extends TypeVar(_origin, _constr) { + + require(zippedArgs.nonEmpty, this) + + override def params: List[Symbol] = zippedArgs map (_._1) + override def typeArgs: List[Type] = zippedArgs map (_._2) + + override protected def typeVarString = ( + zippedArgs map { case (p, a) => p.name + "=" + a } mkString (origin + "[", ", ", "]") + ) + } + /** A class representing a type variable: not used after phase `typer`. * * A higher-kinded TypeVar has params (Symbols) and typeArgs (Types). * A TypeVar with nonEmpty typeArgs can only be instantiated by a higher-kinded * type that can be applied to those args. A TypeVar is much like a TypeRef, * except it has special logic for equality and subtyping. + * + * Precondition for this class, enforced structurally: args.isEmpty && params.isEmpty. */ class TypeVar( val origin: Type, - val constr0: TypeConstraint, - override val typeArgs: List[Type], - override val params: List[Symbol] + val constr0: TypeConstraint ) extends Type { - // params are needed to keep track of variance (see mapOverArgs in SubstMap) - assert(typeArgs.isEmpty || sameLength(typeArgs, params), - "%s / params=%s / args=%s".format(origin, params, typeArgs)) + override def params: List[Symbol] = Nil + override def typeArgs: List[Type] = Nil + override def isHigherKinded = false /** The constraint associated with the variable */ var constr = constr0 @@ -2525,7 +2562,38 @@ A type's typeSymbol should never be inspected directly. /** The variable's skolemization level */ val level = skolemizationLevel - + + /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to + * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`. + * + * `constr` for `?CC` only tracks type constructors anyway, + * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]` + * `?CC's` hibounds contains List and Iterable. + */ + def applyArgs(newArgs: List[Type]): TypeVar = ( + if (newArgs.isEmpty && typeArgs.isEmpty) + this + else if (newArgs.size == params.size) { + val tv = TypeVar(origin, constr, newArgs, params) + TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv) + } + else + throw new TypeError("Invalid type application in TypeVar: " + params + ", " + newArgs) + ) + // newArgs.length may differ from args.length (could've been empty before) + // + // !!! @PP - I need an example of this, since this exception never triggers + // even though I am requiring the size match. + // + // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A] + // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver) + // TypeVars get applied to different arguments over time (in asSeenFrom) + // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala + // thus: make new TypeVar's for every application of a TV to args, + // inference may generate several TypeVar's for a single type parameter that must be inferred, + // only one of them is in the set of tvars that need to be solved, but + // they share the same TypeConstraint instance + // When comparing to types containing skolems, remember the highest level // of skolemization. 
If that highest level is higher than our initial // skolemizationLevel, we can't re-use those skolems as the solution of this @@ -2536,26 +2604,6 @@ A type's typeSymbol should never be inspected directly. private var encounteredHigherLevel = false private def shouldRepackType = enableTypeVarExperimentals && encounteredHigherLevel - /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to - * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`. - * - * `constr` for `?CC` only tracks type constructors anyway, - * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]` - * `?CC's` hibounds contains List and Iterable. - */ - def applyArgs(newArgs: List[Type]): TypeVar = - if (newArgs.isEmpty) this // SubstMap relies on this (though this check is redundant when called from appliedType...) - else TypeVar(origin, constr, newArgs, params) // @M TODO: interaction with undoLog?? - // newArgs.length may differ from args.length (could've been empty before) - // example: when making new typevars, you start out with C[A], then you replace C by ?C, which should yield ?C[A], then A by ?A, ?C[?A] - // we need to track a TypeVar's arguments, and map over them (see TypeMap::mapOver) - // TypeVars get applied to different arguments over time (in asSeenFrom) - // -- see pos/tcpoly_infer_implicit_tuplewrapper.scala - // thus: make new TypeVar's for every application of a TV to args, - // inference may generate several TypeVar's for a single type parameter that must be inferred, - // only one of them is in the set of tvars that need to be solved, but - // they share the same TypeConstraint instance - // // invariant: before mutating constr, save old state in undoLog // (undoLog is used to reset constraints to avoid piling up unrelated ones) @@ -2564,7 +2612,8 @@ A type's typeSymbol should never be inspected directly. undoLog record this // if we were compared against later typeskolems, repack the existential, // because skolems are only compatible if they were created at the same level - constr.inst = if (shouldRepackType) repackExistential(tp) else tp + val res = if (shouldRepackType) repackExistential(tp) else tp + constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res) } def addLoBound(tp: Type, isNumericBound: Boolean = false) { @@ -2641,11 +2690,10 @@ A type's typeSymbol should never be inspected directly. * type parameter we're trying to infer (the result will be sanity-checked later). */ def unifyFull(tpe: Type) = { - // Since the alias/widen variations are often no-ops, this - // keenly collects them in a Set to avoid redundant tests. + // The alias/widen variations are often no-ops. val tpes = ( - if (isLowerBound) Set(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias) - else Set(tpe) + if (isLowerBound) List(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias).distinct + else List(tpe) ) tpes exists { tp => val lhs = if (isLowerBound) tp.typeArgs else typeArgs @@ -2745,33 +2793,54 @@ A type's typeSymbol should never be inspected directly. || !containsSkolemAboveLevel(tp) // side-effects tracking boolean || enableTypeVarExperimentals // -Xexperimental: always say we're relatable, track consequences ) - override val isHigherKinded = typeArgs.isEmpty && params.nonEmpty - override def normalize: Type = + override def normalize: Type = ( if (constr.instValid) constr.inst // get here when checking higher-order subtyping of the typevar by itself // TODO: check whether this ever happens? 
else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor))) else super.normalize - + ) override def typeSymbol = origin.typeSymbol override def isStable = origin.isStable override def isVolatile = origin.isVolatile + private def tparamsOfSym(sym: Symbol) = sym.info match { + case PolyType(tparams, _) if tparams.nonEmpty => + tparams map (_.defString) mkString("[", ",", "]") + case _ => "" + } + def originName = { + val name = origin.typeSymbolDirect.decodedName + if (name startsWith "_$") origin.typeSymbol.decodedName else name + } + def originLocation = { + val sym = origin.typeSymbolDirect + val owner = sym.owner + val clazz = owner.enclClass + val ownsString = ( + if (owner.isMethod) "#" + owner.name + tparamsOfSym(owner) + else if (owner.isAbstractType) "#" + owner.defString + else "" + ) + clazz.decodedName + tparamsOfSym(clazz) + ownsString + } private def levelString = if (settings.explaintypes.value) level else "" + protected def typeVarString = originName override def safeToString = ( - if (constr eq null) "TVar<%s,constr=null>".format(origin) - else if (constr.inst eq null) "TVar<%s,constr.inst=null>".format(origin) - else if (constr.inst eq NoType) "?" + levelString + origin + typeArgsString(this) - else "" + constr.inst + if ((constr eq null) || (constr.inst eq null)) "TVar<" + originName + "=null>" + else if (constr.inst ne NoType) "" + constr.inst + else "?" + levelString + originName ) override def kind = "TypeVar" def cloneInternal = { // cloning a suspended type variable when it's suspended will cause the clone // to never be resumed with the current implementation - assert(!suspended) - TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params? + assert(!suspended, this) + TypeVar.trace("clone", originLocation)( + TypeVar(origin, constr cloneInternal, typeArgs, params) // @M TODO: clone args/params? + ) } } @@ -3326,10 +3395,18 @@ A type's typeSymbol should never be inspected directly. tc } - override def toString = - (loBounds map (_.safeToString)).mkString("[ _>:(", ",", ") ") + - (hiBounds map (_.safeToString)).mkString("| _<:(", ",", ") ] _= ") + - inst.safeToString + override def toString = { + val boundsStr = ( + if (loBounds.isEmpty && hiBounds.isEmpty) "[]" + else { + val lostr = if (loBounds.isEmpty) "" else loBounds map (_.safeToString) mkString("_>:(", ", ", ")") + val histr = if (hiBounds.isEmpty) "" else hiBounds map (_.safeToString) mkString("_<:(", ", ", ")") + List(lostr, histr) filterNot (_ == "") mkString ("[", " | ", "]") + } + ) + if (inst eq NoType) boundsStr + else boundsStr + " _= " + inst.safeToString + } } trait AnnotationFilter extends TypeMap { diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index 01ef54e0ea..01bbe2de4e 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -83,7 +83,7 @@ names-defaults-neg.scala:76: error: no type parameters for method test4: (x: T[T --- because --- argument expression's type is not compatible with formal parameter type; found : List[Int] - required: ?T[?T[List[?T[X forSome { type X }]]]] + required: ?T Error occurred in an application involving default arguments. test4() ^ -- cgit v1.2.3 From f39537a369e3b137f5b1bef21cc8f5d86bc9d9d8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 6 Jan 2012 14:48:28 -0800 Subject: Fix for crasher during type inference. Well, "fix" is pretty generous, how about "workaround". It does seem to do the job. 
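The declaration-order dependence being papered over, condensed from the new pos test (t4070.scala below, including its trailing transcript): the same pattern match either compiles or crashes depending on whether it appears before or after the classes it refers to.

    trait Foo {
      class Dingus[T]
      class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]]

      // placed after the classes this is fine; declared before them it used to die with
      // "Invalid type application in TypeVar: List(), List(Int)"
      def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () }
    }
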
Closes SI-4070, review by @moors. --- .../scala/tools/nsc/typechecker/Infer.scala | 13 ++++++-- test/files/pos/t4070.scala | 37 ++++++++++++++++++++++ test/files/pos/t4070b.scala | 35 ++++++++++++++++++++ 3 files changed, 83 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t4070.scala create mode 100644 test/files/pos/t4070b.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 2bd307e31a..295b66b17f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1447,8 +1447,17 @@ trait Infer { /** A traverser to collect type parameters referred to in a type */ object freeTypeParamsOfTerms extends SymCollector { - protected def includeCondition(sym: Symbol): Boolean = - sym.isAbstractType && sym.owner.isTerm + // An inferred type which corresponds to an unknown type + // constructor creates a file/declaration order-dependent crasher + // situation, the behavior of which depends on the state at the + // time the typevar is created. Until we can deal with these + // properly, we can avoid it by ignoring type parameters which + // have type constructors amongst their bounds. See SI-4070. + protected def includeCondition(sym: Symbol) = ( + sym.isAbstractType + && sym.owner.isTerm + && !sym.info.bounds.exists(_.typeParams.nonEmpty) + ) } /** A traverser to collect type parameters referred to in a type diff --git a/test/files/pos/t4070.scala b/test/files/pos/t4070.scala new file mode 100644 index 0000000000..29c8d16e30 --- /dev/null +++ b/test/files/pos/t4070.scala @@ -0,0 +1,37 @@ +package a { + // method before classes + trait Foo { + def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () } + + class Dingus[T] + class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]] + } +} + +package b { + // classes before method + trait Foo { + class Dingus[T] + class Bippy[CC[X] <: Seq[X]]() extends Dingus[CC[Int]] + + def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () } + } +} + + +/* +// With crash below the clasess: +% scalac -Dscalac.debug.tvar ./a.scala +[ create] ?_$1 ( In Foo#crash ) +[ setInst] tv[Int] ( In Foo#crash, _$1=tv[Int] ) +[ create] tv[Int] ( In Foo#crash ) +[ clone] tv[Int] ( Foo#crash ) + +// With crash above the classes: +% scalac -Dscalac.debug.tvar ./a.scala +[ create] ?tv ( In Foo#crash ) +./a.scala:2: error: Invalid type application in TypeVar: List(), List(Int) + def crash(x: Dingus[_]): Unit = x match { case m: Bippy[tv] => () } + ^ +one error found +*/ diff --git a/test/files/pos/t4070b.scala b/test/files/pos/t4070b.scala new file mode 100644 index 0000000000..36d03de80c --- /dev/null +++ b/test/files/pos/t4070b.scala @@ -0,0 +1,35 @@ +package a { + abstract class DeliteOp[B] + abstract class DeliteCollection[A] + abstract class Exp[T] { def Type: T } + + trait DeliteOpMap[A,B,C[X] <: DeliteCollection[X]] extends DeliteOp[C[B]] { + val in: Exp[C[A]] + val func: Exp[B] + val alloc: Exp[C[B]] + } + + object Test { + def f(x: DeliteOp[_]) = x match { + case map: DeliteOpMap[_,_,_] => map.alloc.Type + } + } +} + +package b { + object Test { + def f(x: DeliteOp[_]) = x match { + case map: DeliteOpMap[_,_,_] => map.alloc.Type + } + } + + abstract class DeliteOp[B] + abstract class DeliteCollection[A] + abstract class Exp[T] { def Type: T } + + trait DeliteOpMap[A,B,C[X] <: DeliteCollection[X]] extends DeliteOp[C[B]] { + val in: Exp[C[A]] + val func: Exp[B] + val alloc: 
Exp[C[B]] + } +} \ No newline at end of file -- cgit v1.2.3 From d6346f7c567894e635d92fe9408d2b340c93b9b4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 6 Jan 2012 15:51:09 -0800 Subject: Fix for crasher where Arrays meet abstract types. This sort of thing was crashing. No longer. trait Fooz[Q <: Array[_]] { def f0(x: Q) = x.length } --- src/compiler/scala/tools/nsc/transform/Erasure.scala | 10 ++++++---- test/files/run/array-existential-bound.check | 4 ++++ test/files/run/array-existential-bound.scala | 17 +++++++++++++++++ 3 files changed, 27 insertions(+), 4 deletions(-) create mode 100644 test/files/run/array-existential-bound.check create mode 100644 test/files/run/array-existential-bound.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index b327579c8b..f3b1e77c8d 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -887,8 +887,9 @@ abstract class Erasure extends AddInterfaces fun.symbol != Object_isInstanceOf) => // leave all other type tests/type casts, remove all other type applications preErase(fun) - case Apply(fn @ Select(qual, name), args) if (fn.symbol.owner == ArrayClass) => - if (unboundedGenericArrayLevel(qual.tpe.widen) == 1) + case Apply(fn @ Select(qual, name), args) if fn.symbol.owner == ArrayClass => + // Have to also catch calls to abstract types which are bounded by Array. + if (unboundedGenericArrayLevel(qual.tpe.widen) == 1 || qual.tpe.typeSymbol.isAbstractType) { // convert calls to apply/update/length on generic arrays to // calls of ScalaRunTime.array_xxx method calls global.typer.typedPos(tree.pos)({ @@ -901,14 +902,15 @@ abstract class Erasure extends AddInterfaces } gen.mkRuntimeCall(arrayMethodName, qual :: args) }) - else + } + else { // store exact array erasure in map to be retrieved later when we might // need to do the cast in adaptMember treeCopy.Apply( tree, SelectFromArray(qual, name, erasure(tree.symbol, qual.tpe)).copyAttrs(fn), args) - + } case Apply(fn @ Select(qual, _), Nil) if interceptedMethods(fn.symbol) => if (fn.symbol == Any_## || fn.symbol == Object_##) { // This is unattractive, but without it we crash here on ().## because after diff --git a/test/files/run/array-existential-bound.check b/test/files/run/array-existential-bound.check new file mode 100644 index 0000000000..f5cca843e3 --- /dev/null +++ b/test/files/run/array-existential-bound.check @@ -0,0 +1,4 @@ +2 +1000 +1000 +26 diff --git a/test/files/run/array-existential-bound.scala b/test/files/run/array-existential-bound.scala new file mode 100644 index 0000000000..bc442d39f7 --- /dev/null +++ b/test/files/run/array-existential-bound.scala @@ -0,0 +1,17 @@ +trait Fooz[Q <: Array[_]] { + def f0(x: Q) = x.length +} + +object Test extends Fooz[Array[Int]] { + val f1 = new Fooz[Array[String]] { } + val f2 = new Fooz[Array[Int]] { } + val f3 = new Fooz[Array[Any]] { } + val f4 = new Fooz[Array[_]] { } + + def main(args: Array[String]): Unit = { + println(f1.f0(Array[String]("a", "b"))) + println(f2.f0(1 to 1000 toArray)) + println(f3.f0((1 to 1000).toArray[Any])) + println(f4.f0('a' to 'z' toArray)) + } +} -- cgit v1.2.3 From a6ebd0f3ee2610ce1f5c3b2aee269ea8b2cfd6df Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 6 Jan 2012 21:23:56 -0800 Subject: Remedy spurious bounds conformance failure. 
Misters hkarg and hkparam have to work harder to see things from the same perspective, so they don't end up in a huff over bounds which were the same all along. Closes SI-5020, review by @moors. --- src/compiler/scala/reflect/internal/Kinds.scala | 10 ++++------ test/files/pos/t5020.scala | 19 +++++++++++++++++++ 2 files changed, 23 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/t5020.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala index 15fcb5f94d..e675be43dc 100644 --- a/src/compiler/scala/reflect/internal/Kinds.scala +++ b/src/compiler/scala/reflect/internal/Kinds.scala @@ -110,10 +110,7 @@ trait Kinds { ): List[(Type, Symbol, KindErrors)] = { // instantiate type params that come from outside the abstract type we're currently checking - def transform(tp: Type, clazz: Symbol): Type = - tp.asSeenFrom(pre, clazz) - def transformedBounds(p: Symbol, o: Symbol) = - transform(p.info.instantiateTypeParams(tparams, targs).bounds, o) + def transform(tp: Type, clazz: Symbol): Type = tp.asSeenFrom(pre, clazz) // check that the type parameters hkargs to a higher-kinded type conform to the // expected params hkparams @@ -131,6 +128,7 @@ trait Kinds { // @M sometimes hkargs != arg.typeParams, the symbol and the type may // have very different type parameters val hkparams = param.typeParams + def kindCheck(cond: Boolean, f: KindErrors => KindErrors) { if (!cond) kindErrors = f(kindErrors) @@ -160,8 +158,8 @@ trait Kinds { // conceptually the same. Could also replace the types by // polytypes, but can't just strip the symbols, as ordering // is lost then. - val declaredBounds = transformedBounds(hkparam, paramowner) - val declaredBoundsInst = bindHKParams(declaredBounds) + val declaredBounds = transform(hkparam.info.instantiateTypeParams(tparams, targs).bounds, paramowner) + val declaredBoundsInst = transform(bindHKParams(declaredBounds), owner) val argumentBounds = transform(hkarg.info.bounds, owner) kindCheck(declaredBoundsInst <:< argumentBounds, _ strictnessError (hkarg -> hkparam)) diff --git a/test/files/pos/t5020.scala b/test/files/pos/t5020.scala new file mode 100644 index 0000000000..06f7723f9f --- /dev/null +++ b/test/files/pos/t5020.scala @@ -0,0 +1,19 @@ +package a { + sealed trait GenericList[U, M[_ <: U]] { + type Transformed[N[MMA <: U]] <: GenericList[U, N] + } + + trait GenericCons[U, M[_ <: U], T <: GenericList[U, M]] extends GenericList[U, M] { + type Transformed[N[MMB <: U]] = GenericCons[U, N, GenericList[U, M]#Transformed[N]] + } +} + +package b { + sealed trait GenericList[L, M[_ >: L]] { + type Transformed[N[MMA >: L]] <: GenericList[L, N] + } + + trait GenericCons[L, M[_ >: L], T <: GenericList[L, M]] extends GenericList[L, M] { + type Transformed[N[MMB >: L]] = GenericCons[L, N, T#Transformed[N]] + } +} \ No newline at end of file -- cgit v1.2.3 From 4787f883604d1344257c0b40c15790c3dde477f2 Mon Sep 17 00:00:00 2001 From: Szabolcs Berecz Date: Sat, 7 Jan 2012 17:40:00 +0100 Subject: Fixed equality and string representation of xml attributes with null value Prior to this patch an element carrying a null-valued attribute was not equal to the equivalent element without that attribute, and its string representation differed as well. This includes changing MetaData.normalize() so that it doesn't reverse the chain. On the downside, the iterate function in MetaData.normalize() is not tail-recursive now.
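As a rough illustration of the intended behaviour (a sketch only; the element name `t` and attribute name `a` are invented here, not taken from the patch's test files):

    import scala.xml._
    // A null attribute value is now dropped when the element's attributes are
    // normalized, so the element equals one that never had the attribute and
    // the attribute no longer appears in the serialized output.
    val plain    = <t/>
    val withNull = <t a={ null: String }/>
    assert(plain == withNull)
    assert(!withNull.toString.contains("a="))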
--- src/library/scala/xml/Elem.scala | 4 +++- src/library/scala/xml/MetaData.scala | 4 ++-- src/library/scala/xml/UnprefixedAttribute.scala | 2 +- src/library/scala/xml/Utility.scala | 2 +- test/files/jvm/xml03syntax.check | 2 +- test/files/run/xml-attribute.scala | 14 ++++++++++++++ 6 files changed, 22 insertions(+), 6 deletions(-) create mode 100644 test/files/run/xml-attribute.scala (limited to 'test/files') diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala index 127e6e0ab7..df52b34f87 100644 --- a/src/library/scala/xml/Elem.scala +++ b/src/library/scala/xml/Elem.scala @@ -41,7 +41,7 @@ object Elem { class Elem( override val prefix: String, val label: String, - override val attributes: MetaData, + attributes1: MetaData, override val scope: NamespaceBinding, val child: Node*) extends Node with Serializable @@ -49,6 +49,8 @@ extends Node with Serializable final override def doCollectNamespaces = true final override def doTransform = true + override val attributes = MetaData.normalize(attributes1, scope) + if (prefix == "") throw new IllegalArgumentException("prefix of zero length, use null instead") diff --git a/src/library/scala/xml/MetaData.scala b/src/library/scala/xml/MetaData.scala index 98e863eb37..c516747bae 100644 --- a/src/library/scala/xml/MetaData.scala +++ b/src/library/scala/xml/MetaData.scala @@ -38,8 +38,8 @@ object MetaData { def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = { lazy val key = getUniversalKey(md, scope) if (md eq Null) normalized_attribs - else if (set(key)) iterate(md.next, normalized_attribs, set) - else iterate(md.next, md copy normalized_attribs, set + key) + else if ((md.value eq null) || set(key)) iterate(md.next, normalized_attribs, set) + else md copy iterate(md.next, normalized_attribs, set + key) } iterate(attribs, Null, Set()) } diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/library/scala/xml/UnprefixedAttribute.scala index c56fba1e6c..b6800d5ed1 100644 --- a/src/library/scala/xml/UnprefixedAttribute.scala +++ b/src/library/scala/xml/UnprefixedAttribute.scala @@ -22,7 +22,7 @@ extends Attribute final val pre = null val next = if (value ne null) next1 else next1.remove(key) - /** same as this(key, Text(value), next) */ + /** same as this(key, Text(value), next), or no attribute if value is null */ def this(key: String, value: String, next: MetaData) = this(key, if (value ne null) Text(value) else null: NodeSeq, next) diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala index 9b48f4e1bb..fc20b892b9 100644 --- a/src/library/scala/xml/Utility.scala +++ b/src/library/scala/xml/Utility.scala @@ -61,7 +61,7 @@ object Utility extends AnyRef with parsing.TokenTests { val key = md.key val smaller = sort(md.filter { m => m.key < key }) val greater = sort(md.filter { m => m.key > key }) - smaller.append( Null ).append(md.copy ( greater )) + smaller.copy(md.copy ( greater )) } /** Return the node with its attribute list sorted alphabetically diff --git a/test/files/jvm/xml03syntax.check b/test/files/jvm/xml03syntax.check index 75dc539137..9fbedc2ae6 100644 --- a/test/files/jvm/xml03syntax.check +++ b/test/files/jvm/xml03syntax.check @@ -23,4 +23,4 @@ true 4 node=, key=Some(hello) -node=, key=None +node=, key=None diff --git a/test/files/run/xml-attribute.scala b/test/files/run/xml-attribute.scala new file mode 100644 index 0000000000..2b83f70b22 --- /dev/null +++ b/test/files/run/xml-attribute.scala @@ -0,0 +1,14 @@ +import xml.Node + 
+object Test { + def main(args: Array[String]): Unit = { + val noAttr = + val attrNull = + val attrNone = + assert(noAttr == attrNull) + assert(noAttr == attrNone) + assert(noAttr.toString() == "") + assert(attrNull.toString() == "") + assert(attrNone.toString() == "") + } +} -- cgit v1.2.3 From 51089b34a7a535498dee42e6465d4d577d65b7d5 Mon Sep 17 00:00:00 2001 From: Szabolcs Berecz Date: Sat, 7 Jan 2012 18:23:21 +0100 Subject: Accept prefixed xml attributes with null value This changes makes PrefixedAttribute work the same way as UnprefixedAttribute with respect to null values: is accepted and results in --- src/library/scala/xml/PrefixedAttribute.scala | 15 +++++++++------ test/files/run/xml-attribute.scala | 25 ++++++++++++++++++++++--- 2 files changed, 31 insertions(+), 9 deletions(-) (limited to 'test/files') diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/library/scala/xml/PrefixedAttribute.scala index 436dfcda43..b80d6a1c73 100644 --- a/src/library/scala/xml/PrefixedAttribute.scala +++ b/src/library/scala/xml/PrefixedAttribute.scala @@ -13,22 +13,25 @@ package scala.xml * * @param pre ... * @param key ... - * @param value the attribute value, which may not be null + * @param value the attribute value * @param next ... */ class PrefixedAttribute( val pre: String, val key: String, val value: Seq[Node], - val next: MetaData) + val next1: MetaData) extends Attribute { - if (value eq null) - throw new UnsupportedOperationException("value is null") + val next = if (value ne null) next1 else next1.remove(key) - /** same as this(key, Utility.parseAttributeValue(value), next) */ + /** same as this(pre, key, Text(value), next), or no attribute if value is null */ def this(pre: String, key: String, value: String, next: MetaData) = - this(pre, key, Text(value), next) + this(pre, key, if (value ne null) Text(value) else null: NodeSeq, next) + + /** same as this(pre, key, value.get, next), or no attribute if value is None */ + def this(pre: String, key: String, value: Option[Seq[Node]], next: MetaData) = + this(pre, key, value.orNull, next) /** Returns a copy of this unprefixed attribute with the given * next field. diff --git a/test/files/run/xml-attribute.scala b/test/files/run/xml-attribute.scala index 2b83f70b22..8b261acc94 100644 --- a/test/files/run/xml-attribute.scala +++ b/test/files/run/xml-attribute.scala @@ -5,10 +5,29 @@ object Test { val noAttr = val attrNull = val attrNone = + val preAttrNull = + val preAttrNone = assert(noAttr == attrNull) assert(noAttr == attrNone) - assert(noAttr.toString() == "") - assert(attrNull.toString() == "") - assert(attrNone.toString() == "") + assert(noAttr == preAttrNull) + assert(noAttr == preAttrNone) + + val noAttrStr = "" + assert(noAttr.toString() == noAttrStr) + assert(attrNull.toString() == noAttrStr) + assert(attrNone.toString() == noAttrStr) + assert(preAttrNull.toString() == noAttrStr) + assert(preAttrNone.toString() == noAttrStr) + + val xml1 = + val xml2 = + val xml3 = + assert(xml1 == xml2) + assert(xml1 == xml3) + + val xml1Str = "" + assert(xml1.toString() == xml1Str) + assert(xml2.toString() == xml1Str) + assert(xml3.toString() == xml1Str) } } -- cgit v1.2.3 From 27d19715af59e2e438808ae668c093ad61c8f728 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 7 Jan 2012 13:31:11 -0800 Subject: Made Array manifests return type arguments. Closes SI-3758. 
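For example, the element manifest is now carried along (a brief sketch mirroring the checks in the new test below):

    // An array manifest now records its element manifest as a type argument.
    assert(manifest[Array[Int]].typeArguments contains manifest[Int])
    assert(classManifest[Array[String]].typeArguments contains classManifest[String])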
--- src/library/scala/reflect/ClassManifest.scala | 4 ++-- src/library/scala/reflect/Manifest.scala | 2 +- test/files/run/t3758.scala | 10 ++++++++++ 3 files changed, 13 insertions(+), 3 deletions(-) create mode 100644 test/files/run/t3758.scala (limited to 'test/files') diff --git a/src/library/scala/reflect/ClassManifest.scala b/src/library/scala/reflect/ClassManifest.scala index acd28f04f5..466b57dea7 100644 --- a/src/library/scala/reflect/ClassManifest.scala +++ b/src/library/scala/reflect/ClassManifest.scala @@ -127,7 +127,7 @@ trait ClassManifest[T] extends OptManifest[T] with Equals with Serializable { java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]] def arrayManifest: ClassManifest[Array[T]] = - ClassManifest.classType[Array[T]](arrayClass[T](erasure)) + ClassManifest.classType[Array[T]](arrayClass[T](erasure), this) def newArray(len: Int): Array[T] = java.lang.reflect.Array.newInstance(erasure, len).asInstanceOf[Array[T]] @@ -220,7 +220,7 @@ object ClassManifest { new ClassTypeManifest[T](Some(prefix), clazz, args.toList) def arrayType[T](arg: OptManifest[_]): ClassManifest[Array[T]] = arg match { - case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]] + case NoManifest => Object.asInstanceOf[ClassManifest[Array[T]]] case m: ClassManifest[_] => m.asInstanceOf[ClassManifest[T]].arrayManifest } diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 18fd34ed2e..be08409636 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -44,7 +44,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals { override def typeArguments: List[Manifest[_]] = Nil override def arrayManifest: Manifest[Array[T]] = - Manifest.classType[Array[T]](arrayClass[T](erasure)) + Manifest.classType[Array[T]](arrayClass[T](erasure), this) override def canEqual(that: Any): Boolean = that match { case _: Manifest[_] => true diff --git a/test/files/run/t3758.scala b/test/files/run/t3758.scala new file mode 100644 index 0000000000..18750b0a9c --- /dev/null +++ b/test/files/run/t3758.scala @@ -0,0 +1,10 @@ +object Test { + def main(args: Array[String]): Unit = { + assert(classManifest[Array[String]].typeArguments contains classManifest[String]) + assert(classManifest[Array[Int]].typeArguments contains classManifest[Int]) + assert(classManifest[Array[Float]].typeArguments contains classManifest[Float]) + assert(manifest[Array[String]].typeArguments contains manifest[String]) + assert(manifest[Array[Int]].typeArguments contains manifest[Int]) + assert(manifest[Array[Float]].typeArguments contains manifest[Float]) + } +} -- cgit v1.2.3 From dc1bbb919eda7e3ec49e4b5cd9d726f58d318cf7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 7 Jan 2012 10:50:44 -0800 Subject: TypeConstraint/TypeVar refinement. I zeroed in on the actual conditions under which the parameter bounds can be utilized without poisoning the well. Also fixed a bug in ClassfileParser where it would get confused and set Any as a lower bound, as well as a bug or at least misbehavior where a TypeBounds with only Any/Nothing as an upper/lower bound would be treated differently than one with no bound at all. Review by @moors. 
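A source-level illustration of the Any/Nothing point (illustrative signatures only, not part of the patch): explicit top and bottom bounds carry no information, so they should constrain inference exactly like the unbounded form.

    // A TypeBounds of >: Nothing <: Any is equivalent to having no declared
    // bounds at all; both definitions impose the same (empty) constraints.
    def unbounded[A](x: A): A = x
    def bounded[A >: Nothing <: Any](x: A): A = x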
--- src/compiler/scala/reflect/internal/Types.scala | 55 +++++++++++++--------- .../nsc/symtab/classfile/ClassfileParser.scala | 7 ++- test/files/continuations-neg/t2949.check | 2 +- 3 files changed, 39 insertions(+), 25 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index f5876291ea..0b1196c1d0 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -90,6 +90,10 @@ trait Types extends api.Types { self: SymbolTable => private final val traceTypeVars = sys.props contains "scalac.debug.tvar" /** In case anyone wants to turn off lub verification without reverting anything. */ private final val verifyLubs = true + /** In case anyone wants to turn off type parameter bounds being used + * to seed type constraints. + */ + private final val propagateParameterBoundsToTypeVars = !(sys.props contains "scalac.debug.no-prop-constraints") protected val enableTypeVarExperimentals = settings.Xexperimental.value @@ -1314,6 +1318,7 @@ trait Types extends api.Types { self: SymbolTable => case TypeBounds(_, _) => that <:< this case _ => lo <:< that && that <:< hi } + def isEmptyBounds = (lo.typeSymbolDirect eq NothingClass) && (hi.typeSymbolDirect eq AnyClass) // override def isNullable: Boolean = NullClass.tpe <:< lo; override def safeToString = ">: " + lo + " <: " + hi override def kind = "TypeBoundsType" @@ -2446,25 +2451,23 @@ A type's typeSymbol should never be inspected directly. /** Create a new TypeConstraint based on the given symbol. */ private def deriveConstraint(tparam: Symbol): TypeConstraint = { - // Force the info of a higher-order tparam's parameters. - // Otherwise things don't end well. See SI-5359. However - // we can't force all info, so we have to discriminate - // carefully. - val isHigher = tparam.isAbstractType && tparam.typeParams.nonEmpty - // See pos/tcpoly_infer_implicit_tuple_wrapper for the test which - // fails if I initialize the type constraint with the type parameter - // bounds. It seems that in that instance it interferes with the - // inference. Thus, the isHigherOrderTypeParameter condition. - val isExclude = isHigher && tparam.info.bounds.exists(_.typeSymbol.isHigherOrderTypeParameter) - - def message = "" + tparam.name + " in " + tparam.owner + ( - if (isExclude) ", empty due to higher order type parameter in bounds" - else "" - ) - /*TypeVar.trace[TypeConstraint]("constr", message)*/( - if (isHigher && !isExclude) new TypeConstraint(tparam.info.bounds) - else new TypeConstraint - ) + /** Must force the type parameter's info at this point + * or things don't end well for higher-order type params. + * See SI-5359. + */ + val bounds = tparam.info.bounds + /** We can seed the type constraint with the type parameter + * bounds as long as the types are concrete. This should lower + * the complexity of the search even if it doesn't improve + * any results. + */ + if (propagateParameterBoundsToTypeVars) { + val exclude = bounds.isEmptyBounds || bounds.exists(_.typeSymbolDirect.isNonClassType) + + if (exclude) new TypeConstraint + else TypeVar.trace("constraint", "For " + tparam.fullLocationString)(new TypeConstraint(bounds)) + } + else new TypeConstraint } def unapply(tv: TypeVar): Some[(Type, TypeConstraint)] = Some((tv.origin, tv.constr)) def apply(origin: Type, constr: TypeConstraint): TypeVar = apply(origin, constr, Nil, Nil) @@ -3342,8 +3345,12 @@ A type's typeSymbol should never be inspected directly. 
def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi)) def this() = this(List(), List()) - private var lobounds = lo0 - private var hibounds = hi0 + /** Guard these lists against AnyClass and NothingClass appearing, + * else loBounds.isEmpty will have different results for an empty + * constraint and one with Nothing as a lower bound. + */ + private var lobounds = lo0 filterNot (_.typeSymbolDirect eq NothingClass) + private var hibounds = hi0 filterNot (_.typeSymbolDirect eq AnyClass) private var numlo = numlo0 private var numhi = numhi0 private var avoidWidening = avoidWidening0 @@ -3359,7 +3366,8 @@ A type's typeSymbol should never be inspected directly. else if (!isNumericSubType(tp, numlo)) numlo = numericLoBound } - else lobounds ::= tp + else if (tp.typeSymbolDirect ne NothingClass) + lobounds ::= tp } def checkWidening(tp: Type) { @@ -3378,7 +3386,8 @@ A type's typeSymbol should never be inspected directly. else if (!isNumericSubType(numhi, tp)) numhi = numericHiBound } - else hibounds ::= tp + else if (tp.typeSymbolDirect ne AnyClass) + hibounds ::= tp } def isWithinBounds(tp: Type): Boolean = diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index ac72b4d22c..de11f3aa28 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -717,7 +717,12 @@ abstract class ClassfileParser { index += 1 val bounds = variance match { case '+' => TypeBounds.upper(objToAny(sig2type(tparams, skiptvs))) - case '-' => TypeBounds.lower(sig2type(tparams, skiptvs)) + case '-' => + val tp = sig2type(tparams, skiptvs) + // sig2type seems to return AnyClass regardless of the situation: + // we don't want Any as a LOWER bound. + if (tp.typeSymbol == definitions.AnyClass) TypeBounds.empty + else TypeBounds.lower(tp) case '*' => TypeBounds.empty } val newtparam = sym.newExistential(sym.pos, newTypeName("?"+i)) setInfo bounds diff --git a/test/files/continuations-neg/t2949.check b/test/files/continuations-neg/t2949.check index dd9768807c..411aed1b5b 100644 --- a/test/files/continuations-neg/t2949.check +++ b/test/files/continuations-neg/t2949.check @@ -1,6 +1,6 @@ t2949.scala:13: error: type mismatch; found : Int - required: ? @scala.util.continuations.cpsParam[List[?],Any] + required: ? @scala.util.continuations.cpsParam[List[?],?] x * y ^ one error found -- cgit v1.2.3 From af4a5299290090ff433f2ad7bb801bf4226f423f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 8 Jan 2012 19:36:33 -0800 Subject: Fix for PartialFunction NPE. Was going straight to the field and bypassing the null guard. Closes SI-5300. 
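The failing shape, as exercised by the new test below: composing a partial function literal with orElse read the uninitialized fallBackField and threw a NullPointerException.

    val pf: PartialFunction[Any, Unit] = { case _ => () }
    pf orElse pf   // NPE before this fix; now yields the combined PartialFunction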
--- src/library/scala/runtime/AbstractPartialFunction.scala | 4 ++-- test/files/run/t5300.scala | 7 +++++++ 2 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t5300.scala (limited to 'test/files') diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala index f48d99f5af..cbe778f09b 100644 --- a/src/library/scala/runtime/AbstractPartialFunction.scala +++ b/src/library/scala/runtime/AbstractPartialFunction.scala @@ -26,7 +26,7 @@ abstract class AbstractPartialFunction[-T1, +R] private var fallBackField: PartialFunction[T1 @uncheckedVariance, R @uncheckedVariance] = _ def fallBack: PartialFunction[T1, R] = synchronized { - if (fallBackField == null) fallBackField = PartialFunction.empty + if (fallBackField eq null) fallBackField = PartialFunction.empty fallBackField } @@ -38,7 +38,7 @@ abstract class AbstractPartialFunction[-T1, +R] override def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] = { val result = this.clone.asInstanceOf[AbstractPartialFunction[A1, B1]] result.synchronized { - result.fallBackField = this.fallBackField orElse that + result.fallBackField = if (this.fallBackField eq null) that else this.fallBackField orElse that result } } diff --git a/test/files/run/t5300.scala b/test/files/run/t5300.scala new file mode 100644 index 0000000000..073b29604a --- /dev/null +++ b/test/files/run/t5300.scala @@ -0,0 +1,7 @@ +object Test { + val pf: PartialFunction[Any, Unit] = { case _ => () } + + def main(args: Array[String]): Unit = { + pf orElse pf + } +} -- cgit v1.2.3