From d71f59ebda35bd58338b13eb55a6913a38d6d90e Mon Sep 17 00:00:00 2001
From: Kato Kazuyoshi
Date: Wed, 16 Jan 2013 23:46:03 +0900
Subject: SI-4976 Scaladoc: Add a source link to package objects

---
 src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 15 +++++++++------
 test/scaladoc/run/package-object.check | 1 +
 test/scaladoc/run/package-object.scala | 1 +
 3 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index c6cfc317ea..0a469c9227 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -314,12 +314,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
     inform("Creating doc template for " + sym)

     override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
-    def inSource =
-      if (sym.sourceFile != null && ! sym.isSynthetic)
-        Some((sym.sourceFile, sym.pos.line))
+
+    protected def inSourceFromSymbol(symbol: Symbol) =
+      if (symbol.sourceFile != null && ! symbol.isSynthetic)
+        Some((symbol.sourceFile, symbol.pos.line))
       else
         None

+    def inSource = inSourceFromSymbol(sym)
+
     def sourceUrl = {
       def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
       val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -508,11 +511,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
   abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
     override def inTemplate = inTpl
     override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
-    override lazy val linearization = {
-      val symbol = sym.info.members.find {
+    override lazy val (inSource, linearization) = {
+      val representive = sym.info.members.find {
        s => s.isPackageObject
      } getOrElse sym
-      linearizationFromSymbol(symbol)
+      (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
    }
    def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
  }
diff --git a/test/scaladoc/run/package-object.check b/test/scaladoc/run/package-object.check
index 01dbcc682f..7da897a4f2 100644
--- a/test/scaladoc/run/package-object.check
+++ b/test/scaladoc/run/package-object.check
@@ -1,3 +1,4 @@
 List(test.B, test.A, scala.AnyRef, scala.Any)
 List(B, A, AnyRef, Any)
+Some((newSource,10))
 Done.
diff --git a/test/scaladoc/run/package-object.scala b/test/scaladoc/run/package-object.scala
index 5fb5a4ddf1..f5c79b1332 100644
--- a/test/scaladoc/run/package-object.scala
+++ b/test/scaladoc/run/package-object.scala
@@ -11,6 +11,7 @@ object Test extends ScaladocModelTest {

     val p = root._package("test")
     println(p.linearizationTemplates)
     println(p.linearizationTypes)
+    println(p.inSource)
   }
 }
-- cgit v1.2.3

From eab288442931f01b5bad2dcfa244a6183db0f4b6 Mon Sep 17 00:00:00 2001
From: James Iry
Date: Wed, 23 Jan 2013 16:01:59 -0800
Subject: SI-5313 Do not eliminate stores that potentially wipe references

Storing to local variables of reference or array type is indirectly
observable because it potentially allows the GC to collect an object.
So this commit makes DeadCodeElimination mark a store necessary if it
assigns to a local that was potentially stored to by a previous
necessary store.
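A minimal sketch of why such stores are observable (illustrative only, not part of the original commit; the t5313 test added below exercises the same idea via a WeakReference):

```
// Hypothetical example: if dead code elimination dropped the `ref = null`
// store below, the object would stay strongly reachable through `ref`,
// so `weak.get` could never be observed returning null.
object ObservableStore {
  def main(args: Array[String]): Unit = {
    var ref: AnyRef = new Object
    val weak = new java.lang.ref.WeakReference(ref)
    ref = null      // looks dead, but it releases the only strong reference
    System.gc()     // after a collection, weak.get may legitimately be null
    println(weak.get == null)
  }
}
```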
--- .../nsc/backend/opt/DeadCodeElimination.scala | 84 ++++++++++++++++++++-- test/files/run/t5313.check | 8 +++ test/files/run/t5313.scala | 42 +++++++++++ 3 files changed, 127 insertions(+), 7 deletions(-) create mode 100644 test/files/run/t5313.check create mode 100644 test/files/run/t5313.scala diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index fee683ce3a..90d5686bd0 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -65,6 +65,9 @@ abstract class DeadCodeElimination extends SubComponent { /** what local variables have been accessed at least once? */ var accessedLocals: List[Local] = Nil + + /** Map from a local and a basic block to the instructions that store to that local in that basic block */ + val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() /** the current method. */ var method: IMethod = _ @@ -76,6 +79,7 @@ abstract class DeadCodeElimination extends SubComponent { if (m.hasCode) { debuglog("dead code elimination on " + m); dropOf.clear() + localStores.clear() m.code.blocks.clear() accessedLocals = m.params.reverse m.code.blocks ++= linearizer.linearize(m) @@ -104,10 +108,10 @@ abstract class DeadCodeElimination extends SubComponent { for (Pair(i, idx) <- bb.toList.zipWithIndex) { i match { - case LOAD_LOCAL(l) => + case LOAD_LOCAL(_) => defs = defs + Pair(((bb, idx)), rd.vars) - case STORE_LOCAL(_) => + case STORE_LOCAL(l) => /* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it * (otherwise any side-effects of the module's constructor go lost). * (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM) @@ -125,6 +129,16 @@ abstract class DeadCodeElimination extends SubComponent { } } if (necessary) worklist += ((bb, idx)) + // add it to the localStores map + val key = (l, bb) + val set = if(localStores contains key) + localStores(key) + else { + val set = new mutable.BitSet + localStores.put(key, set) + set + } + set += idx case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) | THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) | @@ -162,11 +176,18 @@ abstract class DeadCodeElimination extends SubComponent { def mark() { // log("Starting with worklist: " + worklist) while (!worklist.isEmpty) { - val (bb, idx) = worklist.iterator.next + val (bb, idx) = worklist.head worklist -= ((bb, idx)) debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx)) val instr = bb(idx) + // adds the instrutions that define the stack values about to be consumed to the work list to + // be marked useful + def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) { + debuglog(s"\t${bb1(idx1)} is consumed by $instr") + worklist += ((bb1, idx1)) + } + if (!useful(bb)(idx)) { useful(bb) += idx dropOf.get(bb, idx) foreach { @@ -180,6 +201,58 @@ abstract class DeadCodeElimination extends SubComponent { worklist += ((bb1, idx1)) } + // Storing to local variables of reference or array type may be indirectly + // observable because may remove a reference to an object which may allow the object to be + // gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a + // LOCAL_STORE and that store to the same local "clobbers." 
If a LOCAL_STORE is marked + // useful then its clobbers must also go into the worklist to be marked useful. + case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType => + addDefs() + // previously visited blocks tracked to prevent searching forever in a cycle + val inspected = mutable.Set[BasicBlock]() + // our worklist of blocks that still need to be checked + val blocksToBeInspected = mutable.Set[BasicBlock]() + + // Tries to find the next clobber of l1 starting at idx1 in bb1. + // if it finds any it adds them clobber to the worklist. + // If not it adds both bb's exception blocks and direct + // successor blocks to the uninspectedBlocks + def findClobber(idx1: Int, bb1: BasicBlock) { + val key = ((l1, bb1)) + val foundClobber = (localStores contains key) && { + def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min + + // find the smallest index greater than or equal to idx1 + val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1)) + if (clobberIdx == -1) + false + else { + debuglog(s"\t${bb1(clobberIdx)} is a clobber of $instr") + if (!useful(bb1)(clobberIdx)) worklist += ((bb1, clobberIdx)) + true + } + } + + // if we found a clobber in this block then we don't need to add the successors + // because they'll be picked up when the worklist comes around to work on that clobber + if (!foundClobber) { + blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected) + blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected) + } + } + + // first search starting at the current index + // note we don't put bb in the inspected list yet because a loop may later force + // us back around to search from the beginning of bb + findClobber(idx + 1, bb) + // then loop until we've exhausted the set of uninspected blocks + while(!blocksToBeInspected.isEmpty) { + val bb1 = blocksToBeInspected.head + blocksToBeInspected -= bb1 + inspected += bb1 + findClobber(0, bb1) + } + case nw @ NEW(REFERENCE(sym)) => assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")") worklist += findInstruction(bb, nw.init) @@ -199,10 +272,7 @@ abstract class DeadCodeElimination extends SubComponent { () case _ => - for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) { - debuglog("\tAdding " + bb1(idx1)) - worklist += ((bb1, idx1)) - } + addDefs() } } } diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check new file mode 100644 index 0000000000..dd8791f109 --- /dev/null +++ b/test/files/run/t5313.check @@ -0,0 +1,8 @@ +STORE_LOCAL(variable kept1) +STORE_LOCAL(value result) +STORE_LOCAL(variable kept1) +STORE_LOCAL(variable kept2) +STORE_LOCAL(value kept3) +STORE_LOCAL(variable kept2) +STORE_LOCAL(variable kept4) +STORE_LOCAL(variable kept4) diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala new file mode 100644 index 0000000000..b65311622a --- /dev/null +++ b/test/files/run/t5313.scala @@ -0,0 +1,42 @@ +import scala.tools.partest.IcodeTest + +object Test extends IcodeTest { + override def printIcodeAfterPhase = "dce" + + override def extraSettings: String = super.extraSettings + " -optimize" + + override def code = + """class Foo { + def foo = true + def bar = { + var kept1 = new Object + val result = new java.lang.ref.WeakReference(kept1) + kept1 = null // we can't eliminate this assigment because result can observe + // when the object has no more references. 
See SI-5313 + var erased2 = null // we can eliminate this store because it's never used + val erased3 = erased2 // and this + var erased4 = erased2 // and this + val erased5 = erased4 // and this + var kept2: Object = new Object // ultimately can't be eliminated + while(foo) { + val kept3 = kept2 + kept2 = null // this can't, because it clobbers x, which is ultimately used + erased4 = null // safe to eliminate + println(kept3) + } + var kept4 = new Object // have to keep, it's used + try + println(kept4) + catch { + case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used + } + result + } + }""".stripMargin + + override def show() { + val storeLocal = "STORE_LOCAL" + val lines1 = collectIcode("") filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal))) + println(lines1 mkString "\n") + } +} -- cgit v1.2.3 From 9b4fa8382fe0ed6fef3ff91e2c153a1840c954b9 Mon Sep 17 00:00:00 2001 From: James Iry Date: Tue, 29 Jan 2013 09:13:30 -0800 Subject: SI-5313 Eliminate more stores by replacing clobbers with null stores When an unused store clobbers a previous store, replace it with storing a null. Don't mark clobbers as "used" so that the original clobber and all following clobbers can still be eliminated. --- .../nsc/backend/opt/DeadCodeElimination.scala | 128 +++++++++++++-------- test/files/run/t5313.check | 2 + test/files/run/t5313.scala | 8 +- 3 files changed, 87 insertions(+), 51 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 90d5686bd0..53270c0651 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -68,6 +68,9 @@ abstract class DeadCodeElimination extends SubComponent { /** Map from a local and a basic block to the instructions that store to that local in that basic block */ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() + + /** Stores that clobber previous stores to array or ref locals. See SI-5313 */ + val clobbers = mutable.Set[(BasicBlock, Int)]() /** the current method. */ var method: IMethod = _ @@ -80,6 +83,7 @@ abstract class DeadCodeElimination extends SubComponent { debuglog("dead code elimination on " + m); dropOf.clear() localStores.clear() + clobbers.clear() m.code.blocks.clear() accessedLocals = m.params.reverse m.code.blocks ++= linearizer.linearize(m) @@ -201,58 +205,15 @@ abstract class DeadCodeElimination extends SubComponent { worklist += ((bb1, idx1)) } - // Storing to local variables of reference or array type may be indirectly - // observable because may remove a reference to an object which may allow the object to be - // gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a - // LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked - // useful then its clobbers must also go into the worklist to be marked useful. case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType => addDefs() - // previously visited blocks tracked to prevent searching forever in a cycle - val inspected = mutable.Set[BasicBlock]() - // our worklist of blocks that still need to be checked - val blocksToBeInspected = mutable.Set[BasicBlock]() - - // Tries to find the next clobber of l1 starting at idx1 in bb1. - // if it finds any it adds them clobber to the worklist. 
- // If not it adds both bb's exception blocks and direct - // successor blocks to the uninspectedBlocks - def findClobber(idx1: Int, bb1: BasicBlock) { - val key = ((l1, bb1)) - val foundClobber = (localStores contains key) && { - def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min - - // find the smallest index greater than or equal to idx1 - val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1)) - if (clobberIdx == -1) - false - else { - debuglog(s"\t${bb1(clobberIdx)} is a clobber of $instr") - if (!useful(bb1)(clobberIdx)) worklist += ((bb1, clobberIdx)) - true - } - } - - // if we found a clobber in this block then we don't need to add the successors - // because they'll be picked up when the worklist comes around to work on that clobber - if (!foundClobber) { - blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected) - blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected) - } - } - - // first search starting at the current index - // note we don't put bb in the inspected list yet because a loop may later force - // us back around to search from the beginning of bb - findClobber(idx + 1, bb) - // then loop until we've exhausted the set of uninspected blocks - while(!blocksToBeInspected.isEmpty) { - val bb1 = blocksToBeInspected.head - blocksToBeInspected -= bb1 - inspected += bb1 - findClobber(0, bb1) - } - + // see SI-5313 + // search for clobbers of this store if we aren't doing l1 = null + // this doesn't catch the second store in x=null;l1=x; but in practice this catches + // a lot of null stores very cheaply + if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null))) + findClobbers(l1, bb, idx + 1) + case nw @ NEW(REFERENCE(sym)) => assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")") worklist += findInstruction(bb, nw.init) @@ -277,6 +238,67 @@ abstract class DeadCodeElimination extends SubComponent { } } } + + /** + * Finds and marks all clobbers of the given local starting in the given + * basic block at the given index + * + * Storing to local variables of reference or array type may be indirectly + * observable because it may remove a reference to an object which may allow the object + * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a + * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked + * useful then its clobbers must go into the set of clobbers, which will be + * compensated for later + */ + def findClobbers(l: Local, bb: BasicBlock, idx: Int) { + // previously visited blocks tracked to prevent searching forever in a cycle + val inspected = mutable.Set[BasicBlock]() + // our worklist of blocks that still need to be checked + val blocksToBeInspected = mutable.Set[BasicBlock]() + + // Tries to find the next clobber of l1 in bb1 starting at idx1. + // if it finds one it adds the clobber to clobbers set for later + // handling. If not it adds the direct successor blocks to + // the uninspectedBlocks to try to find clobbers there. 
Either way + // it adds the exception successor blocks for further search + def findClobberInBlock(idx1: Int, bb1: BasicBlock) { + val key = ((l, bb1)) + val foundClobber = (localStores contains key) && { + def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min + + // find the smallest index greater than or equal to idx1 + val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1)) + if (clobberIdx == -1) + false + else { + debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}") + clobbers += ((bb1, clobberIdx)) + true + } + } + + // always need to look into the exception successors for additional clobbers + // because we don't know when flow might enter an exception handler + blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected) + // If we didn't find a clobber here then we need to look at successor blocks. + // if we found a clobber then we don't need to search in the direct successors + if (!foundClobber) { + blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected) + } + } + + // first search starting at the current index + // note we don't put bb in the inspected list yet because a loop may later force + // us back around to search from the beginning of bb + findClobberInBlock(idx, bb) + // then loop until we've exhausted the set of uninspected blocks + while(!blocksToBeInspected.isEmpty) { + val bb1 = blocksToBeInspected.head + blocksToBeInspected -= bb1 + inspected += bb1 + findClobberInBlock(0, bb1) + } + } def sweep(m: IMethod) { val compensations = computeCompensations(m) @@ -306,6 +328,12 @@ abstract class DeadCodeElimination extends SubComponent { i match { case NEW(REFERENCE(sym)) => log(s"Eliminated instantation of $sym inside $m") + case STORE_LOCAL(l) if clobbers contains ((bb, idx)) => + // if an unused instruction was a clobber of a used store to a reference or array type + // then we'll replace it with the store of a null to make sure the reference is + // eliminated. See SI-5313 + bb emit CONSTANT(Constant(null)) + bb emit STORE_LOCAL(l) case _ => () } debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")") diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check index dd8791f109..aa30c0efa5 100644 --- a/test/files/run/t5313.check +++ b/test/files/run/t5313.check @@ -6,3 +6,5 @@ STORE_LOCAL(value kept3) STORE_LOCAL(variable kept2) STORE_LOCAL(variable kept4) STORE_LOCAL(variable kept4) +STORE_LOCAL(variable kept5) +STORE_LOCAL(variable kept5) diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala index b65311622a..64009e29af 100644 --- a/test/files/run/t5313.scala +++ b/test/files/run/t5313.scala @@ -13,6 +13,7 @@ object Test extends IcodeTest { val result = new java.lang.ref.WeakReference(kept1) kept1 = null // we can't eliminate this assigment because result can observe // when the object has no more references. 
See SI-5313 + kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered var erased2 = null // we can eliminate this store because it's never used val erased3 = erased2 // and this var erased4 = erased2 // and this @@ -20,7 +21,7 @@ object Test extends IcodeTest { var kept2: Object = new Object // ultimately can't be eliminated while(foo) { val kept3 = kept2 - kept2 = null // this can't, because it clobbers x, which is ultimately used + kept2 = null // this can't, because it clobbers kept2, which is used erased4 = null // safe to eliminate println(kept3) } @@ -30,6 +31,11 @@ object Test extends IcodeTest { catch { case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used } + var kept5 = new Object + print(kept5) + kept5 = null // can't eliminate it's a clobber and it's used + print(kept5) + kept5 = null // can eliminate because we don't care about clobbers of nulls result } }""".stripMargin -- cgit v1.2.3 From 29892586382ef846b0ad46271c2fba9970943faf Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Wed, 30 Jan 2013 17:21:38 +0100 Subject: SI-6539 moves @compileTimeOnly away from scala-reflect The move is done to provide forward compatibility with 2.10.0. The annotation isn't replaced with one of the macro-based solutions right away (see comments for more information about those), because we lack necessary tech in 2.10.x. --- src/reflect/scala/reflect/api/Exprs.scala | 2 ++ .../scala/reflect/internal/Definitions.scala | 2 +- .../internal/annotations/compileTimeOnly.scala | 31 ++++++++++++++++++++++ .../scala/reflect/macros/compileTimeOnly.scala | 16 ----------- test/files/neg/t6539/Macro_1.scala | 2 +- test/files/neg/t6539/Test_2.scala | 6 +++++ 6 files changed, 41 insertions(+), 18 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala delete mode 100644 src/reflect/scala/reflect/macros/compileTimeOnly.scala diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala index 562b1da8e3..2ba18a8207 100644 --- a/src/reflect/scala/reflect/api/Exprs.scala +++ b/src/reflect/scala/reflect/api/Exprs.scala @@ -90,6 +90,7 @@ trait Exprs { self: Universe => * }}} * because expr of type Expr[T] itself does not have a method foo. */ + // @compileTimeOnly("Cannot use splice outside reify") def splice: T /** @@ -106,6 +107,7 @@ trait Exprs { self: Universe => * object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... 
} * }}} */ + // @compileTimeOnly("Cannot use value except for signatures of macro implementations") val value: T override def canEqual(x: Any) = x.isInstanceOf[Expr[_]] diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 4269b65297..3806f555ac 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -949,7 +949,7 @@ trait Definitions extends api.StandardDefinitions { lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty] lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty] lazy val CloneableAttr = requiredClass[scala.annotation.cloneable] - lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.macros.compileTimeOnly") + lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly") lazy val DeprecatedAttr = requiredClass[scala.deprecated] lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName] lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance] diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala new file mode 100644 index 0000000000..058ff61fbf --- /dev/null +++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala @@ -0,0 +1,31 @@ +package scala.reflect +package internal +package annotations + +import scala.annotation.meta._ + +/** + * An annotation that designates a member should not be referred to after + * type checking (which includes macro expansion); it must only be used in + * the arguments of some other macro that will eliminate it from the AST. + * + * Later on, this annotation should be removed and implemented with domain-specific macros. + * If a certain method `inner` mustn't be called outside the context of a given macro `outer`, + * then it should itself be declared as a macro. + * + * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and + * report an error if `outer` is not detected. In principle, we could use this approach right now, + * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise + * track of the stack of the trees being typechecked. + * + * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`. + * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner` + * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out + * domain-specific logic. + * + * @param message the error message to print during compilation if a reference remains + * after type checking + * @since 2.10.1 + */ +@getter @setter @beanGetter @beanSetter +final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation diff --git a/src/reflect/scala/reflect/macros/compileTimeOnly.scala b/src/reflect/scala/reflect/macros/compileTimeOnly.scala deleted file mode 100644 index 5a3a352a53..0000000000 --- a/src/reflect/scala/reflect/macros/compileTimeOnly.scala +++ /dev/null @@ -1,16 +0,0 @@ -package scala.reflect -package macros - -import scala.annotation.meta._ - -/** - * An annotation that designates a member should not be referred to after - * type checking (which includes macro expansion); it must only be used in - * the arguments of some other macro that will eliminate it from the AST. 
- * - * @param message the error message to print during compilation if a reference remains - * after type checking - * @since 2.10.1 - */ -@getter @setter @beanGetter @beanSetter -final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation diff --git a/test/files/neg/t6539/Macro_1.scala b/test/files/neg/t6539/Macro_1.scala index ed52776d95..4f7d289e2e 100644 --- a/test/files/neg/t6539/Macro_1.scala +++ b/test/files/neg/t6539/Macro_1.scala @@ -5,6 +5,6 @@ object M { def m(a: Any, b: Any): Any = macro mImpl def mImpl(c: Context)(a: c.Expr[Any], b: c.Expr[Any]) = a - @reflect.macros.compileTimeOnly("cto may only be used as an argument to " + "m") + @reflect.internal.annotations.compileTimeOnly("cto may only be used as an argument to " + "m") def cto = 0 } diff --git a/test/files/neg/t6539/Test_2.scala b/test/files/neg/t6539/Test_2.scala index 5a602879ec..26f4504222 100644 --- a/test/files/neg/t6539/Test_2.scala +++ b/test/files/neg/t6539/Test_2.scala @@ -3,4 +3,10 @@ object Test { M.m(M.cto, ()) // error M.m((), M.cto) // okay M.cto // error + + locally { + val expr = scala.reflect.runtime.universe.reify(2) + val splice = expr.splice + val value = expr.value + } } -- cgit v1.2.3 From 2403d1ddcaa1bd76c1f376a32ec03a36d4dab48b Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 31 Jan 2013 19:46:51 +0100 Subject: SI-7046 reflection now auto-initializes knownDirectSubclasses knownDirectSubclasses joins the happy family of flags, annotations and privateWithin, which automatically trigger initialization, when used within runtime reflection. --- src/reflect/scala/reflect/internal/Symbols.scala | 6 +++++- test/files/run/t7046.check | 2 ++ test/files/run/t7046.scala | 13 +++++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t7046.check create mode 100644 test/files/run/t7046.scala diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c1b868f3cb..f4d70a991c 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -86,7 +86,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags) } - def knownDirectSubclasses = children + def knownDirectSubclasses = { + if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize + children + } + def baseClasses = info.baseClasses def module = sourceModule def thisPrefix: Type = thisType diff --git a/test/files/run/t7046.check b/test/files/run/t7046.check new file mode 100644 index 0000000000..427f1ce610 --- /dev/null +++ b/test/files/run/t7046.check @@ -0,0 +1,2 @@ +Set(class D, class E) +Set(class D, class E) diff --git a/test/files/run/t7046.scala b/test/files/run/t7046.scala new file mode 100644 index 0000000000..647a15cd18 --- /dev/null +++ b/test/files/run/t7046.scala @@ -0,0 +1,13 @@ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{currentMirror => cm} + +sealed class C +class D extends C +class E extends C + +object Test extends App { + val c = cm.staticClass("C") + println(c.knownDirectSubclasses) + c.typeSignature + println(c.knownDirectSubclasses) +} \ No newline at end of file -- cgit v1.2.3 From a07555f015939412c6e4421860abce6e5f90f710 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 30 Jan 2013 14:57:05 -0800 Subject: bytecode diffing support in ByteCodeTest use sameByteCode(methodNode1, 
methodNode2) to check methods compile to identical bytecode (line number info is not taken into account) --- src/partest/scala/tools/partest/BytecodeTest.scala | 77 ++++++++++++++++++++-- 1 file changed, 73 insertions(+), 4 deletions(-) diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala index 93183c2095..1f43ee7d2e 100644 --- a/src/partest/scala/tools/partest/BytecodeTest.scala +++ b/src/partest/scala/tools/partest/BytecodeTest.scala @@ -8,11 +8,11 @@ import asm.tree.{ClassNode, MethodNode, InsnList} import java.io.InputStream /** - * Providies utilities for inspecting bytecode using ASM library. + * Provides utilities for inspecting bytecode using ASM library. * * HOW TO USE * 1. Create subdirectory in test/files/jvm for your test. Let's name it $TESTDIR. - * 2. Create $TESTDIR/BytecodeSrc_1.scala that contains Scala source file which you + * 2. Create $TESTDIR/BytecodeSrc_1.scala that contains Scala source file that you * want to inspect the bytecode for. The '_1' suffix signals to partest that it * should compile this file first. * 3. Create $TESTDIR/Test.scala: @@ -35,11 +35,23 @@ abstract class BytecodeTest { def main(args: Array[String]): Unit = show +// asserts + def sameBytecode(methA: MethodNode, methB: MethodNode) = { + val isa = instructions.fromMethod(methA) + val isb = instructions.fromMethod(methB) + if (isa == isb) println("bytecode identical") + else (isa, isb).zipped.foreach { case (a, b) => + if (a == b) println("OK : "+ a) + else println("DIFF: "+ a +" <=> "+ b) + } + } + +// loading protected def getMethod(classNode: ClassNode, name: String): MethodNode = classNode.methods.asScala.find(_.name == name) getOrElse sys.error(s"Didn't find method '$name' in class '${classNode.name}'") - protected def loadClassNode(name: String): ClassNode = { + protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = { val classBytes: InputStream = (for { classRep <- classpath.findClass(name) binary <- classRep.binary @@ -47,7 +59,7 @@ abstract class BytecodeTest { val cr = new ClassReader(classBytes) val cn = new ClassNode() - cr.accept(cn, 0) + cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0) cn } @@ -58,4 +70,61 @@ abstract class BytecodeTest { val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath) new JavaClassPath(containers, DefaultJavaContext) } + + // wrap ASM's instructions so we get case class-style `equals` and `toString` + object instructions { + def fromMethod(meth: MethodNode): List[instructions.Instruction] = { + val insns = meth.instructions + val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) } + + asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[instructions.Instruction]] + } + + sealed abstract class Instruction { def opcode: Int } + case class Field (opcode: Int, desc: String, name: String, owner: String) extends Instruction + case class Incr (opcode: Int, incr: Int, `var`: Int) extends Instruction + case class Op (opcode: Int) extends Instruction + case class IntOp (opcode: Int, operand: Int) extends Instruction + case class Jump (opcode: Int, label: Label) extends Instruction + case class Ldc (opcode: Int, cst: Any) extends Instruction + case class LookupSwitch (opcode: Int, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction + case class TableSwitch (opcode: Int, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction + case 
class Method (opcode: Int, desc: String, name: String, owner: String) extends Instruction + case class NewArray (opcode: Int, desc: String, dims: Int) extends Instruction + case class TypeOp (opcode: Int, desc: String) extends Instruction + case class VarOp (opcode: Int, `var`: Int) extends Instruction + case class Label (opcode: Int, offset: Int) extends Instruction + case class FrameEntry (opcode: Int, local: List[Any], stack: List[Any]) extends Instruction + case class LineNumber (opcode: Int, line: Int, start: Label) extends Instruction + } + + abstract class AsmToScala { + import instructions._ + def labelIndex(l: asm.tree.AbstractInsnNode): Int + + def mapOver(is: List[Any]): List[Any] = is map { + case i: asm.tree.AbstractInsnNode => apply(i) + case x => x + } + + def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList + def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label] + def apply(x: asm.tree.AbstractInsnNode): Instruction = x match { + case i: asm.tree.FieldInsnNode => Field (i.getOpcode: Int, i.desc: String, i.name: String, i.owner: String) + case i: asm.tree.IincInsnNode => Incr (i.getOpcode: Int, i.incr: Int, i.`var`: Int) + case i: asm.tree.InsnNode => Op (i.getOpcode: Int) + case i: asm.tree.IntInsnNode => IntOp (i.getOpcode: Int, i.operand: Int) + case i: asm.tree.JumpInsnNode => Jump (i.getOpcode: Int, this(i.label)) + case i: asm.tree.LdcInsnNode => Ldc (i.getOpcode: Int, i.cst: Any) + case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (i.getOpcode: Int, this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]]) + case i: asm.tree.TableSwitchInsnNode => TableSwitch (i.getOpcode: Int, this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]]) + case i: asm.tree.MethodInsnNode => Method (i.getOpcode: Int, i.desc: String, i.name: String, i.owner: String) + case i: asm.tree.MultiANewArrayInsnNode => NewArray (i.getOpcode: Int, i.desc: String, i.dims: Int) + case i: asm.tree.TypeInsnNode => TypeOp (i.getOpcode: Int, i.desc: String) + case i: asm.tree.VarInsnNode => VarOp (i.getOpcode: Int, i.`var`: Int) + case i: asm.tree.LabelNode => Label (i.getOpcode: Int, labelIndex(x)) + case i: asm.tree.FrameNode => FrameEntry (i.getOpcode: Int, mapOver(lst(i.local)), mapOver(lst(i.stack))) + case i: asm.tree.LineNumberNode => LineNumber (i.getOpcode: Int, i.line: Int, this(i.start): Label) + } + } } -- cgit v1.2.3 From 415becdab8aae63470deac5a6e122fa7a697cbe0 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 31 Jan 2013 10:20:32 -0800 Subject: support testing bytecode similarity in ByteCodeTest one similarity measure comes free of charge: it ignores which variable is stored/loaded, everything else must be identical like this: `similarBytecode(methNodeA, methNodeB, equalsModuloVar)` also implemented prettier diffing --- .../scala/tools/partest/ASMConverters.scala | 71 ++++++++++++++++ src/partest/scala/tools/partest/BytecodeTest.scala | 94 ++++++++-------------- 2 files changed, 104 insertions(+), 61 deletions(-) create mode 100644 src/partest/scala/tools/partest/ASMConverters.scala diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest/scala/tools/partest/ASMConverters.scala new file mode 100644 index 0000000000..d618e086f4 --- /dev/null +++ b/src/partest/scala/tools/partest/ASMConverters.scala @@ -0,0 +1,71 @@ +package scala.tools.partest + +import scala.collection.JavaConverters._ +import scala.tools.asm +import 
asm.tree.{ClassNode, MethodNode, InsnList} + +/** Makes using ASM from ByteCodeTests more convenient. + * + * Wraps ASM instructions in case classes so that equals and toString work + * for the purpose of bytecode diffing and pretty printing. + */ +trait ASMConverters { + // wrap ASM's instructions so we get case class-style `equals` and `toString` + object instructions { + def fromMethod(meth: MethodNode): List[Instruction] = { + val insns = meth.instructions + val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) } + + asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]] + } + + sealed abstract class Instruction { def opcode: String } + case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction + case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction + case class Op (opcode: String) extends Instruction + case class IntOp (opcode: String, operand: Int) extends Instruction + case class Jump (opcode: String, label: Label) extends Instruction + case class Ldc (opcode: String, cst: Any) extends Instruction + case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction + case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction + case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction + case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction + case class TypeOp (opcode: String, desc: String) extends Instruction + case class VarOp (opcode: String, `var`: Int) extends Instruction + case class Label (offset: Int) extends Instruction { def opcode: String = "" } + case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" } + case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" } + } + + abstract class AsmToScala { + import instructions._ + + def labelIndex(l: asm.tree.AbstractInsnNode): Int + + def mapOver(is: List[Any]): List[Any] = is map { + case i: asm.tree.AbstractInsnNode => apply(i) + case x => x + } + + def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?" 
+ def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList + def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label] + def apply(x: asm.tree.AbstractInsnNode): Instruction = x match { + case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String) + case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int) + case i: asm.tree.InsnNode => Op (op(i)) + case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int) + case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label)) + case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any) + case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]]) + case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]]) + case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String) + case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int) + case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String) + case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int) + case i: asm.tree.LabelNode => Label (labelIndex(x)) + case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack))) + case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label) + } + } +} \ No newline at end of file diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala index 1f43ee7d2e..41329a8264 100644 --- a/src/partest/scala/tools/partest/BytecodeTest.scala +++ b/src/partest/scala/tools/partest/BytecodeTest.scala @@ -28,7 +28,7 @@ import java.io.InputStream * See test/files/jvm/bytecode-test-example for an example of bytecode test. 
* */ -abstract class BytecodeTest { +abstract class BytecodeTest extends ASMConverters { /** produce the output to be compared against a checkfile */ protected def show(): Unit @@ -40,9 +40,38 @@ abstract class BytecodeTest { val isa = instructions.fromMethod(methA) val isb = instructions.fromMethod(methB) if (isa == isb) println("bytecode identical") - else (isa, isb).zipped.foreach { case (a, b) => - if (a == b) println("OK : "+ a) - else println("DIFF: "+ a +" <=> "+ b) + else diffInstructions(isa, isb) + } + + import instructions._ + // bytecode is equal modulo local variable numbering + def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match { + case _ if a == b => true + case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true + case _ => false + } + + def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = { + val isa = fromMethod(methA) + val isb = fromMethod(methB) + if (isa == isb) println("bytecode identical") + else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar") + else diffInstructions(isa, isb) + } + + def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = { + val len = Math.max(isa.length, isb.length) + if (len > 0 ) { + val width = isa.map(_.toString.length).max + val lineWidth = len.toString.length + (1 to len) foreach { line => + val isaPadded = isa.map(_.toString) orElse Stream.continually("") + val isbPadded = isb.map(_.toString) orElse Stream.continually("") + val a = isaPadded(line-1) + val b = isbPadded(line-1) + + println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""") + } } } @@ -70,61 +99,4 @@ abstract class BytecodeTest { val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath) new JavaClassPath(containers, DefaultJavaContext) } - - // wrap ASM's instructions so we get case class-style `equals` and `toString` - object instructions { - def fromMethod(meth: MethodNode): List[instructions.Instruction] = { - val insns = meth.instructions - val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) } - - asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[instructions.Instruction]] - } - - sealed abstract class Instruction { def opcode: Int } - case class Field (opcode: Int, desc: String, name: String, owner: String) extends Instruction - case class Incr (opcode: Int, incr: Int, `var`: Int) extends Instruction - case class Op (opcode: Int) extends Instruction - case class IntOp (opcode: Int, operand: Int) extends Instruction - case class Jump (opcode: Int, label: Label) extends Instruction - case class Ldc (opcode: Int, cst: Any) extends Instruction - case class LookupSwitch (opcode: Int, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction - case class TableSwitch (opcode: Int, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction - case class Method (opcode: Int, desc: String, name: String, owner: String) extends Instruction - case class NewArray (opcode: Int, desc: String, dims: Int) extends Instruction - case class TypeOp (opcode: Int, desc: String) extends Instruction - case class VarOp (opcode: Int, `var`: Int) extends Instruction - case class Label (opcode: Int, offset: Int) extends Instruction - case class FrameEntry (opcode: Int, local: List[Any], stack: List[Any]) extends Instruction - case class LineNumber (opcode: Int, line: Int, start: Label) 
extends Instruction - } - - abstract class AsmToScala { - import instructions._ - def labelIndex(l: asm.tree.AbstractInsnNode): Int - - def mapOver(is: List[Any]): List[Any] = is map { - case i: asm.tree.AbstractInsnNode => apply(i) - case x => x - } - - def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList - def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label] - def apply(x: asm.tree.AbstractInsnNode): Instruction = x match { - case i: asm.tree.FieldInsnNode => Field (i.getOpcode: Int, i.desc: String, i.name: String, i.owner: String) - case i: asm.tree.IincInsnNode => Incr (i.getOpcode: Int, i.incr: Int, i.`var`: Int) - case i: asm.tree.InsnNode => Op (i.getOpcode: Int) - case i: asm.tree.IntInsnNode => IntOp (i.getOpcode: Int, i.operand: Int) - case i: asm.tree.JumpInsnNode => Jump (i.getOpcode: Int, this(i.label)) - case i: asm.tree.LdcInsnNode => Ldc (i.getOpcode: Int, i.cst: Any) - case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (i.getOpcode: Int, this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]]) - case i: asm.tree.TableSwitchInsnNode => TableSwitch (i.getOpcode: Int, this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]]) - case i: asm.tree.MethodInsnNode => Method (i.getOpcode: Int, i.desc: String, i.name: String, i.owner: String) - case i: asm.tree.MultiANewArrayInsnNode => NewArray (i.getOpcode: Int, i.desc: String, i.dims: Int) - case i: asm.tree.TypeInsnNode => TypeOp (i.getOpcode: Int, i.desc: String) - case i: asm.tree.VarInsnNode => VarOp (i.getOpcode: Int, i.`var`: Int) - case i: asm.tree.LabelNode => Label (i.getOpcode: Int, labelIndex(x)) - case i: asm.tree.FrameNode => FrameEntry (i.getOpcode: Int, mapOver(lst(i.local)), mapOver(lst(i.stack))) - case i: asm.tree.LineNumberNode => LineNumber (i.getOpcode: Int, i.line: Int, this(i.start): Label) - } - } } -- cgit v1.2.3 From 62b37dd9a87afd17a67752c6c1b174987817b3e9 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 25 Jan 2013 10:57:49 -0800 Subject: refactor: prepare null check redundancy analysis this commit doesn't change any behavior --- .../tools/nsc/typechecker/PatternMatching.scala | 51 ++++++++++++++++------ 1 file changed, 38 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index b0745b4c09..9b2898741e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -409,14 +409,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // example check: List[Int] <:< ::[Int] // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530) - val (typeTestTreeMaker, patBinderOrCasted) = + val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) = if (needsTypeTest(patBinder.info.widen, extractor.paramType)) { // chain a type-testing extractor before the actual extractor call // it tests the type, checks the outer pointer and casts to the expected type // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder) val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true) - 
(List(treeMaker), treeMaker.nextBinder) + (List(treeMaker), treeMaker.nextBinder, false) } else { // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it) // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors) @@ -426,10 +426,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL if (settings.developer.value && !(patBinder.info =:= extractor.paramType)) devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}") */ - (Nil, patBinder setInfo extractor.paramType) + (Nil, patBinder setInfo extractor.paramType, false) } - withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*) + withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*) } @@ -622,8 +622,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // to which type should the previous binder be casted? def paramType : Type - // binder has been casted to paramType if necessary - def treeMaker(binder: Symbol, pos: Position): TreeMaker + /** Create the TreeMaker that embodies this extractor call + * + * `binder` has been casted to `paramType` if necessary + * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null + * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder + */ + def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker // `subPatBinders` are the variables bound by this pattern in the following patterns // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is) @@ -731,8 +736,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last) protected def rawSubPatTypes = constructorTp.paramTypes - // binder has type paramType - def treeMaker(binder: Symbol, pos: Position): TreeMaker = { + /** Create the TreeMaker that embodies this extractor call + * + * `binder` has been casted to `paramType` if necessary + * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null + * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder + */ + def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { val paramAccessors = binder.constrParamAccessors // binders corresponding to mutable fields should be stored (SI-5158, SI-6070) val mutableBinders = @@ -741,7 +751,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL else Nil // checks binder ne null before chaining to the next extractor - ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders) + ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull) } // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component @@ -763,7 +773,12 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def resultType = tpe.finalResultType def isSeq = extractorCall.symbol.name == nme.unapplySeq - def 
treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = { + /** Create the TreeMaker that embodies this extractor call + * + * `binder` has been casted to `paramType` if necessary + * `binderKnownNonNull` is not used in this subclass + */ + def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern) val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type @@ -1081,7 +1096,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])( val subPatBinders: List[Symbol], val subPatRefs: List[Tree], - val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders { + val mutableBinders: List[Symbol], + binderKnownNonNull: Boolean) extends FunTreeMaker with PreserveSubPatBinders { import CODE._ val nextBinder = prevBinder // just passing through @@ -1092,8 +1108,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def chainBefore(next: Tree)(casegen: Casegen): Tree = { val nullCheck = REF(prevBinder) OBJ_NE NULL - val cond = extraCond map (nullCheck AND _) getOrElse nullCheck - casegen.ifThenElseZero(cond, bindSubPats(substitution(next))) + val cond = + if (binderKnownNonNull) extraCond + else (extraCond map (nullCheck AND _) + orElse Some(nullCheck)) + + cond match { + case Some(cond) => + casegen.ifThenElseZero(cond, bindSubPats(substitution(next))) + case _ => + bindSubPats(substitution(next)) + } } override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution) -- cgit v1.2.3 From 71ea3e8278aad030cbe8c9093fe49790a4e419cb Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 25 Jan 2013 14:16:27 -0800 Subject: no null check for type-tested unapply arg pattern matching on case classes where pattern is not known to be a subclass of the unapply's argument type used to result in code like: ``` if (x1.isInstanceOf[Foo]) { val x2 = x1.asInstanceOf[Foo] if (x2 != null) { // redundant ... 
} } ``` this wastes byte code on the redundant null check with this patch, when previous type tests imply the variable cannot be null, there's no null check --- .../tools/nsc/typechecker/PatternMatching.scala | 35 +++- test/files/jvm/patmat_opt_no_nullcheck.check | 1 + test/files/jvm/patmat_opt_no_nullcheck.flags | 1 + .../jvm/patmat_opt_no_nullcheck/Analyzed_1.scala | 25 +++ test/files/jvm/patmat_opt_no_nullcheck/test.scala | 8 + test/files/run/inline-ex-handlers.check | 206 ++++++++++----------- 6 files changed, 167 insertions(+), 109 deletions(-) create mode 100644 test/files/jvm/patmat_opt_no_nullcheck.check create mode 100644 test/files/jvm/patmat_opt_no_nullcheck.flags create mode 100644 test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala create mode 100644 test/files/jvm/patmat_opt_no_nullcheck/test.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 9b2898741e..f32ca4bd8e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -409,6 +409,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // example check: List[Int] <:< ::[Int] // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530) + // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType` val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) = if (needsTypeTest(patBinder.info.widen, extractor.paramType)) { // chain a type-testing extractor before the actual extractor call @@ -416,7 +417,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder) val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true) - (List(treeMaker), treeMaker.nextBinder, false) + + // check whether typetest implies patBinder is not null, + // even though the eventual null check will be on patBinderOrCasted + // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder) + (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder)) } else { // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it) // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors) @@ -773,11 +778,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def resultType = tpe.finalResultType def isSeq = extractorCall.symbol.name == nme.unapplySeq - /** Create the TreeMaker that embodies this extractor call - * - * `binder` has been casted to `paramType` if necessary - * `binderKnownNonNull` is not used in this subclass - */ + /** Create the TreeMaker that embodies this extractor call + * + * `binder` has been casted to `paramType` if necessary + * `binderKnownNonNull` is not used in this subclass + * + * TODO: implement review feedback by @retronym: + * Passing the pair of values around suggests: + * case class Binder(sym: Symbol, knownNotNull: Boolean). 
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch. + */ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern) val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) @@ -1177,6 +1187,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def eqTest(pat: Tree, testedBinder: Symbol): Result = false def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false } + + def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy { + type Result = Boolean + + def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder + def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false + def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder + def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null + def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null + def and(a: Result, b: Result): Result = a || b + } } /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations) @@ -1260,6 +1281,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission) def isPureTypeTest = renderCondition(pureTypeTestChecker) + def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder)) + override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp) } diff --git a/test/files/jvm/patmat_opt_no_nullcheck.check b/test/files/jvm/patmat_opt_no_nullcheck.check new file mode 100644 index 0000000000..43f53aba12 --- /dev/null +++ b/test/files/jvm/patmat_opt_no_nullcheck.check @@ -0,0 +1 @@ +bytecode identical diff --git a/test/files/jvm/patmat_opt_no_nullcheck.flags b/test/files/jvm/patmat_opt_no_nullcheck.flags new file mode 100644 index 0000000000..1182725e86 --- /dev/null +++ b/test/files/jvm/patmat_opt_no_nullcheck.flags @@ -0,0 +1 @@ +-optimize \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala new file mode 100644 index 0000000000..1594eb523c --- /dev/null +++ b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala @@ -0,0 +1,25 @@ +// this class's bytecode, compiled under -optimize is analyzed by the test +// method a's bytecode should be identical to method b's bytecode +case class Foo(x: Any) + +class SameBytecode { + def a = + (Foo(1): Any) match { + case Foo(_: String) => + } + + // there's no null check + def b: Unit = { + val x1: Any = Foo(1) + if (x1.isInstanceOf[Foo]) { + val x3 = x1.asInstanceOf[Foo] + if (x3.x.isInstanceOf[String]) { + val x4 = x3.x.asInstanceOf[String] + val x = () + return + } + } + + throw new MatchError(x1) + } +} \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_no_nullcheck/test.scala b/test/files/jvm/patmat_opt_no_nullcheck/test.scala new file mode 100644 index 0000000000..2927e763d5 --- /dev/null +++ b/test/files/jvm/patmat_opt_no_nullcheck/test.scala @@ -0,0 +1,8 @@ +import scala.tools.partest.BytecodeTest 
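// (editorial aside, not part of this new test file) the driver below loads the class
// compiled from Analyzed_1.scala and asserts that methods `a` and `b` compiled to
// identical bytecode, i.e. that the pattern matcher emitted no redundant null check.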
+ +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("SameBytecode") + sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b")) + } +} diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check index 282542a732..905dfc3ee7 100644 --- a/test/files/run/inline-ex-handlers.check +++ b/test/files/run/inline-ex-handlers.check @@ -26,54 +26,55 @@ --- > locals: value args, variable result, value ex6, value x4, value x5, value x 397c393 -< blocks: [1,2,3,4,5,8,11,13,14,16] +< blocks: [1,2,3,4,5,8,10,11,13] --- -> blocks: [1,2,3,5,8,11,13,14,16,17] +> blocks: [1,2,3,5,8,10,11,13,14] 421c417,426 < 103 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 17 +> ? JUMP 14 > -> 17: +> 14: > 101 LOAD_LOCAL(value ex6) > 101 STORE_LOCAL(value x4) > 101 SCOPE_ENTER value x4 > 106 LOAD_LOCAL(value x4) > 106 IS_INSTANCE REF(class MyException) -> 106 CZJUMP (BOOL)NE ? 5 : 11 +> 106 CZJUMP (BOOL)NE ? 5 : 8 434,436d438 < 101 JUMP 4 < < 4: -450,453d451 +446,449d447 < 106 LOAD_LOCAL(value x5) < 106 CALL_METHOD MyException.message (dynamic) < 106 STORE_LOCAL(value message) < 106 SCOPE_ENTER value message -455c453,454 +451c449,450 < 106 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 106 CALL_METHOD MyException.message (dynamic) -527c526 +523c522 < blocks: [1,2,3,4,6,7,8,9,10] --- > blocks: [1,2,3,4,6,7,8,9,10,11,12,13] -556c555,560 +552c551 < 306 THROW(MyException) --- > ? JUMP 11 -> +553a553,557 > 11: > ? LOAD_LOCAL(variable monitor4) > 305 MONITOR_EXIT > ? JUMP 12 -562c566 +> +558c562 < ? THROW(Throwable) --- > ? JUMP 12 -568c572,579 +564c568,575 < ? THROW(Throwable) --- > ? STORE_LOCAL(value t) @@ -84,7 +85,7 @@ > 304 MONITOR_EXIT > ? STORE_LOCAL(value t) > ? JUMP 13 -583a595,606 +579a591,602 > 13: > 310 LOAD_MODULE object Predef > 310 CALL_PRIMITIVE(StartConcat) @@ -97,35 +98,35 @@ > 310 CALL_METHOD scala.Predef.println (dynamic) > 310 JUMP 2 > -592c615 +588c611 < catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6 --- > catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6 -595c618 +591c614 < catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3 --- > catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3 -627c650 +623c646 < blocks: [1,2,3,4,5,6,7,9,10] --- > blocks: [1,2,3,4,5,6,7,9,10,11,12] -651c674,675 +647c670,671 < 78 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 11 -652a677,681 +648a673,677 > 11: > 81 LOAD_LOCAL(value e) > ? STORE_LOCAL(variable exc1) > ? JUMP 12 > -680c709,710 +676c705,706 < 81 THROW(Exception) --- > ? STORE_LOCAL(variable exc1) > ? 
JUMP 12 -696a727,739 +692a723,735 > 12: > 83 LOAD_MODULE object Predef > 83 CONSTANT("finally") @@ -139,88 +140,88 @@ > 84 LOAD_LOCAL(variable exc1) > 84 THROW(Throwable) > -702c745 +698c741 < catch () in ArrayBuffer(4, 6, 7, 9) starting at: 3 --- > catch () in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3 -726c769 +722c765 < locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x -728c771 -< blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31] +724c767 +< blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25] --- -> blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31,32,33,34] -752c795,802 +> blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28] +748c791,798 < 172 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 32 +> ? JUMP 26 > -> 32: +> 26: > 170 LOAD_LOCAL(value ex6) > 170 STORE_LOCAL(value x4) > 170 SCOPE_ENTER value x4 -> 170 JUMP 18 -799,802d848 +> 170 JUMP 15 +791,794d840 < 175 LOAD_LOCAL(value x5) < 175 CALL_METHOD MyException.message (dynamic) < 175 STORE_LOCAL(value message) < 175 SCOPE_ENTER value message -804c850,851 +796c842,843 < 176 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 176 CALL_METHOD MyException.message (dynamic) -808c855,856 +800c847,848 < 177 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 177 CALL_METHOD MyException.message (dynamic) -810c858,859 +802c850,851 < 177 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 33 -814c863,864 +> ? JUMP 27 +806c855,856 < 170 THROW(Throwable) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 33 -823a874,879 -> 33: +> ? JUMP 27 +815a866,871 +> 27: > 169 LOAD_LOCAL(value ex6) > 169 STORE_LOCAL(value x4) > 169 SCOPE_ENTER value x4 > 169 JUMP 5 > -838,841d893 +826,829d881 < 180 LOAD_LOCAL(value x5) < 180 CALL_METHOD MyException.message (dynamic) < 180 STORE_LOCAL(value message) < 180 SCOPE_ENTER value message -843c895,896 +831c883,884 < 181 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 181 CALL_METHOD MyException.message (dynamic) -847c900,901 +835c888,889 < 182 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 182 CALL_METHOD MyException.message (dynamic) -849c903,904 +837c891,892 < 182 THROW(MyException) --- > ? STORE_LOCAL(variable exc2) -> ? JUMP 34 -853c908,909 +> ? JUMP 28 +841c896,897 < 169 THROW(Throwable) --- > ? STORE_LOCAL(variable exc2) -> ? JUMP 34 -869a926,938 -> 34: +> ? 
JUMP 28 +857a914,926 +> 28: > 184 LOAD_MODULE object Predef > 184 CONSTANT("finally") > 184 CALL_METHOD scala.Predef.println (dynamic) @@ -233,159 +234,158 @@ > 185 LOAD_LOCAL(variable exc2) > 185 THROW(Throwable) > -875c944 -< catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30) starting at: 4 +863c932 +< catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4 --- -> catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30, 32) starting at: 4 -878c947 -< catch () in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30) starting at: 3 +> catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4 +866c935 +< catch () in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3 --- -> catch () in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30, 32, 33) starting at: 3 -902c971 +> catch () in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3 +890c959 < locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value e, value ex6, value x4, value x5, value x -904c973 -< blocks: [1,2,3,6,7,8,11,14,16,17,19] +892c961 +< blocks: [1,2,3,6,7,8,11,13,14,16] --- -> blocks: [1,2,3,6,7,8,11,14,16,17,19,20] -928c997,1004 +> blocks: [1,2,3,6,7,8,11,13,14,16,17] +916c985,992 < 124 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 20 +> ? JUMP 17 > -> 20: +> 17: > 122 LOAD_LOCAL(value ex6) > 122 STORE_LOCAL(value x4) > 122 SCOPE_ENTER value x4 > 122 JUMP 7 -957,960d1032 +941,944d1016 < 127 LOAD_LOCAL(value x5) < 127 CALL_METHOD MyException.message (dynamic) < 127 STORE_LOCAL(value message) < 127 SCOPE_ENTER value message -962c1034,1035 +946c1018,1019 < 127 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 127 CALL_METHOD MyException.message (dynamic) -991c1064 -< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19) starting at: 3 +975c1048 +< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3 --- -> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19, 20) starting at: 3 -1015c1088 +> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3 +999c1072 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e --- > locals: value args, variable result, value ex6, value x4, value x5, value x, value e -1017c1090 -< blocks: [1,2,3,4,5,8,11,15,16,17,19] +1001c1074 +< blocks: [1,2,3,4,5,8,12,13,14,16] --- -> blocks: [1,2,3,5,8,11,15,16,17,19,20] -1041c1114,1123 +> blocks: [1,2,3,5,8,12,13,14,16,17] +1025c1098,1107 < 148 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 20 +> ? JUMP 17 > -> 20: +> 17: > 145 LOAD_LOCAL(value ex6) > 145 STORE_LOCAL(value x4) > 145 SCOPE_ENTER value x4 > 154 LOAD_LOCAL(value x4) > 154 IS_INSTANCE REF(class MyException) -> 154 CZJUMP (BOOL)NE ? 5 : 11 -1062,1064d1143 +> 154 CZJUMP (BOOL)NE ? 5 : 8 +1046,1048d1127 < 145 JUMP 4 < < 4: -1078,1081d1156 +1058,1061d1136 < 154 LOAD_LOCAL(value x5) < 154 CALL_METHOD MyException.message (dynamic) < 154 STORE_LOCAL(value message) < 154 SCOPE_ENTER value message -1083c1158,1159 +1063c1138,1139 < 154 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 154 CALL_METHOD MyException.message (dynamic) -1300c1376 +1280c1356 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1324c1400,1401 +1304c1380,1387 < 38 THROW(IllegalArgumentException) --- > ? 
STORE_LOCAL(value e) > ? JUMP 8 -1325a1403,1408 +> > 8: > 42 LOAD_MODULE object Predef > 42 CONSTANT("IllegalArgumentException") > 42 CALL_METHOD scala.Predef.println (dynamic) > 42 JUMP 2 -> -1371c1454 +1351c1434 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, value x4, value x5, value x -1373c1456 -< blocks: [1,2,3,4,5,8,11,13,14,16,17,19] +1353c1436 +< blocks: [1,2,3,4,5,8,10,11,13,14,16] --- -> blocks: [1,2,3,5,8,11,13,14,16,17,19,20] -1397c1480,1481 +> blocks: [1,2,3,5,8,10,11,13,14,16,17] +1377c1460,1461 < 203 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 20 -1417c1501,1510 +> ? JUMP 17 +1397c1481,1490 < 209 THROW(MyException) --- > ? STORE_LOCAL(value ex6) -> ? JUMP 20 +> ? JUMP 17 > -> 20: +> 17: > 200 LOAD_LOCAL(value ex6) > 200 STORE_LOCAL(value x4) > 200 SCOPE_ENTER value x4 > 212 LOAD_LOCAL(value x4) > 212 IS_INSTANCE REF(class MyException) -> 212 CZJUMP (BOOL)NE ? 5 : 11 -1430,1432d1522 +> 212 CZJUMP (BOOL)NE ? 5 : 8 +1410,1412d1502 < 200 JUMP 4 < < 4: -1446,1449d1535 +1422,1425d1511 < 212 LOAD_LOCAL(value x5) < 212 CALL_METHOD MyException.message (dynamic) < 212 STORE_LOCAL(value message) < 212 SCOPE_ENTER value message -1451c1537,1538 +1427c1513,1514 < 213 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 213 CALL_METHOD MyException.message (dynamic) -1495c1582 +1471c1558 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1519c1606,1607 +1495c1582,1583 < 58 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 8 -1520a1609,1614 +1496a1585,1590 > 8: > 62 LOAD_MODULE object Predef > 62 CONSTANT("RuntimeException") > 62 CALL_METHOD scala.Predef.println (dynamic) > 62 JUMP 2 > -1568c1662 +1544c1638 < blocks: [1,2,3,4] --- > blocks: [1,2,3,4,5] -1588c1682,1687 +1564c1658,1663 < 229 THROW(MyException) --- > ? JUMP 5 @@ -394,19 +394,19 @@ > ? LOAD_LOCAL(variable monitor1) > 228 MONITOR_EXIT > 228 THROW(Throwable) -1594c1693 +1570c1669 < ? THROW(Throwable) --- > 228 THROW(Throwable) -1622c1721 +1598c1697 < locals: value args, variable result, variable monitor2, variable monitorResult1 --- > locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1 -1624c1723 +1600c1699 < blocks: [1,2,3,4] --- > blocks: [1,2,3,4,5] -1647c1746,1754 +1623c1722,1730 < 245 THROW(MyException) --- > ? STORE_LOCAL(value exception$1) @@ -418,7 +418,7 @@ > ? LOAD_LOCAL(variable monitor2) > 244 MONITOR_EXIT > 244 THROW(Throwable) -1653c1760 +1629c1736 < ? 
THROW(Throwable) --- > 244 THROW(Throwable) -- cgit v1.2.3 From 494ba94518c9b40b7bf600ec7b70561842375597 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 29 Jan 2013 00:13:14 -0800 Subject: don't store subpats bound to underscore also, tweak fix in place for SI-5158 to appease SI-6941 don't store mutable fields from scala.* as we can assume these classes are well-behaved and do not mutate their case class fields --- .../tools/nsc/typechecker/PatternMatching.scala | 64 ++++++++++++++++++---- test/files/jvm/patmat_opt_ignore_underscore.check | 1 + test/files/jvm/patmat_opt_ignore_underscore.flags | 1 + .../patmat_opt_ignore_underscore/Analyzed_1.scala | 30 ++++++++++ .../jvm/patmat_opt_ignore_underscore/test.scala | 15 +++++ test/files/run/t6288.check | 10 +--- 6 files changed, 102 insertions(+), 19 deletions(-) create mode 100644 test/files/jvm/patmat_opt_ignore_underscore.check create mode 100644 test/files/jvm/patmat_opt_ignore_underscore.flags create mode 100644 test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala create mode 100644 test/files/jvm/patmat_opt_ignore_underscore/test.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index f32ca4bd8e..8ae2e0a3b8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -647,6 +647,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL case bp => bp } + // never store these in local variables (for PreserveSubPatBinders) + lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{ + case (b, PatternBoundToUnderscore()) => b + }.toSet + def subPatTypes: List[Type] = if(isSeq) { val TypeRef(pre, SeqClass, args) = seqTp @@ -750,13 +755,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { val paramAccessors = binder.constrParamAccessors // binders corresponding to mutable fields should be stored (SI-5158, SI-6070) + // make an exception for classes under the scala package as they should be well-behaved, + // to optimize matching on List val mutableBinders = - if (paramAccessors exists (_.isMutable)) + if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) && + (paramAccessors exists (_.isMutable))) subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder } else Nil // checks binder ne null before chaining to the next extractor - ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull) + ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders) } // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component @@ -792,7 +800,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern) val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type - 
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted) + ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders) } override protected def seqTree(binder: Symbol): Tree = @@ -849,6 +857,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL } } + object PatternBoundToUnderscore { + def unapply(pat: Tree): Boolean = pat match { + case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol! + case Ident(nme.WILDCARD) => true + case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_)) + case Typed(PatternBoundToUnderscore(), _) => true + case _ => false + } + } + object Bound { def unapply(t: Tree): Option[(Symbol, Tree)] = t match { case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions @@ -1016,10 +1034,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL trait PreserveSubPatBinders extends TreeMaker { val subPatBinders: List[Symbol] val subPatRefs: List[Tree] + val ignoredSubPatBinders: Set[Symbol] // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker - def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty + // sub patterns bound to wildcard (_) are never stored as they can't be referenced + // dirty debuggers will have to get dirty to see the wildcards + lazy val storedBinders: Set[Symbol] = + (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders + + // e.g., mutable fields of a case class in ProductExtractorTreeMaker + def extraStoredBinders: Set[Symbol] def emitVars = storedBinders.nonEmpty @@ -1040,10 +1065,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution import CODE._ - def bindSubPats(in: Tree): Tree = if (!emitVars) in + def bindSubPats(in: Tree): Tree = + if (!emitVars) in else { - val (subPatBindersStored, subPatRefsStored) = stored.unzip - Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in) + // binders in `subPatBindersStored` that are referenced by tree `in` + val usedBinders = new collection.mutable.HashSet[Symbol]() + // all potentially stored subpat binders + val potentiallyStoredBinders = stored.unzip._1.toSet + // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders + in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol) + + if (usedBinders.isEmpty) in + else { + // only store binders actually used + val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip + Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in) + } } } @@ -1063,7 +1100,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL val subPatRefs: List[Tree], extractorReturnsBoolean: Boolean, val checkedLength: Option[Int], - val prevBinder: Symbol) extends FunTreeMaker with PreserveSubPatBinders { + val prevBinder: Symbol, + val ignoredSubPatBinders: 
Set[Symbol] + ) extends FunTreeMaker with PreserveSubPatBinders { + + def extraStoredBinders: Set[Symbol] = Set() def chainBefore(next: Tree)(casegen: Casegen): Tree = { val condAndNext = extraCond match { @@ -1107,14 +1148,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL val subPatBinders: List[Symbol], val subPatRefs: List[Tree], val mutableBinders: List[Symbol], - binderKnownNonNull: Boolean) extends FunTreeMaker with PreserveSubPatBinders { + binderKnownNonNull: Boolean, + val ignoredSubPatBinders: Set[Symbol] + ) extends FunTreeMaker with PreserveSubPatBinders { import CODE._ val nextBinder = prevBinder // just passing through // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070) - // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant) - override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet + def extraStoredBinders: Set[Symbol] = mutableBinders.toSet def chainBefore(next: Tree)(casegen: Casegen): Tree = { val nullCheck = REF(prevBinder) OBJ_NE NULL diff --git a/test/files/jvm/patmat_opt_ignore_underscore.check b/test/files/jvm/patmat_opt_ignore_underscore.check new file mode 100644 index 0000000000..43f53aba12 --- /dev/null +++ b/test/files/jvm/patmat_opt_ignore_underscore.check @@ -0,0 +1 @@ +bytecode identical diff --git a/test/files/jvm/patmat_opt_ignore_underscore.flags b/test/files/jvm/patmat_opt_ignore_underscore.flags new file mode 100644 index 0000000000..1182725e86 --- /dev/null +++ b/test/files/jvm/patmat_opt_ignore_underscore.flags @@ -0,0 +1 @@ +-optimize \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala new file mode 100644 index 0000000000..ac59e41ba7 --- /dev/null +++ b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala @@ -0,0 +1,30 @@ +// this class's bytecode, compiled under -optimize is analyzed by the test +// method a's bytecode should be identical to method b's bytecode +// this is not the best test for shielding against regressing on this particular issue, +// but it sets the stage for checking the bytecode emitted by the pattern matcher and +// comparing it to manually tuned code using if/then/else etc. 
+class SameBytecode { + case class Foo(x: Any, y: String) + + def a = + Foo(1, "a") match { + case Foo(_: String, y) => y + } + + // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat) + // the test checks that bytecode for a and b is identical (modulo line numbers) + // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example) + // note that the actual tree is quite bad: we do an unnecessary null check, isInstanceOf and local val (x3) + // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat) + def b: String = { + val x1 = Foo(1, "a") + if (x1.ne(null)) { + if (x1.x.isInstanceOf[String]) { + val x3 = x1.x.asInstanceOf[String] + return x1.y + } + } + + throw new MatchError(x1) + } +} \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_ignore_underscore/test.scala b/test/files/jvm/patmat_opt_ignore_underscore/test.scala new file mode 100644 index 0000000000..6179101a7e --- /dev/null +++ b/test/files/jvm/patmat_opt_ignore_underscore/test.scala @@ -0,0 +1,15 @@ +import scala.tools.partest.BytecodeTest + +import scala.tools.nsc.util.JavaClassPath +import java.io.InputStream +import scala.tools.asm +import asm.ClassReader +import asm.tree.{ClassNode, InsnList} +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("SameBytecode") + sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b")) + } +} diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index af6bd5d269..4895c2c007 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -11,10 +11,7 @@ [64]case5()[84]{ [84] val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1); [84]if ([84]o7.isEmpty.unary_!) - [84]{ - [90]val nr: [90]Int = [90]o7.get; - [97][97]matchEnd4([97]()) - } + [97][97]matchEnd4([97]()) else [84][84]case6() }; @@ -38,10 +35,7 @@ [195] val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1); [195]if ([195]o7.isEmpty.unary_!) 
[195]if ([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0))) - [195]{ - [201]val nr: [201]Int = [201][201]o7.get.apply([201]0); - [208][208]matchEnd4([208]()) - } + [208][208]matchEnd4([208]()) else [195][195]case6() else -- cgit v1.2.3 From b47bb0fe1a0eb2f60f280ef0fa62a6f0be65580b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 29 Jan 2013 00:21:43 -0800 Subject: no type test if static type <:< primitive value class --- .../tools/nsc/typechecker/PatternMatching.scala | 44 +++++++++++++--------- test/files/jvm/patmat_opt_primitive_typetest.check | 1 + test/files/jvm/patmat_opt_primitive_typetest.flags | 1 + .../patmat_opt_primitive_typetest/Analyzed_1.scala | 25 ++++++++++++ .../jvm/patmat_opt_primitive_typetest/test.scala | 8 ++++ 5 files changed, 61 insertions(+), 18 deletions(-) create mode 100644 test/files/jvm/patmat_opt_primitive_typetest.check create mode 100644 test/files/jvm/patmat_opt_primitive_typetest.flags create mode 100644 test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala create mode 100644 test/files/jvm/patmat_opt_primitive_typetest/test.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index 8ae2e0a3b8..c6f80c9b20 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -411,7 +411,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530) // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType` val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) = - if (needsTypeTest(patBinder.info.widen, extractor.paramType)) { + if (patBinder.info.widen <:< extractor.paramType) { + // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it) + // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors) + // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call + // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now + /* TODO: uncomment when `settings.developer` and `devWarning` become available + if (settings.developer.value && !(patBinder.info =:= extractor.paramType)) + devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}") + */ + (Nil, patBinder setInfo extractor.paramType, false) + } else { // chain a type-testing extractor before the actual extractor call // it tests the type, checks the outer pointer and casts to the expected type // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC] @@ -422,16 +432,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // even though the eventual null check will be on patBinderOrCasted // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder) (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder)) - } else { - // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible 
with it) - // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors) - // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call - // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now - /* TODO: uncomment when `settings.developer` and `devWarning` become available - if (settings.developer.value && !(patBinder.info =:= extractor.paramType)) - devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}") - */ - (Nil, patBinder setInfo extractor.paramType, false) } withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*) @@ -1176,9 +1176,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution) } - // typetag-based tests are inserted by the type checker - def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt) - object TypeTestTreeMaker { // factored out so that we can consistently generate other representations of the tree that implements the test // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest) @@ -1192,12 +1189,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def equalsTest(pat: Tree, testedBinder: Symbol): Result def eqTest(pat: Tree, testedBinder: Symbol): Result def and(a: Result, b: Result): Result + def tru: Result } object treeCondStrategy extends TypeTestCondStrategy { import CODE._ type Result = Tree def and(a: Result, b: Result): Result = a AND b + def tru = TRUE_typed def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp) def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder) @@ -1228,6 +1227,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def equalsTest(pat: Tree, testedBinder: Symbol): Result = false def eqTest(pat: Tree, testedBinder: Symbol): Result = false def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false + def tru = true } def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy { @@ -1239,6 +1239,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null def and(a: Result, b: Result): Result = a || b + def tru = false } } @@ -1308,10 +1309,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // I think it's okay: // - the isInstanceOf test includes a test for the element type // - Scala's arrays are invariant (so we don't drop type tests unsoundly) - case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) => - // do non-null check first to ensure we won't select outer on null - if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp)) - else nonNullTest(testedBinder) + case _ if 
testedBinder.info.widen <:< expectedTp => + // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer + // since the types conform, no further checking is required + if (expectedTp.typeSymbol.isPrimitiveValueClass) tru + // have to test outer and non-null only when it's a reference type + else if (expectedTp <:< AnyRefClass.tpe) { + // do non-null check first to ensure we won't select outer on null + if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp)) + else nonNullTest(testedBinder) + } else default case _ => default } @@ -1823,6 +1830,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder)) def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not == + def tru = TrueCond } ttm.renderCondition(condStrategy) case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree)) diff --git a/test/files/jvm/patmat_opt_primitive_typetest.check b/test/files/jvm/patmat_opt_primitive_typetest.check new file mode 100644 index 0000000000..43f53aba12 --- /dev/null +++ b/test/files/jvm/patmat_opt_primitive_typetest.check @@ -0,0 +1 @@ +bytecode identical diff --git a/test/files/jvm/patmat_opt_primitive_typetest.flags b/test/files/jvm/patmat_opt_primitive_typetest.flags new file mode 100644 index 0000000000..49d036a887 --- /dev/null +++ b/test/files/jvm/patmat_opt_primitive_typetest.flags @@ -0,0 +1 @@ +-optimize diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala new file mode 100644 index 0000000000..c5b8811449 --- /dev/null +++ b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala @@ -0,0 +1,25 @@ +// this class's bytecode, compiled under -optimize is analyzed by the test +// method a's bytecode should be identical to method b's bytecode +class SameBytecode { + case class Foo(x: Int, y: String) + + def a = + Foo(1, "a") match { + case Foo(_: Int, y) => y + } + + // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat) + // the test checks that bytecode for a and b is identical (modulo line numbers) + // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example) + // note that the actual tree is quite bad: we do an unnecessary null check, and local val (x3) + // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat) + def b: String = { + val x1 = Foo(1, "a") + if (x1.ne(null)) { + val x3 = x1.x + return x1.y + } + + throw new MatchError(x1) + } +} \ No newline at end of file diff --git a/test/files/jvm/patmat_opt_primitive_typetest/test.scala b/test/files/jvm/patmat_opt_primitive_typetest/test.scala new file mode 100644 index 0000000000..2927e763d5 --- /dev/null +++ b/test/files/jvm/patmat_opt_primitive_typetest/test.scala @@ -0,0 +1,8 @@ +import scala.tools.partest.BytecodeTest + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("SameBytecode") + sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b")) + } +} -- cgit v1.2.3 From 
b92396b57912f040d8f536b8a60b844ff586ff0d Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 29 Jan 2013 19:16:37 -0800 Subject: SI-6686 drop valdef unused in flatMapCond's block --- .../tools/nsc/typechecker/PatternMatching.scala | 16 ++- .../patmat_opt_ignore_underscore/Analyzed_1.scala | 1 - .../jvm/patmat_opt_no_nullcheck/Analyzed_1.scala | 1 - .../patmat_opt_primitive_typetest/Analyzed_1.scala | 1 - test/files/run/inline-ex-handlers.check | 136 ++++++++++----------- test/files/run/t6288b-jump-position.check | 6 +- 6 files changed, 80 insertions(+), 81 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index c6f80c9b20..4b53802d95 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -3792,11 +3792,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL // nextBinder: T // next == MatchMonad[U] // returns MatchMonad[U] - def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = - ifThenElseZero(cond, BLOCK( - VAL(nextBinder) === res, - next - )) + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = { + val rest = + // only emit a local val for `nextBinder` if it's actually referenced in `next` + if (next.exists(_.symbol eq nextBinder)) + BLOCK( + VAL(nextBinder) === res, + next + ) + else next + ifThenElseZero(cond, rest) + } // guardTree: Boolean // next: MatchMonad[T] diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala index ac59e41ba7..fa3639380d 100644 --- a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala +++ b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala @@ -20,7 +20,6 @@ class SameBytecode { val x1 = Foo(1, "a") if (x1.ne(null)) { if (x1.x.isInstanceOf[String]) { - val x3 = x1.x.asInstanceOf[String] return x1.y } } diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala index 1594eb523c..3a594c401e 100644 --- a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala +++ b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala @@ -14,7 +14,6 @@ class SameBytecode { if (x1.isInstanceOf[Foo]) { val x3 = x1.asInstanceOf[Foo] if (x3.x.isInstanceOf[String]) { - val x4 = x3.x.asInstanceOf[String] val x = () return } diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala index c5b8811449..e5db6c4dd0 100644 --- a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala +++ b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala @@ -16,7 +16,6 @@ class SameBytecode { def b: String = { val x1 = Foo(1, "a") if (x1.ne(null)) { - val x3 = x1.x return x1.y } diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check index 905dfc3ee7..f2f0b60687 100644 --- a/test/files/run/inline-ex-handlers.check +++ b/test/files/run/inline-ex-handlers.check @@ -21,15 +21,15 @@ < 92 JUMP 7 < < 7: -395c391 +391c387 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, value x4, value x5, value x -397c393 +393c389 < blocks: [1,2,3,4,5,8,10,11,13] --- > blocks: [1,2,3,5,8,10,11,13,14] -421c417,426 +417c413,422 < 103 THROW(MyException) 
--- > ? STORE_LOCAL(value ex6) @@ -42,39 +42,39 @@ > 106 LOAD_LOCAL(value x4) > 106 IS_INSTANCE REF(class MyException) > 106 CZJUMP (BOOL)NE ? 5 : 8 -434,436d438 +430,432d434 < 101 JUMP 4 < < 4: -446,449d447 +442,445d443 < 106 LOAD_LOCAL(value x5) < 106 CALL_METHOD MyException.message (dynamic) < 106 STORE_LOCAL(value message) < 106 SCOPE_ENTER value message -451c449,450 +447c445,446 < 106 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 106 CALL_METHOD MyException.message (dynamic) -523c522 +519c518 < blocks: [1,2,3,4,6,7,8,9,10] --- > blocks: [1,2,3,4,6,7,8,9,10,11,12,13] -552c551 +548c547 < 306 THROW(MyException) --- > ? JUMP 11 -553a553,557 +549a549,553 > 11: > ? LOAD_LOCAL(variable monitor4) > 305 MONITOR_EXIT > ? JUMP 12 > -558c562 +554c558 < ? THROW(Throwable) --- > ? JUMP 12 -564c568,575 +560c564,571 < ? THROW(Throwable) --- > ? STORE_LOCAL(value t) @@ -85,7 +85,7 @@ > 304 MONITOR_EXIT > ? STORE_LOCAL(value t) > ? JUMP 13 -579a591,602 +575a587,598 > 13: > 310 LOAD_MODULE object Predef > 310 CALL_PRIMITIVE(StartConcat) @@ -98,35 +98,35 @@ > 310 CALL_METHOD scala.Predef.println (dynamic) > 310 JUMP 2 > -588c611 +584c607 < catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6 --- > catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6 -591c614 +587c610 < catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3 --- > catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3 -623c646 +619c642 < blocks: [1,2,3,4,5,6,7,9,10] --- > blocks: [1,2,3,4,5,6,7,9,10,11,12] -647c670,671 +643c666,667 < 78 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 11 -648a673,677 +644a669,673 > 11: > 81 LOAD_LOCAL(value e) > ? STORE_LOCAL(variable exc1) > ? JUMP 12 > -676c705,706 +672c701,702 < 81 THROW(Exception) --- > ? STORE_LOCAL(variable exc1) > ? JUMP 12 -692a723,735 +688a719,731 > 12: > 83 LOAD_MODULE object Predef > 83 CONSTANT("finally") @@ -140,19 +140,19 @@ > 84 LOAD_LOCAL(variable exc1) > 84 THROW(Throwable) > -698c741 +694c737 < catch () in ArrayBuffer(4, 6, 7, 9) starting at: 3 --- > catch () in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3 -722c765 +718c761 < locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x -724c767 +720c763 < blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25] --- > blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28] -748c791,798 +744c787,794 < 172 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -163,64 +163,64 @@ > 170 STORE_LOCAL(value x4) > 170 SCOPE_ENTER value x4 > 170 JUMP 15 -791,794d840 +787,790d836 < 175 LOAD_LOCAL(value x5) < 175 CALL_METHOD MyException.message (dynamic) < 175 STORE_LOCAL(value message) < 175 SCOPE_ENTER value message -796c842,843 +792c838,839 < 176 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 176 CALL_METHOD MyException.message (dynamic) -800c847,848 +796c843,844 < 177 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 177 CALL_METHOD MyException.message (dynamic) -802c850,851 +798c846,847 < 177 THROW(MyException) --- > ? STORE_LOCAL(value ex6) > ? JUMP 27 -806c855,856 +802c851,852 < 170 THROW(Throwable) --- > ? STORE_LOCAL(value ex6) > ? 
JUMP 27 -815a866,871 +811a862,867 > 27: > 169 LOAD_LOCAL(value ex6) > 169 STORE_LOCAL(value x4) > 169 SCOPE_ENTER value x4 > 169 JUMP 5 > -826,829d881 +822,825d877 < 180 LOAD_LOCAL(value x5) < 180 CALL_METHOD MyException.message (dynamic) < 180 STORE_LOCAL(value message) < 180 SCOPE_ENTER value message -831c883,884 +827c879,880 < 181 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 181 CALL_METHOD MyException.message (dynamic) -835c888,889 +831c884,885 < 182 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 182 CALL_METHOD MyException.message (dynamic) -837c891,892 +833c887,888 < 182 THROW(MyException) --- > ? STORE_LOCAL(variable exc2) > ? JUMP 28 -841c896,897 +837c892,893 < 169 THROW(Throwable) --- > ? STORE_LOCAL(variable exc2) > ? JUMP 28 -857a914,926 +853a910,922 > 28: > 184 LOAD_MODULE object Predef > 184 CONSTANT("finally") @@ -234,23 +234,23 @@ > 185 LOAD_LOCAL(variable exc2) > 185 THROW(Throwable) > -863c932 +859c928 < catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4 --- > catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4 -866c935 +862c931 < catch () in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3 --- > catch () in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3 -890c959 +886c955 < locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value e, value ex6, value x4, value x5, value x -892c961 +888c957 < blocks: [1,2,3,6,7,8,11,13,14,16] --- > blocks: [1,2,3,6,7,8,11,13,14,16,17] -916c985,992 +912c981,988 < 124 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -261,29 +261,29 @@ > 122 STORE_LOCAL(value x4) > 122 SCOPE_ENTER value x4 > 122 JUMP 7 -941,944d1016 +937,940d1012 < 127 LOAD_LOCAL(value x5) < 127 CALL_METHOD MyException.message (dynamic) < 127 STORE_LOCAL(value message) < 127 SCOPE_ENTER value message -946c1018,1019 +942c1014,1015 < 127 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 127 CALL_METHOD MyException.message (dynamic) -975c1048 +971c1044 < catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3 --- > catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3 -999c1072 +995c1068 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e --- > locals: value args, variable result, value ex6, value x4, value x5, value x, value e -1001c1074 +997c1070 < blocks: [1,2,3,4,5,8,12,13,14,16] --- > blocks: [1,2,3,5,8,12,13,14,16,17] -1025c1098,1107 +1021c1094,1103 < 148 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -296,25 +296,25 @@ > 154 LOAD_LOCAL(value x4) > 154 IS_INSTANCE REF(class MyException) > 154 CZJUMP (BOOL)NE ? 5 : 8 -1046,1048d1127 +1042,1044d1123 < 145 JUMP 4 < < 4: -1058,1061d1136 +1054,1057d1132 < 154 LOAD_LOCAL(value x5) < 154 CALL_METHOD MyException.message (dynamic) < 154 STORE_LOCAL(value message) < 154 SCOPE_ENTER value message -1063c1138,1139 +1059c1134,1135 < 154 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 154 CALL_METHOD MyException.message (dynamic) -1280c1356 +1276c1352 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1304c1380,1387 +1300c1376,1383 < 38 THROW(IllegalArgumentException) --- > ? 
STORE_LOCAL(value e) @@ -325,20 +325,20 @@ > 42 CONSTANT("IllegalArgumentException") > 42 CALL_METHOD scala.Predef.println (dynamic) > 42 JUMP 2 -1351c1434 +1347c1430 < locals: value args, variable result, value ex6, value x4, value x5, value message, value x --- > locals: value args, variable result, value ex6, value x4, value x5, value x -1353c1436 +1349c1432 < blocks: [1,2,3,4,5,8,10,11,13,14,16] --- > blocks: [1,2,3,5,8,10,11,13,14,16,17] -1377c1460,1461 +1373c1456,1457 < 203 THROW(MyException) --- > ? STORE_LOCAL(value ex6) > ? JUMP 17 -1397c1481,1490 +1393c1477,1486 < 209 THROW(MyException) --- > ? STORE_LOCAL(value ex6) @@ -351,41 +351,41 @@ > 212 LOAD_LOCAL(value x4) > 212 IS_INSTANCE REF(class MyException) > 212 CZJUMP (BOOL)NE ? 5 : 8 -1410,1412d1502 +1406,1408d1498 < 200 JUMP 4 < < 4: -1422,1425d1511 +1418,1421d1507 < 212 LOAD_LOCAL(value x5) < 212 CALL_METHOD MyException.message (dynamic) < 212 STORE_LOCAL(value message) < 212 SCOPE_ENTER value message -1427c1513,1514 +1423c1509,1510 < 213 LOAD_LOCAL(value message) --- > ? LOAD_LOCAL(value x5) > 213 CALL_METHOD MyException.message (dynamic) -1471c1558 +1467c1554 < blocks: [1,2,3,4,5,7] --- > blocks: [1,2,3,4,5,7,8] -1495c1582,1583 +1491c1578,1579 < 58 THROW(IllegalArgumentException) --- > ? STORE_LOCAL(value e) > ? JUMP 8 -1496a1585,1590 +1492a1581,1586 > 8: > 62 LOAD_MODULE object Predef > 62 CONSTANT("RuntimeException") > 62 CALL_METHOD scala.Predef.println (dynamic) > 62 JUMP 2 > -1544c1638 +1540c1634 < blocks: [1,2,3,4] --- > blocks: [1,2,3,4,5] -1564c1658,1663 +1560c1654,1659 < 229 THROW(MyException) --- > ? JUMP 5 @@ -394,19 +394,19 @@ > ? LOAD_LOCAL(variable monitor1) > 228 MONITOR_EXIT > 228 THROW(Throwable) -1570c1669 +1566c1665 < ? THROW(Throwable) --- > 228 THROW(Throwable) -1598c1697 +1594c1693 < locals: value args, variable result, variable monitor2, variable monitorResult1 --- > locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1 -1600c1699 +1596c1695 < blocks: [1,2,3,4] --- > blocks: [1,2,3,4,5] -1623c1722,1730 +1619c1718,1726 < 245 THROW(MyException) --- > ? STORE_LOCAL(value exception$1) @@ -418,7 +418,7 @@ > ? LOAD_LOCAL(variable monitor2) > 244 MONITOR_EXIT > 244 THROW(Throwable) -1629c1736 +1625c1732 < ? THROW(Throwable) --- > 244 THROW(Throwable) diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check index 45ec31c308..ece88b18f0 100644 --- a/test/files/run/t6288b-jump-position.check +++ b/test/files/run/t6288b-jump-position.check @@ -19,7 +19,7 @@ object Case3 extends Object { Exception handlers: def main(args: Array[String] (ARRAY[REF(class String)])): Unit { - locals: value args, value x1, value x2, value x + locals: value args, value x1, value x startBlock: 1 blocks: [1,2,3,6,7] @@ -35,10 +35,6 @@ object Case3 extends Object { 5 CZJUMP (BOOL)NE ? 
3 : 6 3: - 5 LOAD_LOCAL(value x1) - 5 CHECK_CAST REF(class String) - 5 STORE_LOCAL(value x2) - 5 SCOPE_ENTER value x2 6 LOAD_MODULE object Predef 6 CONSTANT("case 0") 6 CALL_METHOD scala.Predef.println (dynamic) -- cgit v1.2.3 From b2117cf6f4080eff9028b2aa33a013329022efd6 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 31 Jan 2013 10:18:26 -0800 Subject: SI-6941 tests tests that the methods' bytecodes are similar as variable load/stores are reordered, it ignores which variables are modified when checking for bytecode equality the assert is: `similarBytecode(methNodeA, methNodeB, equalsModuloVar)` --- test/files/jvm/t6941.check | 1 + test/files/jvm/t6941.flags | 1 + test/files/jvm/t6941/Analyzed_1.scala | 11 +++++++++++ test/files/jvm/t6941/test.scala | 15 +++++++++++++++ 4 files changed, 28 insertions(+) create mode 100644 test/files/jvm/t6941.check create mode 100644 test/files/jvm/t6941.flags create mode 100644 test/files/jvm/t6941/Analyzed_1.scala create mode 100644 test/files/jvm/t6941/test.scala diff --git a/test/files/jvm/t6941.check b/test/files/jvm/t6941.check new file mode 100644 index 0000000000..43f53aba12 --- /dev/null +++ b/test/files/jvm/t6941.check @@ -0,0 +1 @@ +bytecode identical diff --git a/test/files/jvm/t6941.flags b/test/files/jvm/t6941.flags new file mode 100644 index 0000000000..49d036a887 --- /dev/null +++ b/test/files/jvm/t6941.flags @@ -0,0 +1 @@ +-optimize diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala new file mode 100644 index 0000000000..549abd5e64 --- /dev/null +++ b/test/files/jvm/t6941/Analyzed_1.scala @@ -0,0 +1,11 @@ +// this class's bytecode, compiled under -optimize is analyzed by the test +// method a's bytecode should be identical to method b's bytecode +class SameBytecode { + def a(xs: List[Int]) = xs match { + case x :: _ => x + } + + def b(xs: List[Int]) = xs match { + case xs: ::[Int] => xs.hd$1 + } +} \ No newline at end of file diff --git a/test/files/jvm/t6941/test.scala b/test/files/jvm/t6941/test.scala new file mode 100644 index 0000000000..248617f71f --- /dev/null +++ b/test/files/jvm/t6941/test.scala @@ -0,0 +1,15 @@ +import scala.tools.partest.BytecodeTest + +import scala.tools.nsc.util.JavaClassPath +import java.io.InputStream +import scala.tools.asm +import asm.ClassReader +import asm.tree.{ClassNode, InsnList} +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + def show: Unit = { + val classNode = loadClassNode("SameBytecode") + similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), equalsModuloVar) + } +} -- cgit v1.2.3 From 3f78bee128bd6a478bef6a66c5574f77a2d6dd74 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Wed, 30 Jan 2013 17:08:42 +0100 Subject: SI-7029 - Makes sure that uncaught exceptions are propagated to the UEH for the global ExecutionContext --- .../concurrent/impl/ExecutionContextImpl.scala | 34 +++++++++++++++++----- src/library/scala/concurrent/impl/Promise.scala | 2 +- test/files/jvm/future-spec/FutureTests.scala | 24 +++++++++++++++ 3 files changed, 51 insertions(+), 9 deletions(-) diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 215f90b17e..77625e381c 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -25,11 +25,15 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: case some => some } + private val 
uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler { + def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause) + } + // Implement BlockContext on FJP threads class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { def wire[T <: Thread](thread: T): T = { thread.setDaemon(daemonic) - //Potentially set things like uncaught exception handler, name etc + thread.setUncaughtExceptionHandler(uncaughtExceptionHandler) thread } @@ -73,7 +77,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: new ForkJoinPool( desiredParallelism, threadFactory, - null, //FIXME we should have an UncaughtExceptionHandler, see what Akka does + uncaughtExceptionHandler, true) // Async all the way baby } catch { case NonFatal(t) => @@ -94,13 +98,13 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: def execute(runnable: Runnable): Unit = executor match { case fj: ForkJoinPool => + val fjt = runnable match { + case t: ForkJoinTask[_] => t + case r => new ExecutionContextImpl.AdaptedForkJoinTask(r) + } Thread.currentThread match { - case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => - (runnable match { - case fjt: ForkJoinTask[_] => fjt - case _ => ForkJoinTask.adapt(runnable) - }).fork - case _ => fj.execute(runnable) + case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork() + case _ => fj execute fjt } case generic => generic execute runnable } @@ -111,6 +115,20 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter: private[concurrent] object ExecutionContextImpl { + final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] { + final override def setRawResult(u: Unit): Unit = () + final override def getRawResult(): Unit = () + final override def exec(): Boolean = try { runnable.run(); true } catch { + case anything: Throwable ⇒ + val t = Thread.currentThread + t.getUncaughtExceptionHandler match { + case null ⇒ + case some ⇒ some.uncaughtException(t, anything) + } + throw anything + } + } + def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter) def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService = new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService { diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index e9da45a079..52f1075137 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -34,7 +34,7 @@ private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete value = v // Note that we cannot prepare the ExecutionContext at this point, since we might // already be running on a different thread! 
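    // (editorial aside, not part of this diff) submitting the callback can itself fail,
    // e.g. with a RejectedExecutionException from an executor that has been shut down;
    // the replacement below routes such non-fatal errors to the ExecutionContext's
    // reportFailure rather than letting them escape on the thread completing the promise.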
- executor.execute(this) + try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t } } } diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index 8674be168c..ae2e72bf96 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -74,6 +74,30 @@ object FutureTests extends MinimalScalaTest { "A future with global ExecutionContext" should { import ExecutionContext.Implicits._ + "output uncaught exceptions" in { + import java.io.{ ByteArrayOutputStream, PrintStream } + + val baos = new ByteArrayOutputStream(1 << 16) { def isEmpty: Boolean = count == 0 } + val tmpErr = new PrintStream(baos) + + def assertPrintedToErr(t: Throwable): Unit = { + t.printStackTrace(tmpErr) + tmpErr.flush() + val expected = baos.toByteArray.toIndexedSeq + baos.reset() + val f = Future(throw t) + val d = Deadline.now + 5.seconds + while(d.hasTimeLeft && baos.isEmpty) Thread.sleep(10) + baos.toByteArray.toIndexedSeq mustBe (expected) + } + + val oldErr = System.err + System.setErr(tmpErr) + try { + assertPrintedToErr(new NotImplementedError("foo")) + } finally System.setErr(oldErr) + } + "compose with for-comprehensions" in { def async(x: Int) = future { (x * 2).toString } val future0 = future[Any] { -- cgit v1.2.3 From 5275baee6c563418f53abd2486764be08916c8c5 Mon Sep 17 00:00:00 2001 From: Philipp Haller Date: Fri, 1 Feb 2013 03:32:18 +0100 Subject: SI-7029 - Make test more robust --- test/files/jvm/future-spec/FutureTests.scala | 36 ++++++++++------------------ 1 file changed, 12 insertions(+), 24 deletions(-) diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index ae2e72bf96..0efa83fbd9 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -70,33 +70,21 @@ object FutureTests extends MinimalScalaTest { //FIXME should check } } - - "A future with global ExecutionContext" should { - import ExecutionContext.Implicits._ - "output uncaught exceptions" in { - import java.io.{ ByteArrayOutputStream, PrintStream } - - val baos = new ByteArrayOutputStream(1 << 16) { def isEmpty: Boolean = count == 0 } - val tmpErr = new PrintStream(baos) - - def assertPrintedToErr(t: Throwable): Unit = { - t.printStackTrace(tmpErr) - tmpErr.flush() - val expected = baos.toByteArray.toIndexedSeq - baos.reset() - val f = Future(throw t) - val d = Deadline.now + 5.seconds - while(d.hasTimeLeft && baos.isEmpty) Thread.sleep(10) - baos.toByteArray.toIndexedSeq mustBe (expected) - } + "The default ExecutionContext" should { + "report uncaught exceptions" in { + val p = Promise[Throwable]() + val logThrowable: Throwable => Unit = p.trySuccess(_) + val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable) - val oldErr = System.err - System.setErr(tmpErr) - try { - assertPrintedToErr(new NotImplementedError("foo")) - } finally System.setErr(oldErr) + val t = new NotImplementedError("foo") + val f = Future(throw t)(ec) + Await.result(p.future, 2.seconds) mustBe t } + } + + "A future with global ExecutionContext" should { + import ExecutionContext.Implicits._ "compose with for-comprehensions" in { def async(x: Int) = future { (x * 2).toString } -- cgit v1.2.3 From a6137d19b6ec7c63fbbae274de3c78e310bba4ae Mon Sep 17 00:00:00 2001 From: Iulian Dragos Date: Fri, 1 Feb 2013 14:36:14 +0100 Subject: Fix SI-6578. Deprecated `askType` because of possible race conditions in type checker. 
AskType triggers type-checks the given source and returns a typed tree. If that source is already loaded (a precondition), the background compilation loop may actually be compiling that same source. The new type checker run may then get into an inconsistent state and try to add twice the same synthetic members, like `canEqual`. Most of the times, `askLoadedTyped` (that waits for the type checker to finish, and returns the most recent typed tree) *is* the right way to go. Removed occurrences of the deprecated method in tests and interactive.REPL. @reviewby @huitseeker,@odersky --- .../scala/tools/nsc/interactive/CompilerControl.scala | 5 +++++ src/compiler/scala/tools/nsc/interactive/REPL.scala | 9 +-------- .../tools/nsc/interactive/tests/InteractiveTest.scala | 1 - .../tools/nsc/interactive/tests/core/AskCommand.scala | 17 ----------------- 4 files changed, 6 insertions(+), 26 deletions(-) diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala index b4af8f00d6..7dc0b786a7 100644 --- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala @@ -139,7 +139,12 @@ trait CompilerControl { self: Global => /** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`. * @pre `source` needs to be loaded. + * + * @note Deprecated because of race conditions in the typechecker when the background compiler + * is interrupted while typing the same `source`. + * @see SI-6578 */ + @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1") def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) = postWorkItem(new AskTypeItem(source, forceReload, response)) diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala index dacfa679dd..7b89d5b0aa 100644 --- a/src/compiler/scala/tools/nsc/interactive/REPL.scala +++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala @@ -110,11 +110,6 @@ object REPL { show(completeResult) } - def doTypedTree(file: String) { - comp.askType(toSourceFile(file), true, typedResult) - show(typedResult) - } - def doStructure(file: String) { comp.askParsedEntered(toSourceFile(file), false, structureResult) show(structureResult) @@ -175,10 +170,8 @@ object REPL { comp.askReload(List(toSourceFile(file)), reloadResult) Thread.sleep(millis.toInt) println("ask type now") - comp.askType(toSourceFile(file), false, typedResult) + comp.askLoadedTyped(toSourceFile(file), typedResult) typedResult.get - case List("typed", file) => - doTypedTree(file) case List("typeat", file, off1, off2) => doTypeAt(makePos(file, off1, off2)) case List("typeat", file, off1) => diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala index 62d274bc70..597b9012ce 100644 --- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -55,7 +55,6 @@ abstract class InteractiveTest with AskShutdown with AskReload with AskLoadedTyped - with AskType with PresentationCompilerInstance with CoreTestDefs with InteractiveTestSettings { self => diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala index eb902e3e6c..8d446cbbf8 100644 
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala +++ b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala @@ -97,23 +97,6 @@ trait AskTypeAt extends AskCommand { } } - -trait AskType extends AskCommand { - import compiler.Tree - - protected def askType(source: SourceFile, forceReload: Boolean)(implicit reporter: Reporter): Response[Tree] = { - ask { - compiler.askType(source, forceReload, _) - } - } - - protected def askType(sources: Seq[SourceFile], forceReload: Boolean)(implicit reporter: Reporter): Seq[Response[Tree]] = { - for(source <- sources) yield - askType(source, forceReload) - } -} - - trait AskLoadedTyped extends AskCommand { import compiler.Tree -- cgit v1.2.3 From f1701f704a2485fcc2eb6d5d8b5d0228beddd9b3 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 31 Jan 2013 20:40:16 +0100 Subject: SI-7008 @throws annotations are now populated in reflect Runtime reflection in JavaMirrors previously forgot to fill in @throws when importing Java reflection artifacts. Now this is fixed. Note that generic exception types used in `throws` specifications will be garbled (i.e. erased), because we don't use `getGenericExceptionTypes` in favor of just `getExceptionTypes` to stay compatible with the behavior of ClassfileParser. That's a bug, but a separate one and should be fixed separately. Also note that this commit updated javac-artifacts.jar, because we need to test how reflection works with javac-produced classfiles. The sources that were used to produce those classfiles can be found in the jar next to the classfiles. --- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 8 +------- .../scala/reflect/internal/AnnotationInfos.scala | 17 ++++++++++++++--- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 8 ++++++++ test/files/lib/javac-artifacts.jar.desired.sha1 | 2 +- test/files/run/t7008.check | 9 +++++++++ test/files/run/t7008/Impls_Macros_1.scala | 12 ++++++++++++ test/files/run/t7008/Test_2.scala | 9 +++++++++ 7 files changed, 54 insertions(+), 11 deletions(-) create mode 100644 test/files/run/t7008.check create mode 100644 test/files/run/t7008/Impls_Macros_1.scala create mode 100644 test/files/run/t7008/Test_2.scala diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 4b1d3c34f3..7ac7b4d637 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1043,13 +1043,7 @@ abstract class ClassfileParser { val nClasses = in.nextChar for (n <- 0 until nClasses) { val cls = pool.getClassSymbol(in.nextChar.toInt) - val tp = if (cls.isMonomorphicType) cls.tpe else { - debuglog(s"Encountered polymorphic exception `${cls.fullName}` while parsing class file.") - // in case we encounter polymorphic exception the best we can do is to convert that type to - // monomorphic one by introducing existientals, see SI-7009 for details - typer.packSymbols(cls.typeParams, cls.tpe) - } - sym.addAnnotation(appliedType(definitions.ThrowsClass, tp), Literal(Constant(tp))) + sym.addThrowsAnnotation(cls) } } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 6a5a742cc7..29879f9806 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -33,6 +33,17 @@ trait AnnotationInfos extends api.Annotations { 
self: SymbolTable => case ThrownException(exc) => exc } + def addThrowsAnnotation(excSym: Symbol): Self = { + val excTpe = if (excSym.isMonomorphicType) excSym.tpe else { + debuglog(s"Encountered polymorphic exception `${excSym.fullName}` while parsing class file.") + // in case we encounter polymorphic exception the best we can do is to convert that type to + // monomorphic one by introducing existentials, see SI-7009 for details + existentialAbstraction(excSym.typeParams, excSym.tpe) + } + val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, excTpe), List(Literal(Constant(excTpe))), Nil) + withAnnotations(List(throwsAnn)) + } + /** Tests for, get, or remove an annotation */ def hasAnnotation(cls: Symbol): Boolean = //OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls) @@ -330,14 +341,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo]) object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil) - + /** Extracts symbol of thrown exception from AnnotationInfo. - * + * * Supports both “old-style” `@throws(classOf[Exception])` * as well as “new-stye” `@throws[Exception]("cause")` annotations. */ object ThrownException { - def unapply(ann: AnnotationInfo): Option[Symbol] = + def unapply(ann: AnnotationInfo): Option[Symbol] = ann match { case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass => None diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 01e0634902..698b60b929 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -610,11 +610,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni /** * Copy all annotations of Java annotated element `jann` over to Scala symbol `sym`. + * Also creates `@throws` annotations if necessary. * Pre: `sym` is already initialized with a concrete type. * Note: If `sym` is a method or constructor, its parameter annotations are copied as well. 
*/ private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) { sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList + // FIXME: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser + val jexTpes = jann match { + case jm: jMethod => jm.getExceptionTypes.toList + case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList + case _ => Nil + } + jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe))) } /** diff --git a/test/files/lib/javac-artifacts.jar.desired.sha1 b/test/files/lib/javac-artifacts.jar.desired.sha1 index a49c986386..508141b771 100644 --- a/test/files/lib/javac-artifacts.jar.desired.sha1 +++ b/test/files/lib/javac-artifacts.jar.desired.sha1 @@ -1 +1 @@ -61931a51bb1a2d308d214b96d06e9a8808515dcf ?javac-artifacts.jar +a05ca69ac68fa7006b1b8ed98597046e105b4047 ?javac-artifacts.jar diff --git a/test/files/run/t7008.check b/test/files/run/t7008.check new file mode 100644 index 0000000000..ee077f90ff --- /dev/null +++ b/test/files/run/t7008.check @@ -0,0 +1,9 @@ +: List(throws[NullPointerException](classOf[java.lang.NullPointerException])) +bar: List(throws[Exception](classOf[java.lang.Exception])) +baz: List(throws[IllegalStateException](classOf[java.lang.IllegalStateException])) +foo: List(throws[Exception](classOf[java.lang.Exception])) +============= +: List(throws[java.lang.NullPointerException](classOf[java.lang.NullPointerException])) +bar: List(throws[java.lang.Exception](classOf[java.lang.Exception])) +baz: List(throws[java.lang.IllegalStateException](classOf[java.lang.IllegalStateException])) +foo: List(throws[java.lang.Exception](classOf[java.lang.Exception])) diff --git a/test/files/run/t7008/Impls_Macros_1.scala b/test/files/run/t7008/Impls_Macros_1.scala new file mode 100644 index 0000000000..f2eb7425f5 --- /dev/null +++ b/test/files/run/t7008/Impls_Macros_1.scala @@ -0,0 +1,12 @@ +import language.experimental.macros +import scala.reflect.macros.Context + +object Macros { + def impl(c: Context) = { + val decls = c.typeOf[JavaClassWithCheckedExceptions[_]].declarations.toList + val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL) + c.universe.reify(println(c.literal(s).splice)) + } + + def foo = macro impl +} \ No newline at end of file diff --git a/test/files/run/t7008/Test_2.scala b/test/files/run/t7008/Test_2.scala new file mode 100644 index 0000000000..b67faa327f --- /dev/null +++ b/test/files/run/t7008/Test_2.scala @@ -0,0 +1,9 @@ +import scala.reflect.runtime.universe._ + +object Test extends App { + Macros.foo + println("=============") + + val decls = typeOf[JavaClassWithCheckedExceptions[_]].declarations.toList + decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}")) +} \ No newline at end of file -- cgit v1.2.3 From adf50a3ac0c6861a77a781abc0814c5d17927175 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Fri, 1 Feb 2013 19:23:00 +0100 Subject: evicts javac-artifacts.jar Apparently, the usual _1, _2, _3... naming scheme also works for java files, which need to be compiled together with partests. This allows us to get rid of javac-artifacts.jar. 
--- test/files/lib/javac-artifacts.jar.desired.sha1 | 1 - test/files/run/reflection-java-annotations.check | 2 +- test/files/run/reflection-java-annotations.scala | 7 ---- .../JavaAnnottee_1.java | 47 ++++++++++++++++++++++ .../JavaComplexAnnotation_1.java | 34 ++++++++++++++++ .../JavaSimpleAnnotation_1.java | 21 ++++++++++ .../JavaSimpleEnumeration_1.java | 4 ++ .../run/reflection-java-annotations/Test_2.scala | 7 ++++ test/files/run/reflection-java-crtp.scala | 8 ---- .../JavaSimpleEnumeration_1.java | 4 ++ test/files/run/reflection-java-crtp/Main_2.scala | 8 ++++ test/files/run/t6548.check | 2 +- test/files/run/t6548.scala | 12 ------ .../run/t6548/JavaAnnotationWithNestedEnum_1.java | 17 ++++++++ test/files/run/t6548/Test_2.scala | 12 ++++++ test/files/run/t7008/Impls_Macros_1.scala | 12 ------ test/files/run/t7008/Impls_Macros_2.scala | 12 ++++++ .../t7008/JavaClassWithCheckedExceptions_1.java | 7 ++++ test/files/run/t7008/Test_2.scala | 9 ----- test/files/run/t7008/Test_3.scala | 9 +++++ 20 files changed, 184 insertions(+), 51 deletions(-) delete mode 100644 test/files/lib/javac-artifacts.jar.desired.sha1 delete mode 100644 test/files/run/reflection-java-annotations.scala create mode 100644 test/files/run/reflection-java-annotations/JavaAnnottee_1.java create mode 100644 test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java create mode 100644 test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java create mode 100644 test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java create mode 100644 test/files/run/reflection-java-annotations/Test_2.scala delete mode 100644 test/files/run/reflection-java-crtp.scala create mode 100644 test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java create mode 100644 test/files/run/reflection-java-crtp/Main_2.scala delete mode 100644 test/files/run/t6548.scala create mode 100644 test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java create mode 100644 test/files/run/t6548/Test_2.scala delete mode 100644 test/files/run/t7008/Impls_Macros_1.scala create mode 100644 test/files/run/t7008/Impls_Macros_2.scala create mode 100644 test/files/run/t7008/JavaClassWithCheckedExceptions_1.java delete mode 100644 test/files/run/t7008/Test_2.scala create mode 100644 test/files/run/t7008/Test_3.scala diff --git a/test/files/lib/javac-artifacts.jar.desired.sha1 b/test/files/lib/javac-artifacts.jar.desired.sha1 deleted file mode 100644 index 508141b771..0000000000 --- a/test/files/lib/javac-artifacts.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -a05ca69ac68fa7006b1b8ed98597046e105b4047 ?javac-artifacts.jar diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check index 53c53cfbcc..2d37fff1f4 100644 --- a/test/files/run/reflection-java-annotations.check +++ b/test/files/run/reflection-java-annotations.check @@ -1 +1 @@ -List(JavaComplexAnnotation(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation], classOf[JavaComplexAnnotation]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation], v12 = 
FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)) +List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)) diff --git a/test/files/run/reflection-java-annotations.scala b/test/files/run/reflection-java-annotations.scala deleted file mode 100644 index 2e3fed48ce..0000000000 --- a/test/files/run/reflection-java-annotations.scala +++ /dev/null @@ -1,7 +0,0 @@ -object Test extends App { - import scala.reflect.runtime.universe._ - val sym = typeOf[JavaAnnottee].typeSymbol - sym.typeSignature - sym.annotations foreach (_.javaArgs) - println(sym.annotations) -} \ No newline at end of file diff --git a/test/files/run/reflection-java-annotations/JavaAnnottee_1.java b/test/files/run/reflection-java-annotations/JavaAnnottee_1.java new file mode 100644 index 0000000000..b241f5d25e --- /dev/null +++ b/test/files/run/reflection-java-annotations/JavaAnnottee_1.java @@ -0,0 +1,47 @@ +@JavaComplexAnnotation_1( + v1 = (byte)1, + v2 = (short)2, + v3 = (char)3, + v4 = (int)4, + v5 = (long)5, + v6 = (float)6, + v7 = (double)7, + v10 = "hello", + v11 = JavaAnnottee_1.class, + v12 = JavaSimpleEnumeration_1.FOO, + v13 = @JavaSimpleAnnotation_1( + v1 = (byte)11, + v2 = (short)12, + v3 = (char)13, + v4 = (int)14, + v5 = (long)15, + v6 = (float)16, + v7 = (double)17, + v10 = "world1", + v11 = JavaSimpleAnnotation_1.class, + v12 = JavaSimpleEnumeration_1.FOO + ), + v101 = {(byte)101, (byte)101}, + v102 = {(short)102, (short)102}, + v103 = {(char)103, (char)103}, + v104 = {(int)104, (int)104}, + v105 = {(long)105, (long)105}, + v106 = {(float)106, (float)106}, + v107 = {(double)107, (double)107}, + v108 = {false, true}, + v110 = {"hello", "world"}, + v111 = {JavaSimpleAnnotation_1.class, JavaComplexAnnotation_1.class}, + v112 = {JavaSimpleEnumeration_1.FOO, JavaSimpleEnumeration_1.BAR}, + v113 = {@JavaSimpleAnnotation_1( + v1 = (byte)21, + v2 = (short)22, + v3 = (char)23, + v4 = (int)24, + v5 = (long)25, + v6 = (float)26, + v7 = (double)27, + v10 = "world2", + v11 = JavaComplexAnnotation_1.class, + v12 = JavaSimpleEnumeration_1.BAR + )}) +public class JavaAnnottee_1 {} \ No newline at end of file diff --git a/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java b/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java new file mode 100644 index 0000000000..645eeb9399 --- /dev/null +++ b/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java @@ -0,0 +1,34 @@ +import java.lang.annotation.Target; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) 
+public @interface JavaComplexAnnotation_1 { + byte v1(); + short v2(); + char v3(); + int v4(); + long v5(); + float v6(); + double v7(); + boolean v8() default false; + // void v9(); + String v10(); + Class v11(); + JavaSimpleEnumeration_1 v12(); + JavaSimpleAnnotation_1 v13(); + byte[] v101(); + short[] v102(); + char[] v103(); + int[] v104(); + long[] v105(); + float[] v106(); + double[] v107(); + boolean[] v108(); + String[] v110(); + Class[] v111(); + JavaSimpleEnumeration_1[] v112(); + JavaSimpleAnnotation_1[] v113(); +} \ No newline at end of file diff --git a/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java b/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java new file mode 100644 index 0000000000..c0f92fad2c --- /dev/null +++ b/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java @@ -0,0 +1,21 @@ +import java.lang.annotation.Target; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE}) +public @interface JavaSimpleAnnotation_1 { + byte v1(); + short v2(); + char v3(); + int v4(); + long v5(); + float v6(); + double v7(); + boolean v8() default false; + // void v9(); + String v10(); + Class v11(); + JavaSimpleEnumeration_1 v12(); +} \ No newline at end of file diff --git a/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java b/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java new file mode 100644 index 0000000000..39246141cc --- /dev/null +++ b/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java @@ -0,0 +1,4 @@ +enum JavaSimpleEnumeration_1 { + FOO, + BAR +} \ No newline at end of file diff --git a/test/files/run/reflection-java-annotations/Test_2.scala b/test/files/run/reflection-java-annotations/Test_2.scala new file mode 100644 index 0000000000..d2c3157071 --- /dev/null +++ b/test/files/run/reflection-java-annotations/Test_2.scala @@ -0,0 +1,7 @@ +object Test extends App { + import scala.reflect.runtime.universe._ + val sym = typeOf[JavaAnnottee_1].typeSymbol + sym.typeSignature + sym.annotations foreach (_.javaArgs) + println(sym.annotations) +} \ No newline at end of file diff --git a/test/files/run/reflection-java-crtp.scala b/test/files/run/reflection-java-crtp.scala deleted file mode 100644 index 260d3540dc..0000000000 --- a/test/files/run/reflection-java-crtp.scala +++ /dev/null @@ -1,8 +0,0 @@ -object Test extends App { - import scala.reflect.runtime.universe._ - val enum = typeOf[JavaSimpleEnumeration].baseClasses(1).asClass - // make sure that the E's in Enum> are represented by the same symbol - val e1 = enum.typeParams(0).asType - val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.typeSignature - println(e1, e2, e1 eq e2) -} \ No newline at end of file diff --git a/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java b/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java new file mode 100644 index 0000000000..39246141cc --- /dev/null +++ b/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java @@ -0,0 +1,4 @@ +enum JavaSimpleEnumeration_1 { + FOO, + BAR +} \ No newline at end of file diff --git a/test/files/run/reflection-java-crtp/Main_2.scala b/test/files/run/reflection-java-crtp/Main_2.scala new file mode 100644 index 0000000000..fb5668f323 --- /dev/null +++ b/test/files/run/reflection-java-crtp/Main_2.scala @@ 
-0,0 +1,8 @@ +object Test extends App { + import scala.reflect.runtime.universe._ + val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass + // make sure that the E's in Enum> are represented by the same symbol + val e1 = enum.typeParams(0).asType + val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.typeSignature + println(e1, e2, e1 eq e2) +} \ No newline at end of file diff --git a/test/files/run/t6548.check b/test/files/run/t6548.check index e82cae110a..5dfcb12e02 100644 --- a/test/files/run/t6548.check +++ b/test/files/run/t6548.check @@ -1,2 +1,2 @@ false -List(JavaAnnotationWithNestedEnum(value = VALUE)) +List(JavaAnnotationWithNestedEnum_1(value = VALUE)) diff --git a/test/files/run/t6548.scala b/test/files/run/t6548.scala deleted file mode 100644 index be3eb5b932..0000000000 --- a/test/files/run/t6548.scala +++ /dev/null @@ -1,12 +0,0 @@ -import scala.reflect.runtime.universe._ -import scala.reflect.runtime.{currentMirror => cm} - -class Bean { - @JavaAnnotationWithNestedEnum(JavaAnnotationWithNestedEnum.Value.VALUE) - def value = 1 -} - -object Test extends App { - println(cm.staticClass("Bean").isCaseClass) - println(typeOf[Bean].declaration(newTermName("value")).annotations) -} diff --git a/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java new file mode 100644 index 0000000000..32004de537 --- /dev/null +++ b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java @@ -0,0 +1,17 @@ +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.FIELD, + ElementType.TYPE, ElementType.PARAMETER}) +@Retention(RetentionPolicy.RUNTIME) +public @interface JavaAnnotationWithNestedEnum_1 +{ + public Value value() default Value.VALUE; + + public enum Value + { + VALUE; + } +} \ No newline at end of file diff --git a/test/files/run/t6548/Test_2.scala b/test/files/run/t6548/Test_2.scala new file mode 100644 index 0000000000..6e4f6ba92a --- /dev/null +++ b/test/files/run/t6548/Test_2.scala @@ -0,0 +1,12 @@ +import scala.reflect.runtime.universe._ +import scala.reflect.runtime.{currentMirror => cm} + +class Bean { + @JavaAnnotationWithNestedEnum_1(JavaAnnotationWithNestedEnum_1.Value.VALUE) + def value = 1 +} + +object Test extends App { + println(cm.staticClass("Bean").isCaseClass) + println(typeOf[Bean].declaration(newTermName("value")).annotations) +} diff --git a/test/files/run/t7008/Impls_Macros_1.scala b/test/files/run/t7008/Impls_Macros_1.scala deleted file mode 100644 index f2eb7425f5..0000000000 --- a/test/files/run/t7008/Impls_Macros_1.scala +++ /dev/null @@ -1,12 +0,0 @@ -import language.experimental.macros -import scala.reflect.macros.Context - -object Macros { - def impl(c: Context) = { - val decls = c.typeOf[JavaClassWithCheckedExceptions[_]].declarations.toList - val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL) - c.universe.reify(println(c.literal(s).splice)) - } - - def foo = macro impl -} \ No newline at end of file diff --git a/test/files/run/t7008/Impls_Macros_2.scala b/test/files/run/t7008/Impls_Macros_2.scala new file mode 100644 index 0000000000..7a17314085 --- /dev/null +++ b/test/files/run/t7008/Impls_Macros_2.scala @@ -0,0 +1,12 @@ +import language.experimental.macros +import scala.reflect.macros.Context + +object Macros { 
+ def impl(c: Context) = { + val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList + val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL) + c.universe.reify(println(c.literal(s).splice)) + } + + def foo = macro impl +} \ No newline at end of file diff --git a/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java b/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java new file mode 100644 index 0000000000..dda2128302 --- /dev/null +++ b/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java @@ -0,0 +1,7 @@ +class JavaClassWithCheckedExceptions_1 { + public JavaClassWithCheckedExceptions_1() throws NullPointerException {} + + public void bar() throws E1 {} + public void baz(int x) throws IllegalStateException {} + public void foo() throws E2 {} +} \ No newline at end of file diff --git a/test/files/run/t7008/Test_2.scala b/test/files/run/t7008/Test_2.scala deleted file mode 100644 index b67faa327f..0000000000 --- a/test/files/run/t7008/Test_2.scala +++ /dev/null @@ -1,9 +0,0 @@ -import scala.reflect.runtime.universe._ - -object Test extends App { - Macros.foo - println("=============") - - val decls = typeOf[JavaClassWithCheckedExceptions[_]].declarations.toList - decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}")) -} \ No newline at end of file diff --git a/test/files/run/t7008/Test_3.scala b/test/files/run/t7008/Test_3.scala new file mode 100644 index 0000000000..b2961a829e --- /dev/null +++ b/test/files/run/t7008/Test_3.scala @@ -0,0 +1,9 @@ +import scala.reflect.runtime.universe._ + +object Test extends App { + Macros.foo + println("=============") + + val decls = typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList + decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}")) +} \ No newline at end of file -- cgit v1.2.3 From 8ae0e2a37764e0193dccd5658fd35f56edcbfd69 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 1 Feb 2013 11:55:01 -0800 Subject: SI-7039 unapplySeq result type independent of subpattern count Fixes a bug in the implementation of the `unapplySeq` part of the spec below. An `unapply` method with result type `R` in an object `x` matches the pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either: - `n = 0` and `R =:= Boolean`, or - `n = 1` and `R <:< Option[T]`, for some type `T`. The argument pattern `p1` is typed in turn with expected type `T`. - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are typed with expected types `T_1, ..., T_n`. An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)` if it takes exactly one argument and its result type is of the form `Option[S]`, where either: - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`) - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`). The argument patterns `p_1, ..., p_n` are typed with expected types `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times. 
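To make the rule concrete, a minimal sketch (it mirrors the regression test added below; the extractor, values and messages are illustrative only):

    object UnapplySeqTest {
      // one fixed component (Int) followed by a Seq tail
      def unapplySeq(any: Any): Option[(Int, Seq[Int])] = Some((5, List(1)))
    }

    object Test extends App {
      null match {
        case UnapplySeqTest(5)          => println("no match: the tail has an element but no subpattern for it")
        case UnapplySeqTest(5, 1)       => println("Matched!")          // 5 : Int, 1 : element of the Seq tail
        case UnapplySeqTest(5, xs @ _*) => println("whole tail: " + xs) // xs : Seq[Int]
      }
    }

The formal types now come from the extractor's result type alone, rather than from the number of subpatterns written at the call site.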
--- .../scala/tools/nsc/typechecker/Infer.scala | 70 +++++++++++++--------- .../tools/nsc/typechecker/PatternMatching.scala | 2 +- test/files/run/t7039.check | 1 + test/files/run/t7039.scala | 11 ++++ 4 files changed, 55 insertions(+), 29 deletions(-) create mode 100644 test/files/run/t7039.check create mode 100644 test/files/run/t7039.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 581f9f3bfa..d724bc430e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -58,20 +58,31 @@ trait Infer extends Checkable { * @throws TypeError when the unapply[Seq] definition is ill-typed * @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor * - * From the spec: + * This is the spec currently implemented -- TODO: update it. + * * 8.1.8 ExtractorPatterns * * An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern. * However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern. - * An unapply method in an object x matches the pattern x(p1, ..., pn) if it takes exactly one argument and one of the following applies: * - * n = 0 and unapply’s result type is Boolean. + * An `unapply` method with result type `R` in an object `x` matches the + * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either: + * - `n = 0` and `R =:= Boolean`, or + * - `n = 1` and `R <:< Option[T]`, for some type `T`. + * The argument pattern `p1` is typed in turn with expected type `T`. + * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some + * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are + * typed with expected types `T_1, ..., T_n`. + * + * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)` + * if it takes exactly one argument and its result type is of the form `Option[S]`, + * where either: + * - `S` is a subtype of `Seq[U]` for some element type `U`, (set `m = 0`) + * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`). * - * n = 1 and unapply’s result type is Option[T], for some type T. - * the (only) argument pattern p1 is typed in turn with expected type T + * The argument patterns `p_1, ..., p_n` are typed with expected types + * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times. * - * n > 1 and unapply’s result type is Option[(T1, ..., Tn)], for some types T1, ..., Tn. 
- * the argument patterns p1, ..., pn are typed in turn with expected types T1, ..., Tn */ def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = { val isUnapplySeq = unappSym.name == nme.unapplySeq @@ -83,31 +94,34 @@ trait Infer extends Checkable { else toRepeated } + // empty list --> error, otherwise length == 1 + lazy val optionArgs = resTp.baseType(OptionClass).typeArgs + // empty list --> not a ProductN, otherwise product element types + def productArgs = getProductArgs(optionArgs.head) + val formals = - if (nbSubPats == 0 && booleanExtractor && !isUnapplySeq) Nil - else resTp.baseType(OptionClass).typeArgs match { - case optionTArg :: Nil => - def productArgs = getProductArgs(optionTArg) + // convert Seq[T] to the special repeated argument type + // so below we can use formalTypes to expand formals to correspond to the number of actuals + if (isUnapplySeq) { + if (optionArgs.nonEmpty) + productArgs match { + case Nil => List(seqToRepeatedChecked(optionArgs.head)) + case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp) + } + else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]") + } else { + if (booleanExtractor && nbSubPats == 0) Nil + else if (optionArgs.nonEmpty) if (nbSubPats == 1) { - if (isUnapplySeq) List(seqToRepeatedChecked(optionTArg)) - else { - val productArity = productArgs.size - if (productArity > 1 && settings.lint.value) - global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionTArg}") - List(optionTArg) - } + val productArity = productArgs.size + if (productArity > 1 && settings.lint.value) + global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}") + optionArgs } // TODO: update spec to reflect we allow any ProductN, not just TupleN - else productArgs match { - case Nil if isUnapplySeq => List(seqToRepeatedChecked(optionTArg)) - case tps if isUnapplySeq => tps.init :+ seqToRepeatedChecked(tps.last) - case tps => tps - } - case _ => - if (isUnapplySeq) - throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.owner+unappSym.owner.locationString} not in {Option[_], Some[_]}") - else - throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.owner+unappSym.owner.locationString} not in {Boolean, Option[_], Some[_]}") + else productArgs + else + throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean") } // for unapplySeq, replace last vararg by as many instances as required by nbSubPats diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala index b0745b4c09..59ced09451 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala @@ -800,7 +800,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL protected lazy val rawSubPatTypes = if (resultInMonad.typeSymbol eq UnitClass) Nil - else if(nbSubPats == 1) List(resultInMonad) + else if(!isSeq && nbSubPats == 1) List(resultInMonad) else getProductArgs(resultInMonad) match { case Nil => List(resultInMonad) case x => x diff --git a/test/files/run/t7039.check b/test/files/run/t7039.check new file mode 100644 index 
0000000000..954906040f --- /dev/null +++ b/test/files/run/t7039.check @@ -0,0 +1 @@ +Matched! diff --git a/test/files/run/t7039.scala b/test/files/run/t7039.scala new file mode 100644 index 0000000000..475c4ae267 --- /dev/null +++ b/test/files/run/t7039.scala @@ -0,0 +1,11 @@ +object UnapplySeqTest { + def unapplySeq(any: Any): Option[(Int, Seq[Int])] = Some((5, List(1))) +} + +object Test extends App { + null match { + case UnapplySeqTest(5) => println("uh-oh") + case UnapplySeqTest(5, 1) => println("Matched!") // compiles + case UnapplySeqTest(5, xs @ _*) => println("toooo long: "+ (xs: Seq[Int])) + } +} \ No newline at end of file -- cgit v1.2.3 From 4ed88363f6867e5b4ebedfde10b88c0167f24f23 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 26 Jan 2013 10:37:12 +0100 Subject: [backport] SI-6482, lost bounds in extension methods. Squashed commit of the following: commit 5c156185306ba797c0443d9dccae0ae7ce462a1f Author: Paul Phillips Date: Sat Oct 6 15:42:50 2012 -0700 A little more housecleaning in ExtensionMethods. The only real contribution is readability. (cherry picked from commit 61f12faacaaccf366f9211ba6493fb042a91f1d2) Conflicts: src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala commit 79f443edf584745d614e24fb9ca6644c6b18d439 Author: Paul Phillips Date: Sat Oct 6 14:22:19 2012 -0700 Incorporated pull request feedback. (cherry picked from commit 153ccb4757718cceb219988f30381f73362e6075) commit 707f580b0cdcb01e27ca4c76991dea427945b5bd Author: Paul Phillips Date: Sat Oct 6 10:20:45 2012 -0700 Fix for SI-6482, lost bounds in extension methods. That was a good one. How to create a new method with type parameters from multiple sources, herein. (cherry picked from commit ff9f60f420c090b6716c927ab0359b082f2299de) commit 8889c7a13f74bc175e48aa2209549089a974c2af Author: Paul Phillips Date: Fri Oct 5 22:19:52 2012 -0700 Responded to comment about how many isCoercibles there are. I make the case that there is only one. (cherry picked from commit 883f1ac88dd7cec5882d42d6b48d7f267d1f6e00) --- .../tools/nsc/transform/ExtensionMethods.scala | 189 +++++++++++++-------- .../scala/tools/nsc/typechecker/Infer.scala | 11 ++ .../scala/reflect/internal/Definitions.scala | 11 ++ test/files/pos/t6482.scala | 11 ++ 4 files changed, 150 insertions(+), 72 deletions(-) create mode 100644 test/files/pos/t6482.scala diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index c7ca239fa9..bc54054028 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -91,39 +91,42 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { matching.head } + /** Recognize a MethodType which represents an extension method. + * + * It may have a curried parameter list with the `$this` alone in the first + * parameter list, in which case that parameter list is dropped. Or, since + * the curried lists disappear during uncurry, it may have a single parameter + * list with `$this` as the first parameter, in which case that parameter is + * removed from the list. + */ + object ExtensionMethodType { + def unapply(tp: Type) = tp match { + case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF => + Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) )) + case _ => + None + } + } + /** This method removes the `$this` argument from the parameter list a method. 
* * A method may be a `PolyType`, in which case we tear out the `$this` and the class - * type params from its nested `MethodType`. - * It may be a `MethodType`, either with a curried parameter list in which the first argument - * is a `$this` - we just return the rest of the list. - * This means that the corresponding symbol was generated during `extmethods`. - * - * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list. - * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards, - * for instance, during `specialize`. - * In this case, the first argument is `$this` and we just get rid of it. + * type params from its nested `MethodType`. Or it may be a MethodType, as + * described at the ExtensionMethodType extractor. */ private def normalize(stpe: Type, clazz: Symbol): Type = stpe match { case PolyType(tparams, restpe) => - // Split the type parameters of the extension method into two groups, - // corresponding the to class and method type parameters. - val numClassParams = clazz.typeParams.length - val methTParams = tparams dropRight numClassParams - val classTParams = tparams takeRight numClassParams - - GenPolyType(methTParams, - normalize(restpe.substSym(classTParams, clazz.typeParams), clazz)) - case MethodType(List(thiz), restpe) if thiz.name == nme.SELF => - restpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil) - case MethodType(thiz :: params, restpe) => - MethodType(params, restpe) + // method type parameters, class type parameters + val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length) + GenPolyType(mtparams, + normalize(restpe.substSym(ctparams, clazz.typeParams), clazz)) + case ExtensionMethodType(thiz, etpe) => + etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil) case _ => stpe } class Extender(unit: CompilationUnit) extends TypingTransformer(unit) { - private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]() def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit = @@ -134,31 +137,54 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed) } + /** We will need to clone the info of the original method (which obtains clones + * of the method type parameters), clone the type parameters of the value class, + * and create a new polymethod with the union of all those type parameters, with + * their infos adjusted to be consistent with their new home. Example: + * + * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal { + * def baz[B >: A](x: B): List[B] = x :: xs + * // baz has to be transformed into this extension method, where + * // A is cloned from class Foo and B is cloned from method baz: + * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B] + * } + * + * TODO: factor out the logic for consolidating type parameters from a class + * and a method for re-use elsewhere, because nobody will get this right without + * some higher level facilities. 
+ */ def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = { - // No variance for method type parameters - var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) - val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK)) + val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth + // Start with the class type parameters - clones will be method type parameters + // so must drop their variance. + val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) + + val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*) val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType - def transform(clonedType: Type): Type = clonedType match { - case MethodType(params, restpe) => - // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried. - MethodType(List(thisParam), clonedType) - case NullaryMethodType(restpe) => - MethodType(List(thisParam), restpe) - } - val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth - val selfParamSingletonType = singleType(currentOwner.companionModule.thisType, thisParam) - GenPolyType( - tparams ::: newTypeParams, - transform(restpe) substThisAndSym (clazz, selfParamSingletonType, clazz.typeParams, newTypeParams) - ) - } + val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) + val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam) - private def allParams(tpe: Type): List[Symbol] = tpe match { - case MethodType(params, res) => params ::: allParams(res) - case _ => List() - } + def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass) + def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass) + + // We can't substitute symbols on the entire polytype because we + // need to modify the bounds of the cloned type parameters, but we + // don't want to substitute for the cloned type parameters themselves. + val tparams = tparamsFromMethod ::: tparamsFromClass + GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType)) + // For reference, calling fix on the GenPolyType plays out like this: + // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966] + // do not conform to method extension$baz#16148's type parameter bounds + // + // And the difference is visible here. See how B is bounded from below by A#16149 + // in both cases, but in the failing case, the other type parameter has turned into + // a different A. (What is that A? It is a clone of the original A created in + // SubstMap during the call to substSym, but I am not clear on all the particulars.) 
+ // + // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154] + // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151] + } override def transform(tree: Tree): Tree = { tree match { case Template(_, _, _) => @@ -173,37 +199,56 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { super.transform(tree) } else tree case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension => - val companion = currentOwner.companionModule - val origMeth = tree.symbol - val extensionName = extensionNames(origMeth).head - val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) - .setAnnotations(origMeth.annotations) - companion.info.decls.enter(extensionMeth) - val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner) + val origMeth = tree.symbol + val origThis = currentOwner + val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params + val origParams = vparamss.flatten map (_.symbol) + val companion = origThis.companionModule + + def makeExtensionMethodSymbol = { + val extensionName = extensionNames(origMeth).head + val extensionMeth = ( + companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL) + setAnnotations origMeth.annotations + ) + companion.info.decls.enter(extensionMeth) + } + + val extensionMeth = makeExtensionMethodSymbol + val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis) extensionMeth setInfo newInfo - log("Value class %s spawns extension method.\n Old: %s\n New: %s".format( - currentOwner, - origMeth.defString, - extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo - - def thisParamRef = gen.mkAttributedStableRef(extensionMeth.info.params.head setPos extensionMeth.pos) - val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info - val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams - val extensionBody = rhs + + log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}") + + val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo + val extensionParams = allParameters(extensionMono) + val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos) + + val extensionBody = ( + rhs .substituteSymbols(origTpeParams, extensionTpeParams) - .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail) - .substituteThis(currentOwner, thisParamRef) - .changeOwner((origMeth, extensionMeth)) - extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) } - val extensionCallPrefix = Apply( - gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)), - List(This(currentOwner))) - val extensionCall = atOwner(origMeth) { - localTyper.typedPos(rhs.pos) { - gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol)) - } - } - deriveDefDef(tree)(_ => extensionCall) + .substituteSymbols(origParams, extensionParams) + .substituteThis(origThis, extensionThis) + .changeOwner(origMeth -> extensionMeth) + ) + + // Record the extension method ( FIXME: because... ? 
) + extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody)) + + // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this) + // which leaves the actual argument application for extensionCall. + val sel = Select(gen.mkAttributedRef(companion), extensionMeth) + val targs = origTpeParams map (_.tpeHK) + val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil) + + // Apply all the argument lists. + deriveDefDef(tree)(_ => + atOwner(origMeth)( + localTyper.typedPos(rhs.pos)( + gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol)) + ) + ) + ) case _ => super.transform(tree) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 581f9f3bfa..7dddd320ef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -411,8 +411,19 @@ trait Infer extends Checkable { /** Like weakly compatible but don't apply any implicit conversions yet. * Used when comparing the result type of a method with its prototype. + * * [Martin] I think Infer is also created by Erasure, with the default * implementation of isCoercible + * [Paulp] (Assuming the above must refer to my comment on isCoercible) + * Nope, I examined every occurrence of Inferencer in trunk. It + * appears twice as a self-type, once at its definition, and once + * where it is instantiated in Typers. There are no others. + * + % ack -A0 -B0 --no-filename '\bInferencer\b' src + self: Inferencer => + self: Inferencer => + class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable { + val infer = new Inferencer(context0) { */ def isConservativelyCompatible(tp: Type, pt: Type): Boolean = context.withImplicitsDisabled(isWeaklyCompatible(tp, pt)) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 4269b65297..8f451c207a 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -671,6 +671,11 @@ trait Definitions extends api.StandardDefinitions { case _ => Nil } + def dropNullaryMethod(tp: Type) = tp match { + case NullaryMethodType(restpe) => restpe + case _ => tp + } + def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match { case RefinedType(p :: _, _) => p.normalize case tp => tp @@ -864,6 +869,12 @@ trait Definitions extends api.StandardDefinitions { removeRedundantObjects(parents) } + /** Flatten curried parameter lists of a method type. */ + def allParameters(tpe: Type): List[Symbol] = tpe match { + case MethodType(params, res) => params ::: allParameters(res) + case _ => Nil + } + def typeStringNoPackage(tp: Type) = "" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "." 
diff --git a/test/files/pos/t6482.scala b/test/files/pos/t6482.scala new file mode 100644 index 0000000000..24ea38e519 --- /dev/null +++ b/test/files/pos/t6482.scala @@ -0,0 +1,11 @@ +final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal { + def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] = + if (collection.isEmpty) None else Some(collection.reduceLeft[B](op)) +} +// error: type arguments [B] do not conform to method reduceLeft's type parameter bounds [B >: A] +// if (collection.isEmpty) None else Some(collection.reduceLeft[B](op)) +// ^ + +class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal { + def baz[B >: A](x: B): List[B] = x :: xs +} -- cgit v1.2.3 From 374c912a1440193b06fc6fd74b39063949b2c086 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 26 Jan 2013 10:43:48 +0100 Subject: SI-7022 Additional test case for value class w. bounds As reported against 2.10.0, and as fixed by SI-6482, which was backported in the previous commit. --- test/files/pos/t7022.scala | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 test/files/pos/t7022.scala diff --git a/test/files/pos/t7022.scala b/test/files/pos/t7022.scala new file mode 100644 index 0000000000..0609e2d250 --- /dev/null +++ b/test/files/pos/t7022.scala @@ -0,0 +1,9 @@ +class Catch[+T] { + def either[U >: T](body: => U): Either[Throwable, U] = ??? +} + +object Test { + implicit class RichCatch[T](val c: Catch[T]) extends AnyVal { + def validation[U >: T](u: => U): Either[Throwable, U] = c.either(u) + } +} -- cgit v1.2.3 From 3af838c55674c0d03e8849d8c756d6ad56c537db Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 28 Jan 2013 14:10:48 +0100 Subject: SI-7033 Be symful when creating factory methods. Implicit class factory methods were synthesizing the reference to the class as `Ident(classDef.name)`, which was unhygienic in case of `implicit class X[X]`. To use symbols without causing a cycle, I switched from `REF(symbol)` to `Ident(symbol)`. The former calls into: at scala.reflect.internal.TreeGen.mkAttributedSelect(TreeGen.scala:184) at scala.reflect.internal.TreeGen.mkAttributedRef(TreeGen.scala:124) at scala.reflect.internal.TreeGen.mkAttributedRef(TreeGen.scala:130) at scala.tools.nsc.ast.TreeDSL$CODE$.REF(TreeDSL.scala:307) which forces the info of enclosing module and forms a cycle. 
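To make the hygiene problem concrete, a minimal sketch of the shape that used to trip it up (it is essentially the new test case below):

    object Wrap {
      // the type parameter deliberately shadows the class name
      implicit class X[X](val a: X)

      // invokes the synthesized factory method; the reference to the class in that method
      // must be resolved by symbol, since by name `X` would bind to the type parameter
      X[Int](0)
    }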
--- .../scala/tools/nsc/typechecker/MethodSynthesis.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 11 ++++++----- test/files/pos/t7033.scala | 15 +++++++++++++++ 3 files changed, 22 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/t7033.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index acc4f7ff67..b1cf93a879 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -389,7 +389,7 @@ trait MethodSynthesis { result } def derivedTree: DefDef = - factoryMeth(mods & flagsMask | flagsExtra, name, tree, symbolic = false) + factoryMeth(mods & flagsMask | flagsExtra, name, tree) def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC def flagsMask: Long = AccessFlags def name: TermName = tree.name.toTermName diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index 5782d7bbca..3e4e0f49d7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -79,8 +79,9 @@ trait Unapplies extends ast.TreeDSL private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus - private def classType(cdef: ClassDef, tparams: List[TypeDef], symbolic: Boolean = true): Tree = { - val tycon = if (symbolic) REF(cdef.symbol) else Ident(cdef.name) + private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = { + // SI-7033 Unattributed to avoid forcing `cdef.symbol.info`. + val tycon = Ident(cdef.symbol) if (tparams.isEmpty) tycon else AppliedTypeTree(tycon, tparams map toIdent) } @@ -133,10 +134,10 @@ trait Unapplies extends ast.TreeDSL /** The apply method corresponding to a case class */ - def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef, symbolic: Boolean): DefDef = { + def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = { val tparams = cdef.tparams map copyUntypedInvariant val cparamss = constrParamss(cdef) - def classtpe = classType(cdef, tparams, symbolic) + def classtpe = classType(cdef, tparams) atPos(cdef.pos.focus)( DefDef(mods, name, tparams, cparamss, classtpe, New(classtpe, mmap(cparamss)(gen.paramToArg))) @@ -145,7 +146,7 @@ trait Unapplies extends ast.TreeDSL /** The apply method corresponding to a case class */ - def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef, symbolic = true) + def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef) /** The unapply method corresponding to a case class */ diff --git a/test/files/pos/t7033.scala b/test/files/pos/t7033.scala new file mode 100644 index 0000000000..a4d256673b --- /dev/null +++ b/test/files/pos/t7033.scala @@ -0,0 +1,15 @@ +import language.higherKinds +object Wrap { + implicit class X[X](val a: X) + + X[Int](0) +} + +class Wrap { + implicit class Y[Y](val a: Y) + Y[Int](0) + implicit class Z[Z[_]](val a: Z[Wrap.this.Z[Z]]) + Z[List](List(new Z[List](null))) +} + +case class X[X](val a: X) -- cgit v1.2.3 From c7d489e21f234bf1a2ea04c6b68990c53b5b387d Mon Sep 17 00:00:00 2001 From: James Iry Date: Sat, 2 Feb 2013 09:27:21 -0800 Subject: SI-5313 Test clobbers on the back edge of a loop I realized I was missing a test case for a local store early in a loop that was unused but turned out to be a clobber of a store later in the loop. 
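A minimal sketch of the loop pattern the new test below exercises (names invented): the initial null store is never read, but dropping it would leave the previous iteration's object reachable across the back edge.

    class LoopClobber {
      def randomBoolean = scala.util.Random.nextInt % 2 == 0

      def loop(): Unit = {
        while (randomBoolean) {
          var ref: AnyRef = null // never read, but it clobbers the store from the
                                 // previous iteration, so eliminating it would keep
                                 // that object alive longer than the source says
          ref = new Object       // read below, hence a necessary store
          println(ref)
        }
      }
    }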
--- test/files/run/t5313.check | 2 ++ test/files/run/t5313.scala | 10 ++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check index aa30c0efa5..7a48b2b711 100644 --- a/test/files/run/t5313.check +++ b/test/files/run/t5313.check @@ -8,3 +8,5 @@ STORE_LOCAL(variable kept4) STORE_LOCAL(variable kept4) STORE_LOCAL(variable kept5) STORE_LOCAL(variable kept5) +STORE_LOCAL(variable kept6) +STORE_LOCAL(variable kept6) diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala index 64009e29af..7da8726a1f 100644 --- a/test/files/run/t5313.scala +++ b/test/files/run/t5313.scala @@ -7,7 +7,7 @@ object Test extends IcodeTest { override def code = """class Foo { - def foo = true + def randomBoolean = util.Random.nextInt % 2 == 0 def bar = { var kept1 = new Object val result = new java.lang.ref.WeakReference(kept1) @@ -19,7 +19,7 @@ object Test extends IcodeTest { var erased4 = erased2 // and this val erased5 = erased4 // and this var kept2: Object = new Object // ultimately can't be eliminated - while(foo) { + while(randomBoolean) { val kept3 = kept2 kept2 = null // this can't, because it clobbers kept2, which is used erased4 = null // safe to eliminate @@ -36,6 +36,12 @@ object Test extends IcodeTest { kept5 = null // can't eliminate it's a clobber and it's used print(kept5) kept5 = null // can eliminate because we don't care about clobbers of nulls + while(randomBoolean) { + var kept6: AnyRef = null // not used, but have to keep because it clobbers the next used store + // on the back edge of the loop + kept6 = new Object // used + println(kept6) + } result } }""".stripMargin -- cgit v1.2.3 From 4fda83f8b02101a37871eadda9a13c70fd22bd2d Mon Sep 17 00:00:00 2001 From: James Iry Date: Sat, 2 Feb 2013 09:53:59 -0800 Subject: SI-5313 Minor code cleanup for store clobbering Used withDefault on the localStores map to simplify the code. Added a type alias for the (BasicBlock, Int) tuple type that was used throughout the code to represent the location of an instruction. --- .../nsc/backend/opt/DeadCodeElimination.scala | 28 ++++++++++------------ 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala index 53270c0651..d4a6d18c60 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala @@ -18,6 +18,9 @@ abstract class DeadCodeElimination extends SubComponent { import icodes.opcodes._ import definitions.RuntimePackage + /** The block and index where an instruction is located */ + type InstrLoc = (BasicBlock, Int) + val phaseName = "dce" /** Create a new phase */ @@ -55,10 +58,10 @@ abstract class DeadCodeElimination extends SubComponent { val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis; /** Use-def chain: give the reaching definitions at the beginning of given instruction. */ - var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty + var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty /** Useful instructions which have not been scanned yet. */ - val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet + val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet /** what instructions have been marked as useful? 
*/ val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap() @@ -67,16 +70,16 @@ abstract class DeadCodeElimination extends SubComponent { var accessedLocals: List[Local] = Nil /** Map from a local and a basic block to the instructions that store to that local in that basic block */ - val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() + val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()} /** Stores that clobber previous stores to array or ref locals. See SI-5313 */ - val clobbers = mutable.Set[(BasicBlock, Int)]() + val clobbers = mutable.Set[InstrLoc]() /** the current method. */ var method: IMethod = _ /** Map instructions who have a drop on some control path, to that DROP instruction. */ - val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap() + val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap() def dieCodeDie(m: IMethod) { if (m.hasCode) { @@ -135,14 +138,9 @@ abstract class DeadCodeElimination extends SubComponent { if (necessary) worklist += ((bb, idx)) // add it to the localStores map val key = (l, bb) - val set = if(localStores contains key) - localStores(key) - else { - val set = new mutable.BitSet - localStores.put(key, set) - set - } + val set = localStores(key) set += idx + localStores(key) = set case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) | THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) | @@ -345,8 +343,8 @@ abstract class DeadCodeElimination extends SubComponent { } } - private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = { - val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap + private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = { + val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap m foreachBlock { bb => assert(bb.closed, "Open block in computeCompensations") @@ -385,7 +383,7 @@ abstract class DeadCodeElimination extends SubComponent { res } - private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = { + private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = { for (b <- linearizer.linearizeAt(method, bb)) { val idx = b.toList indexWhere (_ eq i) if (idx != -1) -- cgit v1.2.3 From 3f0bce95fffcea60b3cbf8bc9e0c9a410f732cde Mon Sep 17 00:00:00 2001 From: Kato Kazuyoshi Date: Tue, 8 Jan 2013 01:22:34 +0900 Subject: SI-6017 Generate Scaladoc's index links in Scala side There is no reason to do it in JavaScript. --- .../scala/tools/nsc/doc/html/page/Index.scala | 14 +++++++++++++- .../scala/tools/nsc/doc/html/resource/lib/index.js | 19 +------------------ test/scaladoc/run/SI-6017.scala | 3 +++ 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala index 86407fb9a3..2f83741d4e 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala @@ -48,10 +48,22 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage { + def letters: NodeSeq = { + val xs = index.firstLetterIndex.keys.toSeq + xs.sorted map { + c => { + if (c == '_') '#' else c.toUpper + } + } + } + def browser =
-
+
+
+
{ letters }
+
{ def packageElem(pack: model.Package): NodeSeq = { diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js index 1323a06c01..70073b272a 100644 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js @@ -335,8 +335,7 @@ function keyboardScrolldownLeftPane() { /* Configures the text filter */ function configureTextFilter() { scheduler.add("init", function() { - $("#filter").append("
"); - printAlphabet(); + $("#textfilter").append(""); var input = $("#textfilter input"); resizeFilterBlock(); input.bind('keyup', function(event) { @@ -532,19 +531,3 @@ function kindFilterSync() { function resizeFilterBlock() { $("#tpl").css("top", $("#filter").outerHeight(true)); } - -function printAlphabet() { - var html = '#'; - var c; - for (c = 'a'; c <= 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) { - html += [ - '', - c.toUpperCase(), - '' - ].join(''); - } - $("#filter").append('
' + html + '
'); -} - diff --git a/test/scaladoc/run/SI-6017.scala b/test/scaladoc/run/SI-6017.scala index a4950e48d8..e729dca67d 100644 --- a/test/scaladoc/run/SI-6017.scala +++ b/test/scaladoc/run/SI-6017.scala @@ -16,6 +16,9 @@ object Test extends ScaladocModelTest { val index = IndexModelFactory.makeIndex(universe) // Because "STAR" and "Star" are different assert(index.firstLetterIndex('s').keys.toSeq.length == 2) + + val indexPage = new Index(universe, index) + assert(indexPage.letters.length == 1) } case _ => assert(false) } -- cgit v1.2.3 From 0e8d8c735e9a4590d3f74b549b8453f46335709a Mon Sep 17 00:00:00 2001 From: Kato Kazuyoshi Date: Fri, 1 Feb 2013 22:53:45 +0900 Subject: SI-6017 Scaladoc: Show all letters without dangling links Use <span> instead of <a> if there is no page on the letter. --- .../scala/tools/nsc/doc/html/page/Index.scala | 20 +++++++++++++------- .../scala/tools/nsc/doc/html/resource/lib/index.css | 6 +++++- test/scaladoc/run/SI-6017.scala | 4 +++- 3 files changed, 21 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala index 2f83741d4e..c76bdc58d9 100644 --- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala +++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala @@ -48,14 +48,20 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
- def letters: NodeSeq = { - val xs = index.firstLetterIndex.keys.toSeq - xs.sorted map { - c => { - if (c == '_') '#' else c.toUpper - } + def letters: NodeSeq = + '_' +: ('a' to 'z') map { + char => { + val label = if (char == '_') '#' else char.toUpper + + index.firstLetterIndex.get(char) match { + case Some(_) => + { + label + } + case None => { label } + } + } } - } def browser =
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css index 2a8f9b570a..55fb370a41 100644 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css +++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css @@ -206,7 +206,7 @@ h1 { border-right:0; } -#letters > a { +#letters > a, #letters > span { /* font-family: monospace;*/ color: #858484; font-weight: bold; @@ -214,6 +214,10 @@ h1 { text-shadow: #ffffff 0 1px 0; padding-right: 2px; } + +#letters > span { + color: #bbb; +} #tpl { display: block; diff --git a/test/scaladoc/run/SI-6017.scala b/test/scaladoc/run/SI-6017.scala index e729dca67d..9951534c6d 100644 --- a/test/scaladoc/run/SI-6017.scala +++ b/test/scaladoc/run/SI-6017.scala @@ -18,7 +18,9 @@ object Test extends ScaladocModelTest { assert(index.firstLetterIndex('s').keys.toSeq.length == 2) val indexPage = new Index(universe, index) - assert(indexPage.letters.length == 1) + val letters = indexPage.letters + assert(letters.length > 1) + assert(letters(0).toString == "#") } case _ => assert(false) } -- cgit v1.2.3 From bc01614c5349c30bc74ae85370a059422c793175 Mon Sep 17 00:00:00 2001 From: "Paolo G. Giarrusso" Date: Mon, 4 Feb 2013 07:08:08 +0100 Subject: Revert "SI-6422: add missing Fractional and Integral alias in scala package" This reverts commit c6866a28faf67cd2e455f9a0a829859a73e38819 because it adds two members to the API and so breaks forward binary compatibility. Attention: this is only intended for 2.10.x, not for 2.11. --- src/library/scala/package.scala | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala index d3f8df9110..84f6f0be9c 100644 --- a/src/library/scala/package.scala +++ b/src/library/scala/package.scala @@ -95,10 +95,7 @@ package object scala { val Equiv = scala.math.Equiv type Fractional[T] = scala.math.Fractional[T] - val Fractional = scala.math.Fractional - type Integral[T] = scala.math.Integral[T] - val Integral = scala.math.Integral type Numeric[T] = scala.math.Numeric[T] val Numeric = scala.math.Numeric -- cgit v1.2.3 From 057417236845b689f29c221037e1d06d2b6e43bb Mon Sep 17 00:00:00 2001 From: James Iry Date: Mon, 4 Feb 2013 08:23:30 -0800 Subject: SI-5833 Fixes tail-of-Nil problem in RefinedType#normalizeImpl RefinedType#normalizeImpl was checking to see if the flattened list of parents had an empty tail then pulling the head if so. But if the list was empty then boom. This fix makes it check if the whole list has length 1 instead. Empty lists will flow through to the rest the logic which has no problems with Nil. 
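The failure mode in the message above can be reproduced with plain lists; in this sketch `lengthCompare` stands in for the compiler's internal `hasLength` helper.

    object TailOfNilDemo extends App {
      val single = List("AnyRef")
      val none   = List.empty[String]

      println(single.tail.isEmpty)          // true: the single-parent case the old check targeted
      // println(none.tail.isEmpty)         // throws UnsupportedOperationException: tail of empty list

      // Checking the length directly is safe for Nil as well:
      println(single.lengthCompare(1) == 0) // true
      println(none.lengthCompare(1) == 0)   // false, and no exception
    }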
--- src/reflect/scala/reflect/internal/Types.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 9d4bdab837..0dd98fb6ae 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1800,7 +1800,7 @@ trait Types extends api.Types { self: SymbolTable => // TODO see comments around def intersectionType and def merge def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) } val flattened = flatten(parents).distinct - if (decls.isEmpty && flattened.tail.isEmpty) { + if (decls.isEmpty && hasLength(flattened, 1)) { flattened.head } else if (flattened != parents) { refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition) @@ -3542,7 +3542,7 @@ trait Types extends api.Types { self: SymbolTable => if (phase.erasedTypes) if (parents.isEmpty) ObjectClass.tpe else parents.head else { - val clazz = owner.newRefinementClass(pos) // TODO: why were we passing in NoPosition instead of pos? + val clazz = owner.newRefinementClass(pos) val result = RefinedType(parents, decls, clazz) clazz.setInfo(result) result -- cgit v1.2.3 From b67f8e57f4381024c0fba109ebe11d4006472d83 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Feb 2013 19:53:32 +0100 Subject: [nomerge] SI-6667 Demote a new ambiguity error to a lint warning. In the interests of not breaking source compability. A few projects are relying on this bug. Should not be merged to master. --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 7 ++++++- test/files/neg/t6667.check | 3 ++- test/files/neg/t6667.flags | 1 + test/files/neg/t6667b.check | 3 ++- test/files/neg/t6667b.flags | 1 + 5 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t6667.flags create mode 100644 test/files/neg/t6667b.flags diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index ec3a0a0ef7..d1cf9b1904 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1319,12 +1319,17 @@ trait Implicits { // `materializeImplicit` does some preprocessing for `pt` // is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`? - if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, false) + if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false) if (result.isFailure) { context.updateBuffer(previousErrs) if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart) } else { + if (wasAmbigious && settings.lint.value) + reporter.warning(tree.pos, + "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. 
\n" + + previousErrs.map(_.errMsg).mkString("\n")) + if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart) if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits) } diff --git a/test/files/neg/t6667.check b/test/files/neg/t6667.check index 43313fa4fe..b04251d7c1 100644 --- a/test/files/neg/t6667.check +++ b/test/files/neg/t6667.check @@ -1,4 +1,5 @@ -t6667.scala:8: error: ambiguous implicit values: +t6667.scala:8: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. +ambiguous implicit values: both value inScope1 in object Test of type => C and value inScope2 in object Test of type => C match expected type C diff --git a/test/files/neg/t6667.flags b/test/files/neg/t6667.flags new file mode 100644 index 0000000000..6c1dd108ae --- /dev/null +++ b/test/files/neg/t6667.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint \ No newline at end of file diff --git a/test/files/neg/t6667b.check b/test/files/neg/t6667b.check index 99cea9a47c..5d56e776c3 100644 --- a/test/files/neg/t6667b.check +++ b/test/files/neg/t6667b.check @@ -4,7 +4,8 @@ t6667b.scala:16: error: ambiguous implicit values: match expected type Test.Box new Test() ^ -t6667b.scala:19: error: ambiguous implicit values: +t6667b.scala:19: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. +ambiguous implicit values: both value a in object Test of type => Test.Box and value b of type Test.Box match expected type Test.Box diff --git a/test/files/neg/t6667b.flags b/test/files/neg/t6667b.flags new file mode 100644 index 0000000000..6c1dd108ae --- /dev/null +++ b/test/files/neg/t6667b.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint \ No newline at end of file -- cgit v1.2.3 From 0bcdf71a96d6d0b6276801efbe49081f0ae7749d Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Mon, 4 Feb 2013 22:06:54 +0100 Subject: pullrequest feedback https://github.com/scala/scala/pull/2040 --- .../scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 1 + src/reflect/scala/reflect/internal/AnnotationInfos.scala | 10 +++++----- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2 +- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 7ac7b4d637..976f7e038b 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1042,6 +1042,7 @@ abstract class ClassfileParser { def parseExceptions(len: Int) { val nClasses = in.nextChar for (n <- 0 until nClasses) { + // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065) val cls = pool.getClassSymbol(in.nextChar.toInt) sym.addThrowsAnnotation(cls) } diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 29879f9806..032b45316e 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -33,14 +33,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => case ThrownException(exc) => exc } - def addThrowsAnnotation(excSym: Symbol): Self = { - val excTpe = if (excSym.isMonomorphicType) excSym.tpe else { - debuglog(s"Encountered polymorphic 
exception `${excSym.fullName}` while parsing class file.") + def addThrowsAnnotation(throwableSym: Symbol): Self = { + val throwableTpe = if (throwableSym.isMonomorphicType) throwableSym.tpe else { + debuglog(s"Encountered polymorphic exception `${throwableSym.fullName}` while parsing class file.") // in case we encounter polymorphic exception the best we can do is to convert that type to // monomorphic one by introducing existentials, see SI-7009 for details - existentialAbstraction(excSym.typeParams, excSym.tpe) + existentialAbstraction(throwableSym.typeParams, throwableSym.tpe) } - val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, excTpe), List(Literal(Constant(excTpe))), Nil) + val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil) withAnnotations(List(throwsAnn)) } diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 698b60b929..ea2fc4afe9 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -616,7 +616,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni */ private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) { sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList - // FIXME: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser + // SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser val jexTpes = jann match { case jm: jMethod => jm.getExceptionTypes.toList case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList -- cgit v1.2.3 From 02dd4c974f33d137ea353a72e27efb70928fb378 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Mon, 4 Feb 2013 22:23:54 +0100 Subject: reflecting @throws defined in Scala code As per Jason's comment: How are Scala classes containing @throws annots treated? I can't figure out whether we pickle the annotation in addition to adding the exception to the signature. If we do, might we end up with duplicate annotations in runtime reflection? This warrants a test. See the context of the discussion here: https://github.com/scala/scala/pull/2040/files#r2874769. No, we won't end up with duplicates, because classes defined in Scala are loaded in a different completer. But I'll add a test - you can never have too many of those. 
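A hedged sketch of what the new test files below check, with an invented class and exception: a @throws annotation written in Scala source is pickled with the class and is visible again through runtime reflection, without being duplicated from the classfile's exception table.

    import scala.reflect.runtime.universe._

    class Parser {
      @throws[java.io.IOException]("if the underlying stream fails")
      def parse(path: String): Unit = ()
    }

    object ThrowsDemo extends App {
      // Only the pickled signature contributes the annotation here, so it shows up once.
      val parseDecls = typeOf[Parser].declarations.toList.filter(_.name.toString == "parse")
      parseDecls foreach (d => println(s"${d.name}: ${d.annotations}"))
    }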
--- test/files/run/t7008-scala-defined.check | 7 +++++++ test/files/run/t7008-scala-defined/Impls_Macros_2.scala | 12 ++++++++++++ .../ScalaClassWithCheckedExceptions_1.scala | 6 ++++++ test/files/run/t7008-scala-defined/Test_3.scala | 9 +++++++++ 4 files changed, 34 insertions(+) create mode 100644 test/files/run/t7008-scala-defined.check create mode 100644 test/files/run/t7008-scala-defined/Impls_Macros_2.scala create mode 100644 test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala create mode 100644 test/files/run/t7008-scala-defined/Test_3.scala diff --git a/test/files/run/t7008-scala-defined.check b/test/files/run/t7008-scala-defined.check new file mode 100644 index 0000000000..84ed62619e --- /dev/null +++ b/test/files/run/t7008-scala-defined.check @@ -0,0 +1,7 @@ +: List(throws[NullPointerException]("")) +bar: List(throws[E1]("")) +baz: List(throws[IllegalStateException]("")) +============= +: List(throws[NullPointerException]("")) +bar: List(throws[E1]("")) +baz: List(throws[IllegalStateException]("")) diff --git a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala new file mode 100644 index 0000000000..94fd99018e --- /dev/null +++ b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala @@ -0,0 +1,12 @@ +import language.experimental.macros +import scala.reflect.macros.Context + +object Macros { + def impl(c: Context) = { + val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList + val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL) + c.universe.reify(println(c.literal(s).splice)) + } + + def foo = macro impl +} \ No newline at end of file diff --git a/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala b/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala new file mode 100644 index 0000000000..7783c873ec --- /dev/null +++ b/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala @@ -0,0 +1,6 @@ +class ScalaClassWithCheckedExceptions_1[E1 <: Exception] @throws[NullPointerException]("") () { + @throws[E1]("") def bar() {} + @throws[IllegalStateException]("") def baz(x: Int) {} + // FIXME: SI-7066 + // @throws[E2]("") def foo[E2 <: Exception] {} +} \ No newline at end of file diff --git a/test/files/run/t7008-scala-defined/Test_3.scala b/test/files/run/t7008-scala-defined/Test_3.scala new file mode 100644 index 0000000000..03bb79d311 --- /dev/null +++ b/test/files/run/t7008-scala-defined/Test_3.scala @@ -0,0 +1,9 @@ +import scala.reflect.runtime.universe._ + +object Test extends App { + Macros.foo + println("=============") + + val decls = typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList + decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}")) +} \ No newline at end of file -- cgit v1.2.3