diff options
59 files changed, 1177 insertions, 502 deletions
diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala index cd3db74016..c1ab17027f 100644 --- a/src/compiler/scala/reflect/macros/contexts/Typers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala @@ -46,9 +46,7 @@ trait Typers { universe.analyzer.inferImplicit(tree, viewTpe, true, callsiteTyper.context, silent, withMacrosDisabled, pos, (pos, msg) => throw TypecheckException(pos, msg)) } - def resetAllAttrs(tree: Tree): Tree = universe.resetAllAttrs(universe.duplicateAndKeepPositions(tree)) + def resetLocalAttrs(tree: Tree): Tree = universe.resetAttrs(universe.duplicateAndKeepPositions(tree)) - def resetLocalAttrs(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree)) - - def untypecheck(tree: Tree): Tree = universe.resetLocalAttrs(universe.duplicateAndKeepPositions(tree)) + def untypecheck(tree: Tree): Tree = resetLocalAttrs(tree) } diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index ad0632f93e..6b0a0ee8d7 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -86,7 +86,7 @@ abstract class Reifier extends States throw new Error("reifee %s of type %s is not supported".format(reifee, if (reifee == null) "null" else reifee.getClass.toString)) } - // todo. why do we resetAllAttrs? + // todo. why do we reset attrs? // // typically we do some preprocessing before reification and // the code emitted/moved around during preprocessing is very hard to typecheck, so we leave it as it is @@ -109,19 +109,11 @@ abstract class Reifier extends States // // todo. 
this is a common problem with non-trivial macros in our current macro system // needs to be solved some day - // maybe try `resetLocalAttrs` once the dust settles - var importantSymbols = Set[Symbol]( - NothingClass, AnyClass, SingletonClass, PredefModule, ScalaRunTimeModule, TypeCreatorClass, TreeCreatorClass, MirrorClass, - ApiUniverseClass, JavaUniverseClass, ReflectRuntimePackage, runDefinitions.ReflectRuntimeCurrentMirror) - importantSymbols ++= importantSymbols map (_.companionSymbol) - importantSymbols ++= importantSymbols map (_.moduleClass) - importantSymbols ++= importantSymbols map (_.linkedClassOfClass) - def isImportantSymbol(sym: Symbol): Boolean = sym != null && sym != NoSymbol && importantSymbols(sym) - val untyped = resetAllAttrs(result, leaveAlone = { + // upd. a new hope: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ + val untyped = resetAttrs(result, leaveAlone = { case ValDef(_, u, _, _) if u == nme.UNIVERSE_SHORT => true case ValDef(_, m, _, _) if m == nme.MIRROR_SHORT => true case tree if symtab.syms contains tree.symbol => true - case tree if isImportantSymbol(tree.symbol) => true case _ => false }) diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index f6b3c42ca9..b082796757 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -140,7 +140,7 @@ trait GenTrees { if (sym == NoSymbol) { // this sometimes happens, e.g. 
for binds that don't have a body // or for untyped code generated during previous phases - // (see a comment in Reifiers about the latter, starting with "why do we resetAllAttrs?") + // (see a comment in Reifiers about the latter, starting with "why do we reset attrs?") mirrorCall(nme.Ident, reify(name)) } else if (!sym.isLocalToReifee) { diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 24bce0636d..7a7d4ac0b2 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -176,13 +176,24 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => } } - /** resets symbol and tpe fields in a tree, @see ResetAttrs - */ -// def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x } -// def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x } - - def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(false, leaveAlone).transform(x) - def resetLocalAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(true, leaveAlone).transform(x) + // Finally, no one calls resetAllAttrs anymore, so I'm removing it from the compiler. + // Even though it's with great pleasure I'm doing that, I'll leave its body here to warn future generations about what happened in the past. + // + // So what actually happened in the past is that we used to have two flavors of resetAttrs: resetAllAttrs and resetLocalAttrs. + // resetAllAttrs destroyed all symbols and types in the tree in order to reset its state to something suitable for retypechecking + // and/or embedding into bigger trees / different lexical scopes. (Btw here's some background on why people would want to use + // reset attrs in the first place: https://groups.google.com/forum/#!topic/scala-internals/TtCTPlj_qcQ). 
+ // + // However resetAllAttrs was more of a poison than of a treatment, because along with locally defined symbols that are the cause + // for almost every or maybe even every case of tree corruption, it erased external bindings that sometimes could not be restored. + // This is how we came up with resetLocalAttrs that left external bindings alone, and that was a big step forward. + // Then slowly but steadily we've evicted all usages of resetAllAttrs from our codebase in favor of resetLocalAttrs + // and have been living happily ever after. + // + // def resetAllAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(localOnly = false, leaveAlone).transform(x) + + /** @see ResetAttrs */ + def resetAttrs(x: Tree, leaveAlone: Tree => Boolean = null): Tree = new ResetAttrs(leaveAlone).transform(x) /** A transformer which resets symbol and tpe fields of all nodes in a given tree, * with special treatment of: @@ -193,7 +204,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => * * (bq:) This transformer has mutable state and should be discarded after use */ - private class ResetAttrs(localOnly: Boolean, leaveAlone: Tree => Boolean = null, keepLabels: Boolean = false) { + private class ResetAttrs(leaveAlone: Tree => Boolean = null) { // this used to be based on -Ydebug, but the need for logging in this code is so situational // that I've reverted to a hard-coded constant here. val debug = false @@ -277,29 +288,18 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => // vetoXXX local variables declared below describe the conditions under which we cannot erase symbols. // // The first reason to not erase symbols is the threat of non-idempotency (SI-5464). - // Here we take care of labels (SI-5562) and references to package classes (SI-5705). + // Here we take care of references to package classes (SI-5705). // There are other non-idempotencies, but they are not worked around yet. 
// - // The second reason has to do with the fact that resetAttrs itself has limited usefulness. - // - // First of all, why do we need resetAttrs? Gor one, it's absolutely required to move trees around. - // One cannot just take a typed tree from one lexical context and transplant it somewhere else. - // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797). - // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck. - // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well... - // - // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something - // in one lexical scope, it can get resolved to something else. - // - // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys - // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree. - // That's what localOnly and vetoScope are for. + // The second reason has to do with the fact that resetAttrs needs to be less destructive. + // Erasing locally-defined symbols is useful to prevent tree corruption, but erasing external bindings is not, + // therefore we want to retain those bindings, especially given that restoring them can be impossible + // if we move these trees into lexical contexts different from their original locations. 
if (dupl.hasSymbol) { val sym = dupl.symbol - val vetoScope = localOnly && !(locals contains sym) - val vetoLabel = keepLabels && sym.isLabel + val vetoScope = !(locals contains sym) val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass - if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol + if (!(vetoScope || vetoThis)) dupl.symbol = NoSymbol } dupl.clearType() } @@ -308,10 +308,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => } def transform(x: Tree): Tree = { - if (localOnly) new MarkLocals().traverse(x) - if (localOnly && debug) { + if (debug) { assert(locals.size == orderedLocals.size) val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString EOL trace("locals (%d total): %n".format(orderedLocals.size))(msg) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index e5907e1a0f..5b7956a757 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -457,7 +457,9 @@ trait Contexts { self: Analyzer => c.prefix = prefixInChild c.enclClass = if (isTemplateOrPackage) c else enclClass c(ConstructorSuffix) = !isTemplateOrPackage && c(ConstructorSuffix) - c.enclMethod = if (isDefDef) c else enclMethod + + // SI-8245 `isLazy` needs to skip lazy getters to ensure `return` binds to the right place + c.enclMethod = if (isDefDef && !owner.isLazy) c else enclMethod registerContext(c.asInstanceOf[analyzer.Context]) debuglog("[context] ++ " + c.unit + " / " + tree.summaryString) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 645f267a21..632e25aa2e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1205,7 +1205,7 @@ trait Namers extends MethodSynthesis { * flag. 
*/ private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) { - val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetLocalAttrs(ddef.duplicate) + val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate) // having defs here is important to make sure that there's no sneaky tree sharing // in methods with multiple default parameters def rtparams = rtparams0.map(_.duplicate) @@ -1292,7 +1292,7 @@ trait Namers extends MethodSynthesis { return // fix #3649 (prevent crash in erroneous source code) } } - val ClassDef(_, _, rtparams, _) = resetLocalAttrs(cdef.duplicate) + val ClassDef(_, _, rtparams, _) = resetAttrs(cdef.duplicate) defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT))) nmr } diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 46ff98875f..6a4df415ae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -173,7 +173,7 @@ trait NamesDefaults { self: Analyzer => // setSymbol below is important because the 'selected' function might be overloaded. by // assigning the correct method symbol, typedSelect will just assign the type. the reason // to still call 'typed' is to correctly infer singleton types, SI-5259. - val selectPos = + val selectPos = if(qual.pos.isRange && baseFun.pos.isRange) qual.pos.union(baseFun.pos).withStart(Math.min(qual.pos.end, baseFun.pos.end)) else baseFun.pos val f = blockTyper.typedOperator(Select(newQual, selected).setSymbol(baseFun1.symbol).setPos(selectPos)) @@ -287,7 +287,7 @@ trait NamesDefaults { self: Analyzer => } else { // TODO In 83c9c764b, we tried to a stable type here to fix SI-7234. 
But the resulting TypeTree over a - // singleton type without an original TypeTree fails to retypecheck after a resetLocalAttrs (SI-7516), + // singleton type without an original TypeTree fails to retypecheck after a resetAttrs (SI-7516), // which is important for (at least) macros. arg.tpe } @@ -310,7 +310,7 @@ trait NamesDefaults { self: Analyzer => new ChangeOwnerTraverser(context.owner, sym) traverse arg // fixes #4502 if (repeated) arg match { case WildcardStarArg(expr) => expr - case _ => blockTyper typed gen.mkSeqApply(resetLocalAttrs(arg)) + case _ => blockTyper typed gen.mkSeqApply(resetAttrs(arg)) } else arg } Some(atPos(body.pos)(ValDef(sym, body).setType(NoType))) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index f7684b93af..2125e281f0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1414,7 +1414,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case TypeRef(pre, sym, args) => tree match { case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types - // FIXME: reconcile this check with one in resetAllAttrs + // FIXME: reconcile this check with one in resetAttrs case _ => checkUndesiredProperties(sym, tree.pos) } if(sym.isJavaDefined) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 101e1526fe..10fe530445 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -832,7 +832,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else tpr.typed(withImplicitArgs, mode, pt) } orElse { _ => - val resetTree = resetLocalAttrs(original) + val resetTree = resetAttrs(original) debuglog(s"fallback on implicits: ${tree}/$resetTree") val tree1 = 
typed(resetTree, mode) // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that @@ -2307,7 +2307,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper context.scope.unlink(ldef.symbol) val sym2 = namer.enterInScope( context.owner.newLabel(ldef.name, ldef.pos) setInfo MethodType(List(), restpe)) - val rhs2 = typed(resetAllAttrs(ldef.rhs), restpe) + val LabelDef(_, _, rhs1) = resetAttrs(ldef) + val rhs2 = typed(rhs1, restpe) ldef.params foreach (param => param setType param.symbol.tpe) deriveLabelDef(ldef)(_ => rhs2) setSymbol sym2 setType restpe } @@ -2589,7 +2590,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // // Well behaved trees satisfy the property: // - // typed(tree) == typed(resetLocalAttrs(typed(tree)) + // typed(tree) == typed(resetAttrs(typed(tree)) // // Trees constructed without low-level symbol manipulation get this for free; // references to local symbols are cleared by `ResetAttrs`, but bind to the diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index ffac29b4b8..cc2d9141ce 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -52,13 +52,13 @@ trait Unapplies extends ast.TreeDSL { } private def constrParamss(cdef: ClassDef): List[List[ValDef]] = { - val ClassDef(_, _, _, Template(_, _, body)) = resetLocalAttrs(cdef.duplicate) + val ClassDef(_, _, _, Template(_, _, body)) = resetAttrs(cdef.duplicate) val DefDef(_, _, _, vparamss, _, _) = treeInfo firstConstructor body vparamss } private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = { - val ClassDef(_, _, tparams, _) = resetLocalAttrs(cdef.duplicate) + val ClassDef(_, _, tparams, _) = resetAttrs(cdef.duplicate) val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT))) 
tparamsInvariant } diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala index bb0bbd79a3..8630ecf69e 100644 --- a/src/compiler/scala/tools/reflect/FastTrack.scala +++ b/src/compiler/scala/tools/reflect/FastTrack.scala @@ -20,8 +20,8 @@ trait FastTrack { private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers - private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = - new { val c: c0.type = c0 } with MacroImplementations + private implicit def context2macroimplementations(c0: MacroContext): FormatInterpolator { val c: c0.type } = + new { val c: c0.type = c0 } with FormatInterpolator private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } = new { val c: c0.type = c0 } with QuasiquoteImpls private def makeBlackbox(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) = @@ -48,7 +48,7 @@ trait FastTrack { makeBlackbox( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) }, makeBlackbox( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) }, makeBlackbox( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }, - makeBlackbox( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) }, + makeBlackbox( StringContext_f) { case _ => _.interpolate }, makeBlackbox(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree }, makeWhitebox( QuasiquoteClass_api_apply) { case _ => _.expandQuasiquote }, makeWhitebox(QuasiquoteClass_api_unapply) { case _ => _.expandQuasiquote } diff --git 
a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala new file mode 100644 index 0000000000..d5e674ebae --- /dev/null +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -0,0 +1,329 @@ +package scala.tools.reflect + +import scala.reflect.macros.runtime.Context +import scala.collection.mutable.{ ListBuffer, Stack } +import scala.reflect.internal.util.Position +import scala.PartialFunction.cond +import scala.util.matching.Regex.Match + +import java.util.{ Formatter, Formattable, IllegalFormatException } + +abstract class FormatInterpolator { + val c: Context + val global: c.universe.type = c.universe + + import c.universe.{ Match => _, _ } + import definitions._ + import treeInfo.Applied + + @inline private def truly(body: => Unit): Boolean = { body ; true } + @inline private def falsely(body: => Unit): Boolean = { body ; false } + + private def fail(msg: String) = c.abort(c.enclosingPosition, msg) + private def bail(msg: String) = global.abort(msg) + + def interpolate: Tree = c.macroApplication match { + //case q"$_(..$parts).f(..$args)" => + case Applied(Select(Apply(_, parts), _), _, argss) => + val args = argss.flatten + def badlyInvoked = (parts.length != args.length + 1) && truly { + def because(s: String) = s"too $s arguments for interpolated string" + val (p, msg) = + if (parts.length == 0) (c.prefix.tree.pos, "there are no parts") + else if (args.length + 1 < parts.length) + (if (args.isEmpty) c.enclosingPosition else args.last.pos, because("few")) + else (args(parts.length-1).pos, because("many")) + c.abort(p, msg) + } + if (badlyInvoked) c.macroApplication else interpolated(parts, args) + case other => + bail(s"Unexpected application ${showRaw(other)}") + other + } + + /** Every part except the first must begin with a conversion for + * the arg that preceded it. If the conversion is missing, "%s" + * is inserted. 
+ * + * In any other position, the only permissible conversions are + * the literals (%% and %n) or an index reference (%1$ or %<). + * + * A conversion specifier has the form: + * + * [index$][flags][width][.precision]conversion + * + * 1) "...${smth}" => okay, equivalent to "...${smth}%s" + * 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah" + * 3) "...${smth}%" => error + * 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n" + * 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%" + * 6) "...${smth}[%legalJavaConversion]" => okay* + * 7) "...${smth}[%illegalJavaConversion]" => error + * *Legal according to [[http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html]] + */ + def interpolated(parts: List[Tree], args: List[Tree]) = { + val fstring = new StringBuilder + val evals = ListBuffer[ValDef]() + val ids = ListBuffer[Ident]() + val argStack = Stack(args: _*) + + // create a tmp val and add it to the ids passed to format + def defval(value: Tree, tpe: Type): Unit = { + val freshName = TermName(c.freshName("arg$")) + evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos + ids += Ident(freshName) + } + // Append the nth part to the string builder, possibly prepending an omitted %s first. + // Sanity-check the % fields in this part. 
+ def copyPart(part: Tree, n: Int): Unit = { + import SpecifierGroups.{ Spec, Index } + val s0 = part match { + case Literal(Constant(x: String)) => x + case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals") + } + val s = StringContext.treatEscapes(s0) + val ms = fpat findAllMatchIn s + + def errorLeading(op: Conversion) = op.errorAt(Spec, s"conversions must follow a splice; ${Conversion.literalHelp}") + + def first = n == 0 + // a conversion for the arg is required + if (!first) { + val arg = argStack.pop() + def s_%() = { + fstring append "%s" + defval(arg, AnyTpe) + } + def accept(op: Conversion) = { + if (!op.isLeading) errorLeading(op) + op.accepts(arg) match { + case Some(tpe) => defval(arg, tpe) + case None => + } + } + if (ms.hasNext) { + Conversion(ms.next, part.pos, args.size) match { + case Some(op) if op.isLiteral => s_%() + case Some(op) if op.indexed => + if (op.index map (_ == n) getOrElse true) accept(op) + else { + // either some other arg num, or '<' + c.warning(op.groupPos(Index), "Index is not this arg") + s_%() + } + case Some(op) => accept(op) + case None => + } + } else s_%() + } + // any remaining conversions must be either literals or indexed + while (ms.hasNext) { + Conversion(ms.next, part.pos, args.size) match { + case Some(op) if first && op.hasFlag('<') => op.badFlag('<', "No last arg") + case Some(op) if op.isLiteral || op.indexed => // OK + case Some(op) => errorLeading(op) + case None => + } + } + fstring append s + } + + parts.zipWithIndex foreach { + case (part, n) => copyPart(part, n) + } + + //q"{..$evals; ${fstring.toString}.format(..$ids)}" + locally { + val expr = + Apply( + Select( + Literal(Constant(fstring.toString)), + newTermName("format")), + ids.toList + ) + val p = c.macroApplication.pos + Block(evals.toList, atPos(p.focus)(expr)) setPos p.makeTransparent + } + } + + val fpat = """%(?:(\d+)\$)?([-#+ 0,(\<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r + object 
SpecifierGroups extends Enumeration { val Spec, Index, Flags, Width, Precision, CC = Value } + + val stdContextTags = new { val tc: c.type = c } with StdContextTags + import stdContextTags._ + val tagOfFormattable = typeTag[Formattable] + + /** A conversion specifier matched by `m` in the string part at `pos`, + * with `argc` arguments to interpolate. + */ + sealed trait Conversion { + def m: Match + def pos: Position + def argc: Int + + import SpecifierGroups.{ Value => SpecGroup, _ } + private def maybeStr(g: SpecGroup) = Option(m group g.id) + private def maybeInt(g: SpecGroup) = maybeStr(g) map (_.toInt) + val index: Option[Int] = maybeInt(Index) + val flags: Option[String] = maybeStr(Flags) + val width: Option[Int] = maybeInt(Width) + val precision: Option[Int] = maybeStr(Precision) map (_.drop(1).toInt) + val op: String = maybeStr(CC) getOrElse "" + + def cc: Char = if ("tT" contains op(0)) op(1) else op(0) + + def indexed: Boolean = index.nonEmpty || hasFlag('<') + def isLiteral: Boolean = false + def isLeading: Boolean = m.start(0) == 0 + def verify: Boolean = goodFlags && goodIndex + def accepts(arg: Tree): Option[Type] + + val allFlags = "-#+ 0,(<" + def hasFlag(f: Char) = (flags getOrElse "") contains f + def hasAnyFlag(fs: String) = fs exists (hasFlag) + + def badFlag(f: Char, msg: String) = { + val i = flags map (_.indexOf(f)) filter (_ >= 0) getOrElse 0 + errorAtOffset(Flags, i, msg) + } + def groupPos(g: SpecGroup) = groupPosAt(g, 0) + def groupPosAt(g: SpecGroup, i: Int) = pos withPoint (pos.point + m.start(g.id) + i) + def errorAt(g: SpecGroup, msg: String) = c.error(groupPos(g), msg) + def errorAtOffset(g: SpecGroup, i: Int, msg: String) = c.error(groupPosAt(g, i), msg) + + def noFlags = flags.isEmpty || falsely { errorAt(Flags, "flags not allowed") } + def noWidth = width.isEmpty || falsely { errorAt(Width, "width not allowed") } + def noPrecision = precision.isEmpty || falsely { errorAt(Precision, "precision not allowed") } + def only_-(msg: 
String) = { + val badFlags = (flags getOrElse "") filterNot { case '-' | '<' => true case _ => false } + badFlags.isEmpty || falsely { badFlag(badFlags(0), s"Only '-' allowed for $msg") } + } + protected def okFlags: String = allFlags + def goodFlags = { + val badFlags = flags map (_ filterNot (okFlags contains _)) + for (bf <- badFlags; f <- bf) badFlag(f, s"Illegal flag '$f'") + badFlags.getOrElse("").isEmpty + } + def goodIndex = { + if (index.nonEmpty && hasFlag('<')) + c.warning(groupPos(Index), "Argument index ignored if '<' flag is present") + val okRange = index map (i => i > 0 && i <= argc) getOrElse true + okRange || hasFlag('<') || falsely { errorAt(Index, "Argument index out of range") } + } + /** Pick the type of an arg to format from among the variants + * supported by a conversion. This is the type of the temporary, + * so failure results in an erroneous assignment to the first variant. + * A more complete message would be nice. + */ + def pickAcceptable(arg: Tree, variants: Type*): Option[Type] = + variants find (arg.tpe <:< _) orElse ( + variants find (c.inferImplicitView(arg, arg.tpe, _) != EmptyTree) + ) orElse Some(variants(0)) + } + object Conversion { + import SpecifierGroups.{ Spec, CC, Width } + def apply(m: Match, p: Position, n: Int): Option[Conversion] = { + def badCC(msg: String) = { + val dk = new ErrorXn(m, p) + val at = if (dk.op.isEmpty) Spec else CC + dk.errorAt(at, msg) + } + def cv(cc: Char) = cc match { + case 'b' | 'B' | 'h' | 'H' | 's' | 'S' => + new GeneralXn(m, p, n) + case 'c' | 'C' => + new CharacterXn(m, p, n) + case 'd' | 'o' | 'x' | 'X' => + new IntegralXn(m, p, n) + case 'e' | 'E' | 'f' | 'g' | 'G' | 'a' | 'A' => + new FloatingPointXn(m, p, n) + case 't' | 'T' => + new DateTimeXn(m, p, n) + case '%' | 'n' => + new LiteralXn(m, p, n) + case _ => + badCC(s"illegal conversion character '$cc'") + null + } + Option(m group CC.id) map (cc => cv(cc(0))) match { + case Some(x) => Option(x) filter (_.verify) + case None => + 
badCC(s"Missing conversion operator in '${m.matched}'; $literalHelp") + None + } + } + val literalHelp = "use %% for literal %, %n for newline" + } + class GeneralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { + def accepts(arg: Tree) = cc match { + case 's' | 'S' if hasFlag('#') => pickAcceptable(arg, tagOfFormattable.tpe) + case 'b' | 'B' => if (arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe) + case _ => Some(AnyTpe) + } + override protected def okFlags = cc match { + case 's' | 'S' => "-#<" + case _ => "-<" + } + } + class LiteralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { + import SpecifierGroups.Width + override val isLiteral = true + override def verify = op match { + case "%" => super.verify && noPrecision && truly(width foreach (_ => c.warning(groupPos(Width), "width ignored on literal"))) + case "n" => noFlags && noWidth && noPrecision + } + override protected val okFlags = "-" + def accepts(arg: Tree) = None + } + class CharacterXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { + override def verify = super.verify && noPrecision && only_-("c conversion") + def accepts(arg: Tree) = pickAcceptable(arg, CharTpe, ByteTpe, ShortTpe, IntTpe) + } + class IntegralXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { + override def verify = { + def d_# = (cc == 'd' && hasFlag('#') && + truly { badFlag('#', "# not allowed for d conversion") } + ) + def x_comma = (cc != 'd' && hasFlag(',') && + truly { badFlag(',', "',' only allowed for d conversion of integral types") } + ) + super.verify && noPrecision && !d_# && !x_comma + } + override def accepts(arg: Tree) = { + def isBigInt = arg.tpe <:< tagOfBigInt.tpe + val maybeOK = "+ (" + def bad_+ = cond(cc) { + case 'o' | 'x' | 'X' if hasAnyFlag(maybeOK) && !isBigInt => + maybeOK filter hasFlag foreach (badf => + badFlag(badf, s"only use '$badf' for BigInt conversions to o, x, X")) + true + } + if (bad_+) None else 
pickAcceptable(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe) + } + } + class FloatingPointXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { + override def verify = super.verify && (cc match { + case 'a' | 'A' => + val badFlags = ",(" filter hasFlag + noPrecision && badFlags.isEmpty || falsely { + badFlags foreach (badf => badFlag(badf, s"'$badf' not allowed for a, A")) + } + case _ => true + }) + def accepts(arg: Tree) = pickAcceptable(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe) + } + class DateTimeXn(val m: Match, val pos: Position, val argc: Int) extends Conversion { + import SpecifierGroups.CC + def hasCC = (op.length == 2 || + falsely { errorAt(CC, "Date/time conversion must have two characters") }) + def goodCC = ("HIklMSLNpzZsQBbhAaCYyjmdeRTrDFc" contains cc) || + falsely { errorAtOffset(CC, 1, s"'$cc' doesn't seem to be a date or time conversion") } + override def verify = super.verify && hasCC && goodCC && noPrecision && only_-("date/time conversions") + def accepts(arg: Tree) = pickAcceptable(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe) + } + class ErrorXn(val m: Match, val pos: Position) extends Conversion { + val argc = 0 + override def verify = false + def accepts(arg: Tree) = None + } +} diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala deleted file mode 100644 index a9ed419b1e..0000000000 --- a/src/compiler/scala/tools/reflect/MacroImplementations.scala +++ /dev/null @@ -1,171 +0,0 @@ -package scala.tools.reflect - -import scala.reflect.macros.contexts.Context -import scala.collection.mutable.ListBuffer -import scala.collection.mutable.Stack -import scala.reflect.internal.util.Position - -abstract class MacroImplementations { - val c: Context - - import c.universe._ - import definitions._ - - def macro_StringInterpolation_f(parts: List[Tree], args: List[Tree], origApplyPos: c.universe.Position): Tree = { - // the parts all have 
the same position information (as the expression is generated by the compiler) - // the args have correct position information - - // the following conditions can only be violated if invoked directly - if (parts.length != args.length + 1) { - if(parts.length == 0) - c.abort(c.prefix.tree.pos, "too few parts") - else if(args.length + 1 < parts.length) - c.abort(if(args.length==0) c.enclosingPosition else args.last.pos, - "too few arguments for interpolated string") - else - c.abort(args(parts.length-1).pos, - "too many arguments for interpolated string") - } - - val pi = parts.iterator - val bldr = new java.lang.StringBuilder - val evals = ListBuffer[ValDef]() - val ids = ListBuffer[Ident]() - val argStack = Stack(args : _*) - - def defval(value: Tree, tpe: Type): Unit = { - val freshName = newTermName(c.freshName("arg$")) - evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos - ids += Ident(freshName) - } - - def isFlag(ch: Char): Boolean = { - ch match { - case '-' | '#' | '+' | ' ' | '0' | ',' | '(' => true - case _ => false - } - } - - def checkType(arg: Tree, variants: Type*): Option[Type] = { - variants.find(arg.tpe <:< _).orElse( - variants.find(c.inferImplicitView(arg, arg.tpe, _) != EmptyTree).orElse( - Some(variants(0)) - ) - ) - } - - val stdContextTags = new { val tc: c.type = c } with StdContextTags - import stdContextTags._ - - def conversionType(ch: Char, arg: Tree): Option[Type] = { - ch match { - case 'b' | 'B' => - if(arg.tpe <:< NullTpe) Some(NullTpe) else Some(BooleanTpe) - case 'h' | 'H' => - Some(AnyTpe) - case 's' | 'S' => - Some(AnyTpe) - case 'c' | 'C' => - checkType(arg, CharTpe, ByteTpe, ShortTpe, IntTpe) - case 'd' | 'o' | 'x' | 'X' => - checkType(arg, IntTpe, LongTpe, ByteTpe, ShortTpe, tagOfBigInt.tpe) - case 'e' | 'E' | 'g' | 'G' | 'f' | 'a' | 'A' => - checkType(arg, DoubleTpe, FloatTpe, tagOfBigDecimal.tpe) - case 't' | 'T' => - checkType(arg, LongTpe, tagOfCalendar.tpe, tagOfDate.tpe) 
- case _ => None - } - } - - def copyString(first: Boolean): Unit = { - val strTree = pi.next() - val rawStr = strTree match { - case Literal(Constant(str: String)) => str - case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals") - } - val str = StringContext.treatEscapes(rawStr) - val strLen = str.length - val strIsEmpty = strLen == 0 - def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch - def isPercent(idx: Int) = charAtIndexIs(idx, '%') - def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%') - var idx = 0 - - def errorAtIndex(idx: Int, msg: String) = c.error(Position.offset(strTree.pos.source, strTree.pos.point + idx), msg) - def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string") - def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character") - def nonEscapedPercent(idx: Int) = errorAtIndex(idx, - "conversions must follow a splice; use %% for literal %, %n for newline") - - // STEP 1: handle argument conversion - // 1) "...${smth}" => okay, equivalent to "...${smth}%s" - // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah" - // 3) "...${smth}%" => error - // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n" - // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%" - // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html - // 7) "...${smth}[%illegalJavaConversion]" => error - if (!first) { - val arg = argStack.pop() - if (isConversion(0)) { - // PRE str is not empty and str(0) == '%' - // argument index parameter is not allowed, thus parse - // [flags][width][.precision]conversion - var pos = 1 - while (pos < strLen && isFlag(str charAt pos)) pos += 1 - while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1 - if (pos < strLen && str.charAt(pos) == '.') { - 
pos += 1 - while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1 - } - if (pos < strLen) { - conversionType(str charAt pos, arg) match { - case Some(tpe) => defval(arg, tpe) - case None => illegalConversionCharacter(pos) - } - } else { - wrongConversionString(pos - 1) - } - idx = 1 - } else { - bldr append "%s" - defval(arg, AnyTpe) - } - } - - // STEP 2: handle the rest of the text - // 1) %n tokens are left as is - // 2) %% tokens are left as is - // 3) other usages of percents are reported as errors - if (!strIsEmpty) { - while (idx < strLen) { - if (isPercent(idx)) { - if (isConversion(idx)) nonEscapedPercent(idx) - else idx += 1 // skip n and % in %n and %% - } - idx += 1 - } - bldr append (str take idx) - } - } - - copyString(first = true) - while (pi.hasNext) { - copyString(first = false) - } - - val fstring = bldr.toString -// val expr = c.reify(fstring.format((ids.map(id => Expr(id).eval)) : _*)) -// https://issues.scala-lang.org/browse/SI-5824, therefore - val expr = - Apply( - Select( - Literal(Constant(fstring)), - newTermName("format")), - List(ids: _* ) - ) - - Block(evals.toList, atPos(origApplyPos.focus)(expr)) setPos origApplyPos.makeTransparent - } - -} diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index 4c1bc794bc..02a458214f 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -71,12 +71,6 @@ trait ToolBox[U <: scala.reflect.api.Universe] { */ def inferImplicitView(tree: u.Tree, from: u.Type, to: u.Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: u.Position = u.NoPosition): u.Tree - /** Recursively resets symbols and types in a given tree. - * WARNING: Don't use this API, go for [[untypecheck]] instead. - */ - @deprecated("Use `tb.untypecheck` instead", "2.11.0") - def resetAllAttrs(tree: u.Tree): u.Tree - /** Recursively resets locally defined symbols and types in a given tree. 
* WARNING: Don't use this API, go for [[untypecheck]] instead. */ diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 4a8c91bd1b..541a915adb 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -236,7 +236,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => NoPosition)) trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value)) - val cleanedUp = resetLocalAttrs(moduledef) + val cleanedUp = resetAttrs(moduledef) trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value)) cleanedUp.asInstanceOf[ModuleDef] } @@ -385,18 +385,10 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => uitree } - def resetAllAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi => - import compilerApi._ - val ctree: compiler.Tree = importer.importTree(tree) - val ttree: compiler.Tree = compiler.resetAllAttrs(ctree) - val uttree = exporter.importTree(ttree) - uttree - } - def resetLocalAttrs(tree: u.Tree): u.Tree = withCompilerApi { compilerApi => import compilerApi._ val ctree: compiler.Tree = importer.importTree(tree) - val ttree: compiler.Tree = compiler.resetLocalAttrs(ctree) + val ttree: compiler.Tree = compiler.resetAttrs(ctree) val uttree = exporter.importTree(ttree) uttree } diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala index 0c73214745..017e966f63 100644 --- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala +++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala @@ -190,6 +190,10 @@ trait Reifiers { self: Quasiquotes => reifyBuildCall(nme.SyntacticFunction, args, body) case SyntacticIdent(name, isBackquoted) => reifyBuildCall(nme.SyntacticIdent, name, isBackquoted) + case SyntacticEmptyTypeTree() => + 
reifyBuildCall(nme.SyntacticEmptyTypeTree) + case SyntacticImport(expr, selectors) => + reifyBuildCall(nme.SyntacticImport, expr, selectors) case Q(Placeholder(Hole(tree, DotDot))) => mirrorBuildCall(nme.SyntacticBlock, tree) case Q(other) => diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 29c92a111c..47fb66744e 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -22,9 +22,9 @@ import generic.CanBuildFrom * on a map that will no longer have elements removed but will be * used heavily may save both time and storage space. * - * This map is not indended to contain more than 2^29 entries (approximately - * 500 million). The maximum capacity is 2^30, but performance will degrade - * rapidly as 2^30 is approached. + * This map is not intended to contain more than 2^29^ entries (approximately + * 500 million). The maximum capacity is 2^30^, but performance will degrade + * rapidly as 2^30^ is approached. 
* */ final class AnyRefMap[K <: AnyRef, V] private[collection] (defaultEntry: K => V, initialBufferSize: Int, initBlank: Boolean) @@ -291,24 +291,21 @@ extends AbstractMap[K, V] private[this] val vz = _values private[this] var index = 0 - private[this] var found = false - def hasNext = found || (index<hz.length && { + def hasNext: Boolean = index<hz.length && { var h = hz(index) - if (h+h != 0) found = true - else { + while (h+h == 0) { index += 1 - while (index < hz.length && { h = hz(index); h+h == 0 }) index += 1 - found = (index < hz.length) + if (index >= hz.length) return false + h = hz(index) } - found - }) + true + } - def next = { - if (found || hasNext) { - val ans = (_keys(index).asInstanceOf[K], _values(index).asInstanceOf[V]) + def next: (K, V) = { + if (hasNext) { + val ans = (kz(index).asInstanceOf[K], vz(index).asInstanceOf[V]) index += 1 - found = false ans } else throw new NoSuchElementException("next") diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala index 3bcf751ace..ec20a89a10 100644 --- a/src/reflect/scala/reflect/api/BuildUtils.scala +++ b/src/reflect/scala/reflect/api/BuildUtils.scala @@ -198,9 +198,9 @@ private[reflect] trait BuildUtils { self: Universe => val SyntacticFunction: SyntacticFunctionExtractor trait SyntacticFunctionExtractor { - def apply(params: List[Tree], body: Tree): Tree + def apply(params: List[Tree], body: Tree): Function - def unapply(tree: Tree): Option[(List[ValDef], Tree)] + def unapply(tree: Function): Option[(List[ValDef], Tree)] } val SyntacticDefDef: SyntacticDefDefExtractor @@ -248,6 +248,13 @@ private[reflect] trait BuildUtils { self: Universe => def unapply(tree: Tree): Option[(Tree)] } + val SyntacticEmptyTypeTree: SyntacticEmptyTypeTreeExtractor + + trait SyntacticEmptyTypeTreeExtractor { + def apply(): TypeTree + def unapply(tt: TypeTree): Boolean + } + val SyntacticFor: SyntacticForExtractor val SyntacticForYield: SyntacticForExtractor @@ -283,5 
+290,11 @@ private[reflect] trait BuildUtils { self: Universe => def apply(name: Name, isBackquoted: Boolean = false): Ident def unapply(tree: Ident): Option[(Name, Boolean)] } + + val SyntacticImport: SyntacticImportExtractor + trait SyntacticImportExtractor { + def apply(expr: Tree, selectors: List[Tree]): Import + def unapply(imp: Import): Some[(Tree, List[Tree])] + } } } diff --git a/src/reflect/scala/reflect/api/Importers.scala b/src/reflect/scala/reflect/api/Importers.scala index 5667d93e29..6539137cee 100644 --- a/src/reflect/scala/reflect/api/Importers.scala +++ b/src/reflect/scala/reflect/api/Importers.scala @@ -52,7 +52,7 @@ package api * val imported = importer.importTree(tree) * * // after the tree is imported, it can be evaluated as usual - * val tree = toolBox.resetAllAttrs(imported.duplicate) + * val tree = toolBox.untypecheck(imported.duplicate) * val valueOfX = toolBox.eval(imported).asInstanceOf[T] * ... * } diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index 83da5141b9..60e00ca5fd 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -1066,7 +1066,7 @@ trait Trees { self: Universe => * UnApply( * // a dummy node that carries the type of unapplication to patmat * // the <unapply-selector> here doesn't have an underlying symbol - * // it only has a type assigned, therefore after `resetAllAttrs` this tree is no longer typeable + * // it only has a type assigned, therefore after `untypecheck` this tree is no longer typeable * Apply(Select(Ident(Foo), newTermName("unapply")), List(Ident(newTermName("<unapply-selector>")))), * // arguments of the unapply => nothing synthetic here * List(Bind(newTermName("x"), Ident(nme.WILDCARD)))), diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala index 6106339324..c5581601de 100644 --- a/src/reflect/scala/reflect/internal/BuildUtils.scala +++ 
b/src/reflect/scala/reflect/internal/BuildUtils.scala @@ -33,19 +33,19 @@ trait BuildUtils { self: SymbolTable => } def newFreeTerm(name: String, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol = - newFreeTermSymbol(newTermName(name), value, flags, origin) + newFreeTermSymbol(newTermName(name), value, flags, origin).markFlagsCompleted(mask = AllFlags) def newFreeType(name: String, flags: Long = 0L, origin: String = null): FreeTypeSymbol = - newFreeTypeSymbol(newTypeName(name), flags, origin) + newFreeTypeSymbol(newTypeName(name), flags, origin).markFlagsCompleted(mask = AllFlags) def newNestedSymbol(owner: Symbol, name: Name, pos: Position, flags: Long, isClass: Boolean): Symbol = - owner.newNestedSymbol(name, pos, flags, isClass) + owner.newNestedSymbol(name, pos, flags, isClass).markFlagsCompleted(mask = AllFlags) def setAnnotations[S <: Symbol](sym: S, annots: List[AnnotationInfo]): S = sym.setAnnotations(annots) def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = - sym.setTypeSignature(tpe) + sym.setTypeSignature(tpe).markAllCompleted() def This(sym: Symbol): Tree = self.This(sym) @@ -143,7 +143,7 @@ trait BuildUtils { self: SymbolTable => def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym - def freshTermName(prefix: String): TermName = self.freshTermName(prefix) + def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX): TermName = self.freshTermName(prefix) def freshTypeName(prefix: String): TypeName = self.freshTypeName(prefix) @@ -435,16 +435,13 @@ trait BuildUtils { self: SymbolTable => } object SyntacticFunction extends SyntacticFunctionExtractor { - def apply(params: List[Tree], body: Tree): Tree = { + def apply(params: List[Tree], body: Tree): Function = { val params0 :: Nil = mkParam(params :: Nil, PARAM) require(params0.forall { _.rhs.isEmpty }, "anonymous functions don't support parameters with default values") Function(params0, body) } - def unapply(tree: Tree): Option[(List[ValDef], 
Tree)] = tree match { - case Function(params, body) => Some((params, body)) - case _ => None - } + def unapply(tree: Function): Option[(List[ValDef], Tree)] = Function.unapply(tree) } object SyntacticNew extends SyntacticNewExtractor { @@ -537,13 +534,15 @@ trait BuildUtils { self: SymbolTable => def unapply(tree: Tree): Option[Tree] = gen.Filter.unapply(tree) } - // abstract over possible alternative representations of no type in valdef - protected object EmptyTypTree { - def unapply(tree: Tree): Boolean = tree match { - case EmptyTree => true - case tt: TypeTree if (tt.original == null || tt.original.isEmpty) => true - case _ => false - } + // If a tree in type position isn't provided by the user (e.g. `tpt` fields of + // `ValDef` and `DefDef`, function params etc), then it's going to be parsed as + // TypeTree with empty original and empty tpe. This extractor matches such trees + // so that one can write q"val x = 2" to match typecheck(q"val x = 2"). Note that + // TypeTree() is the only possible representation for empty trees in type positions. + // We used to sometimes receive EmptyTree in such cases, but not anymore. 
+ object SyntacticEmptyTypeTree extends SyntacticEmptyTypeTreeExtractor { + def apply(): TypeTree = self.TypeTree() + def unapply(tt: TypeTree): Boolean = tt.original == null || tt.original.isEmpty } // match a sequence of desugared `val $pat = $value` @@ -561,8 +560,8 @@ trait BuildUtils { self: SymbolTable => case ValDef(_, name1, _, Match(MaybeUnchecked(value), CaseDef(pat, EmptyTree, Ident(name2)) :: Nil)) :: UnPatSeq(rest) if name1 == name2 => Some((pat, value) :: rest) - // case q"${_} val $name: ${EmptyTypTree()} = $value" :: UnPatSeq(rest) => - case ValDef(_, name, EmptyTypTree(), value) :: UnPatSeq(rest) => + // case q"${_} val $name: ${SyntacticEmptyTypeTree()} = $value" :: UnPatSeq(rest) => + case ValDef(_, name, SyntacticEmptyTypeTree(), value) :: UnPatSeq(rest) => Some((Bind(name, self.Ident(nme.WILDCARD)), value) :: rest) // case q"${_} val $name: $tpt = $value" :: UnPatSeq(rest) => case ValDef(_, name, tpt, value) :: UnPatSeq(rest) => @@ -604,8 +603,8 @@ trait BuildUtils { self: SymbolTable => def unapply(tree: Tree): Option[(Tree, Tree)] = tree match { case Function(ValDef(Modifiers(PARAM, _, _), name, tpt, EmptyTree) :: Nil, body) => tpt match { - case EmptyTypTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body)) - case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body)) + case SyntacticEmptyTypeTree() => Some((Bind(name, self.Ident(nme.WILDCARD)), body)) + case _ => Some((Bind(name, Typed(self.Ident(nme.WILDCARD), tpt)), body)) } case UnVisitor(_, CaseDef(pat, EmptyTree, body) :: Nil) => Some((pat, body)) @@ -770,6 +769,146 @@ trait BuildUtils { self: SymbolTable => } def unapply(tree: Ident): Some[(Name, Boolean)] = Some((tree.name, tree.hasAttachment[BackquotedIdentifierAttachment.type])) } + + /** Facade over Imports and ImportSelectors that lets to structurally + * deconstruct/reconstruct them. + * + * Selectors are represented in the following way: + * 1. q"import foo._" <==> q"import foo.${pq"_"}" + * 2. 
q"import foo.bar" <==> q"import foo.${pq"bar"}" + * 3. q"import foo.{bar => baz}" <==> q"import foo.${pq"bar -> baz"}" + * 4. q"import foo.{bar => _}" <==> q"import foo.${pq"bar -> _"}" + * + * All names in selectors are TermNames despite the fact ImportSelector + * can theoretically contain TypeNames too (but they never do in practice.) + */ + object SyntacticImport extends SyntacticImportExtractor { + // construct/deconstruct {_} import selector + private object WildcardSelector { + def apply(offset: Int): ImportSelector = ImportSelector(nme.WILDCARD, offset, null, -1) + def unapply(sel: ImportSelector): Option[Int] = sel match { + case ImportSelector(nme.WILDCARD, offset, null, -1) => Some(offset) + case _ => None + } + } + + // construct/deconstruct {foo} import selector + private object NameSelector { + def apply(name: TermName, offset: Int): ImportSelector = ImportSelector(name, offset, name, offset) + def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match { + case ImportSelector(name1, offset1, name2, offset2) if name1 == name2 && offset1 == offset2 => + Some((name1.toTermName, offset1)) + case _ => + None + } + } + + // construct/deconstruct {foo => bar} import selector + private object RenameSelector { + def apply(name1: TermName, offset1: Int, name2: TermName, offset2: Int): ImportSelector = + ImportSelector(name1, offset1, name2, offset2) + def unapply(sel: ImportSelector): Option[(TermName, Int, TermName, Int)] = sel match { + case ImportSelector(_, _, null | nme.WILDCARD, _) => + None + case ImportSelector(name1, offset1, name2, offset2) if name1 != name2 => + Some((name1.toTermName, offset1, name2.toTermName, offset2)) + case _ => + None + } + } + + // construct/deconstruct {foo => _} import selector + private object UnimportSelector { + def apply(name: TermName, offset: Int): ImportSelector = + ImportSelector(name, offset, nme.WILDCARD, -1) + def unapply(sel: ImportSelector): Option[(TermName, Int)] = sel match { + case 
ImportSelector(name, offset, nme.WILDCARD, _) => Some((name.toTermName, offset)) + case _ => None + } + } + + // represent {_} import selector as pq"_" + private object WildcardSelectorRepr { + def apply(pos: Position): Tree = atPos(pos)(self.Ident(nme.WILDCARD)) + def unapply(tree: Tree): Option[Position] = tree match { + case self.Ident(nme.WILDCARD) => Some(tree.pos) + case _ => None + } + } + + // represent {foo} import selector as pq"foo" + private object NameSelectorRepr { + def apply(name: TermName, pos: Position): Tree = atPos(pos)(Bind(name, WildcardSelectorRepr(pos))) + def unapply(tree: Tree): Option[(TermName, Position)] = tree match { + case Bind(name, WildcardSelectorRepr(_)) => Some((name.toTermName, tree.pos)) + case _ => None + } + } + + // pq"left -> right" + private object Arrow { + def apply(left: Tree, right: Tree): Apply = + Apply(self.Ident(nme.MINGT), left :: right :: Nil) + def unapply(tree: Apply): Option[(Tree, Tree)] = tree match { + case Apply(self.Ident(nme.MINGT), left :: right :: Nil) => Some((left, right)) + case _ => None + } + } + + // represent {foo => bar} import selector as pq"foo -> bar" + private object RenameSelectorRepr { + def apply(name1: TermName, pos1: Position, name2: TermName, pos2: Position): Tree = { + val left = NameSelectorRepr(name1, pos1) + val right = NameSelectorRepr(name2, pos2) + atPos(wrappingPos(left :: right :: Nil))(Arrow(left, right)) + } + def unapply(tree: Tree): Option[(TermName, Position, TermName, Position)] = tree match { + case Arrow(NameSelectorRepr(name1, pos1), NameSelectorRepr(name2, pos2)) => + Some((name1.toTermName, pos1, name2.toTermName, pos2)) + case _ => + None + } + } + + // represent {foo => _} import selector as pq"foo -> _" + private object UnimportSelectorRepr { + def apply(name: TermName, pos: Position): Tree = + atPos(pos)(Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(pos))) + def unapply(tree: Tree): Option[(TermName, Position)] = tree match { + case 
Arrow(NameSelectorRepr(name, pos), WildcardSelectorRepr(_)) => + Some((name, pos)) + case _ => + None + } + } + + private def derivedPos(t: Tree, offset: Int): Position = + if (t.pos == NoPosition) NoPosition else t.pos.withPoint(offset) + + private def derivedOffset(pos: Position): Int = + if (pos == NoPosition) -1 else pos.point + + def apply(expr: Tree, selectors: List[Tree]): Import = { + val importSelectors = selectors.map { + case WildcardSelectorRepr(pos) => WildcardSelector(derivedOffset(pos)) + case NameSelectorRepr(name, pos) => NameSelector(name, derivedOffset(pos)) + case RenameSelectorRepr(name1, pos1, name2, pos2) => RenameSelector(name1, derivedOffset(pos1), name2, derivedOffset(pos2)) + case UnimportSelectorRepr(name, pos) => UnimportSelector(name, derivedOffset(pos)) + case tree => throw new IllegalArgumentException(s"${showRaw(tree)} doesn't correspond to import selector") + } + Import(expr, importSelectors) + } + + def unapply(imp: Import): Some[(Tree, List[Tree])] = { + val selectors = imp.selectors.map { + case WildcardSelector(offset) => WildcardSelectorRepr(derivedPos(imp, offset)) + case NameSelector(name, offset) => NameSelectorRepr(name, derivedPos(imp, offset)) + case RenameSelector(name1, offset1, name2, offset2) => RenameSelectorRepr(name1, derivedPos(imp, offset1), name2, derivedPos(imp, offset2)) + case UnimportSelector(name, offset) => UnimportSelectorRepr(name, derivedPos(imp, offset)) + } + Some((imp.expr, selectors)) + } + } } val build: BuildImpl = new BuildImpl diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 4d24f0b219..7a0c70caf6 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -30,12 +30,12 @@ trait Definitions extends api.StandardDefinitions { private def enterNewClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): ClassSymbol = { val clazz = 
owner.newClassSymbol(name, NoPosition, flags) - clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) + clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) markAllCompleted } private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long): MethodSymbol = { val msym = owner.newMethod(name.encode, NoPosition, flags) val params = msym.newSyntheticValueParams(formals) - msym setInfo MethodType(params, restpe) + msym setInfo MethodType(params, restpe) markAllCompleted } private def enterNewMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): MethodSymbol = owner.info.decls enter newMethod(owner, name, formals, restpe, flags) @@ -251,8 +251,8 @@ trait Definitions extends api.StandardDefinitions { } // top types - lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) - lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) + lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) markAllCompleted + lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) markAllCompleted lazy val ObjectClass = getRequiredClass(sn.Object.toString) // Cached types for core monomorphic classes @@ -275,7 +275,7 @@ trait Definitions extends api.StandardDefinitions { val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT) val av_constr = anyval.newClassConstructor(NoPosition) anyval.info.decls enter av_constr - anyval + anyval markAllCompleted }).asInstanceOf[ClassSymbol] def AnyVal_getClass = getMemberMethod(AnyValClass, nme.getClass_) @@ -287,8 +287,10 @@ trait Definitions extends api.StandardDefinitions { locally { this initFlags ABSTRACT | FINAL this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this) + this markAllCompleted } final override def isBottomClass = true + final override def isThreadsafe(purpose: SymbolOps): Boolean = true } final object NothingClass extends 
BottomClassSymbol(tpnme.Nothing, AnyClass) { override def isSubClass(that: Symbol) = true @@ -357,7 +359,7 @@ trait Definitions extends api.StandardDefinitions { def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit) lazy val TypeConstraintClass = requiredClass[scala.annotation.TypeConstraint] - lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL) + lazy val SingletonClass = enterNewClass(ScalaPackageClass, tpnme.Singleton, AnyTpe :: Nil, ABSTRACT | TRAIT | FINAL) markAllCompleted lazy val SerializableClass = requiredClass[scala.Serializable] lazy val JavaSerializableClass = requiredClass[java.io.Serializable] modifyInfo fixupAsAnyTrait lazy val ComparableClass = requiredClass[java.lang.Comparable[_]] modifyInfo fixupAsAnyTrait @@ -1127,6 +1129,7 @@ trait Definitions extends api.StandardDefinitions { lazy val AnnotationDefaultAttr: ClassSymbol = { val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L) sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym) + markAllCompleted(sym) RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match { case existing :: _ => existing.asInstanceOf[ClassSymbol] @@ -1226,7 +1229,7 @@ trait Definitions extends api.StandardDefinitions { val tparam = clazz.newSyntheticTypeParam("T0", flags) val parents = List(AnyRefTpe, parentFn(tparam)) - clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz)) + clazz setInfo GenPolyType(List(tparam), ClassInfoType(parents, newScope, clazz)) markAllCompleted } def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): MethodSymbol = { @@ -1237,7 +1240,7 @@ trait Definitions extends api.StandardDefinitions { case (_, restpe) => NullaryMethodType(restpe) } - msym setInfoAndEnter genPolyType(tparams, mtpe) + msym setInfoAndEnter genPolyType(tparams, mtpe) markAllCompleted } /** T1 
means one type parameter. diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala index 1de8d425ad..7e9a568266 100644 --- a/src/reflect/scala/reflect/internal/FreshNames.scala +++ b/src/reflect/scala/reflect/internal/FreshNames.scala @@ -8,7 +8,7 @@ package internal import scala.reflect.internal.util.FreshNameCreator -trait FreshNames { self: Names => +trait FreshNames { self: Names with StdNames => // SI-6879 Keeps track of counters that are supposed to be globally unique // as opposed to traditional freshers that are unique to compilation units. val globalFreshNameCreator = new FreshNameCreator @@ -17,8 +17,8 @@ trait FreshNames { self: Names => def currentFreshNameCreator: FreshNameCreator // create fresh term/type name using implicit fresh name creator - def freshTermName(prefix: String = "x$")(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix)) - def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix)) + def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX)(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix)) + def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix)) // Extractor that matches names which were generated by some // FreshNameCreator with known prefix. Extracts user-specified @@ -36,4 +36,4 @@ trait FreshNames { self: Names => else Some(NameTransformer.decode(sname.replaceFirst(quotedCreatorPrefix, "").replaceAll("\\d*$", ""))) } } -}
\ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index 483d0dd656..ff91b08ea1 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -4,6 +4,7 @@ package internal import scala.collection.mutable.WeakHashMap import scala.ref.WeakReference +import scala.reflect.internal.Flags._ // SI-6241: move importers to a mirror trait Importers extends api.Importers { to: SymbolTable => @@ -87,6 +88,7 @@ trait Importers extends api.Importers { to: SymbolTable => } my setInfo GenPolyType(mytypeParams, importType(theirCore)) my setAnnotations (their.annotations map importAnnotationInfo) + markAllCompleted(my) } } } finally { @@ -142,6 +144,7 @@ trait Importers extends api.Importers { to: SymbolTable => myowner.newTypeSymbol(myname.toTypeName, mypos, myflags) } symMap.weakUpdate(their, my) + markFlagsCompleted(my)(mask = AllFlags) my setInfo recreatedSymbolCompleter(my, their) } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 53475be479..679186f938 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -295,22 +295,23 @@ trait StdNames { protected implicit def createNameType(name: String): TermName = newTermNameCached(name) /** Base strings from which synthetic names are derived. 
*/ - val BITMAP_PREFIX = "bitmap$" - val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$" - val DEFAULT_GETTER_STRING = "$default$" - val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING - val DO_WHILE_PREFIX = "doWhile$" - val EVIDENCE_PARAM_PREFIX = "evidence$" - val EXCEPTION_RESULT_PREFIX = "exceptionResult" - val EXPAND_SEPARATOR_STRING = "$$" - val INTERPRETER_IMPORT_WRAPPER = "$iw" - val LOCALDUMMY_PREFIX = "<local " // owner of local blocks - val PROTECTED_PREFIX = "protected$" - val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set" - val SUPER_PREFIX_STRING = "super$" - val WHILE_PREFIX = "while$" - val FRESH_PREFIX = "fresh" - val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names + val BITMAP_PREFIX = "bitmap$" + val CHECK_IF_REFUTABLE_STRING = "check$ifrefutable$" + val DEFAULT_GETTER_STRING = "$default$" + val DEFAULT_GETTER_INIT_STRING = NameTransformer.encode("<init>") + DEFAULT_GETTER_STRING + val DO_WHILE_PREFIX = "doWhile$" + val EVIDENCE_PARAM_PREFIX = "evidence$" + val EXCEPTION_RESULT_PREFIX = "exceptionResult" + val EXPAND_SEPARATOR_STRING = "$$" + val FRESH_TERM_NAME_PREFIX = "x$" + val INTERPRETER_IMPORT_WRAPPER = "$iw" + val LOCALDUMMY_PREFIX = "<local " // owner of local blocks + val PROTECTED_PREFIX = "protected$" + val PROTECTED_SET_PREFIX = PROTECTED_PREFIX + "set" + val SUPER_PREFIX_STRING = "super$" + val WHILE_PREFIX = "while$" + val FRESH_PREFIX = "fresh" + val FRESH_SUFFIX = "macro$" // uses a keyword to avoid collisions with mangled names // Compiler internal names val ANYname: NameType = "<anyname>" @@ -602,12 +603,14 @@ trait StdNames { val SyntacticBlock: NameType = "SyntacticBlock" val SyntacticClassDef: NameType = "SyntacticClassDef" val SyntacticDefDef: NameType = "SyntacticDefDef" + val SyntacticEmptyTypeTree: NameType = "SyntacticEmptyTypeTree" val SyntacticFilter: NameType = "SyntacticFilter" val SyntacticFor: NameType = "SyntacticFor" val SyntacticForYield: 
NameType = "SyntacticForYield" val SyntacticFunction: NameType = "SyntacticFunction" val SyntacticFunctionType: NameType = "SyntacticFunctionType" val SyntacticIdent: NameType = "SyntacticIdent" + val SyntacticImport: NameType = "SyntacticImport" val SyntacticMatch: NameType = "SyntacticMatch" val SyntacticNew: NameType = "SyntacticNew" val SyntacticObjectDef: NameType = "SyntacticObjectDef" @@ -838,6 +841,7 @@ trait StdNames { val LSR = encode(">>>") val LT = encode("<") val MINUS = encode("-") + val MINGT = encode("->") val MOD = encode("%") val MUL = encode("*") val NE = encode("!=") diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index b8cd1e86d3..2969bd92de 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -55,16 +55,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => def newFreeTypeSymbol(name: TypeName, flags: Long = 0L, origin: String): FreeTypeSymbol = new FreeTypeSymbol(name, origin) initFlags flags - /** Determines whether the given information request should trigger the given symbol's completer. - * See comments to `Symbol.needsInitialize` for details. - */ - protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) = - completer match { - case null => false - case _: FlagAgnosticCompleter => !isFlagRelated - case _ => abort(s"unsupported completer: $completer of class ${if (completer != null) completer.getClass else null} for symbol ${symbol.fullName}") - } - /** The original owner of a class. Used by the backend to generate * EnclosingMethod attributes. */ @@ -106,18 +96,27 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def knownDirectSubclasses = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. 
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize children } def selfType = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. + if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize typeOfThis } def baseClasses = info.baseClasses def module = sourceModule def thisPrefix: Type = thisType + + // automatic full initialization on access to info from reflection API is a double-edged sword + // on the one hand, it's convenient so that the users don't have to deal with initialization themselves before printing out stuff + // (e.g. printing out a method's signature without fully initializing it would result in <_>'s for parameters + // on the other hand, this strategy can potentially cause unexpected effects due to being inconsistent with compiler's behavior + // so far I think user convenience outweighs the scariness, but we need to keep the tradeoff in mind + // NOTE: if you end up removing the call to fullyInitializeSymbol, consider that it would affect both runtime reflection and macros def typeSignature: Type = { fullyInitializeSymbol(this); info } def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this } @@ -140,6 +139,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => with Annotatable[Symbol] with Attachable { + // makes sure that all symbols that runtime reflection deals with are synchronized + private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol] + private def isAprioriThreadsafe = isThreadsafe(AllOps) + assert(isCompilerUniverse || isSynchronized || isAprioriThreadsafe, s"unsafe symbol $initName (child of $initOwner) in runtime reflection universe") + type AccessBoundaryType = Symbol type AnnotationType = AnnotationInfo @@ -609,20 +613,55 @@ trait Symbols extends api.Symbols { self: 
SymbolTable => && isTopLevel && nme.isReplWrapperName(name) ) + + /** In our current architecture, symbols for top-level classes and modules + * are created as dummies. Package symbols just call newClass(name) or newModule(name) and + * consider their job done. + * + * In order for such a dummy to provide meaningful info (e.g. a list of its members), + * it needs to go through unpickling. Unpickling is a process of reading Scala metadata + * from ScalaSignature annotations and assigning it to symbols and types. + * + * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation + * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively + * (i.e. the pickle not only contains info about directly nested classes/modules, but also about + * classes/modules nested into those and so on). + * + * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called. + * This happens because package symbols assign completer thunks to the dummies they create. + * Therefore metadata loading happens lazily and transparently. + * + * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members). + * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin. + * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialize symbol + * produces incorrect results. + * + * One might think that the solution is simple: automatically call the completer + * whenever one needs flags, annotations and privateWithin - just like it's done for typeSignature. + * Unfortunately, this leads to weird crashes in scalac, and currently we can't attempt + * to fix the core of the compiler risk stability a few weeks before the final release. + * upd. Haha, "a few weeks before the final release". 
This surely sounds familiar :) + * + * However we do need to fix this for runtime reflection, since this idionsynchrazy is not something + * we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a + * runtime reflection universe, and if yes and if we've not yet loaded the requested info, then to commence initialization. + */ final def getFlag(mask: Long): Long = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize + if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize flags & mask } /** Does symbol have ANY flag in `mask` set? */ final def hasFlag(mask: Long): Boolean = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. + if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize (flags & mask) != 0 } def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong) /** Does symbol have ALL the flags in `mask` set? */ final def hasAllFlags(mask: Long): Boolean = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = true, mask = mask)) initialize + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. + if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize (flags & mask) == mask } @@ -789,7 +828,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815 */ - def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag) + final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag) final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass) /** Does this symbol denote the primary constructor of its enclosing class? 
*/ @@ -943,12 +982,21 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isInitialized: Boolean = validTo != NoPeriod - /** Some completers call sym.setInfo when still in-flight and then proceed with initialization (e.g. see LazyPackageType) - * setInfo sets _validTo to current period, which means that after a call to setInfo isInitialized will start returning true. - * Unfortunately, this doesn't mean that info becomes ready to be used, because subsequent initialization might change the info. - * Therefore we need this method to distinguish between initialized and really initialized symbol states. + /** We consider a symbol to be thread-safe, when multiple concurrent threads can call its methods + * (either directly or indirectly via public reflection or internal compiler infrastructure), + * without any locking and everything works as it should work. + * + * In its basic form, `isThreadsafe` always returns false. Runtime reflection augments reflection infrastructure + * with threadsafety-tracking mechanism implemented in `SynchronizedSymbol` that communicates with underlying completers + * and can sometimes return true if the symbol has been completed to the point of thread safety. + * + * The `purpose` parameter signifies whether we want to just check immutability of certain flags for the given mask. + * This is necessary to enable robust auto-initialization of `Symbol.flags` for runtime reflection, and is also quite handy + * in avoiding unnecessary initializations when requesting for flags that have already been set. */ - final def isFullyInitialized: Boolean = _validTo != NoPeriod && (flags & LOCKED) == 0 + def isThreadsafe(purpose: SymbolOps): Boolean = false + def markFlagsCompleted(mask: Long): this.type = this + def markAllCompleted(): this.type = this /** Can this symbol be loaded by a reflective mirror? 
* @@ -1225,7 +1273,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ private[this] var _privateWithin: Symbol = _ def privateWithin = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. + if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize _privateWithin } def privateWithin_=(sym: Symbol) { _privateWithin = sym } @@ -1483,46 +1532,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => catch { case _: CyclicReference => debuglog("Hit cycle in maybeInitialize of $this") ; false } } - /** Called when the programmer requests information that might require initialization of the underlying symbol. - * - * `isFlagRelated` and `mask` describe the nature of this information. - * isFlagRelated = true means that the programmer needs particular bits in flags. - * isFlagRelated = false means that the request is unrelated to flags (annotations or privateWithin). - * - * In our current architecture, symbols for top-level classes and modules - * are created as dummies. Package symbols just call newClass(name) or newModule(name) and - * consider their job done. - * - * In order for such a dummy to provide meaningful info (e.g. a list of its members), - * it needs to go through unpickling. Unpickling is a process of reading Scala metadata - * from ScalaSignature annotations and assigning it to symbols and types. - * - * A single unpickling session takes a top-level class or module, parses the ScalaSignature annotation - * and then reads metadata for the unpicklee, its companion (if any) and all their members recursively - * (i.e. the pickle not only contains info about directly nested classes/modules, but also about - * classes/modules nested into those and so on). - * - * Unpickling is triggered automatically whenever typeSignature (info in compiler parlance) is called. 
- * This happens because package symbols assign completer thunks to the dummies they create. - * Therefore metadata loading happens lazily and transparently. - * - * Almost transparently. Unfortunately metadata isn't limited to just signatures (i.e. lists of members). - * It also includes flags (which determine e.g. whether a class is sealed or not), annotations and privateWithin. - * This gives rise to unpleasant effects like in SI-6277, when a flag test called on an uninitialize symbol - * produces incorrect results. - * - * One might think that the solution is simple: automatically call the completer whenever one needs - * flags, annotations and privateWithin - just like it's done for typeSignature. Unfortunately, this - * leads to weird crashes in scalac, and currently we can't attempt to fix the core of the compiler - * risk stability a few weeks before the final release. - * - * However we do need to fix this for runtime reflection, since it's not something we'd like to - * expose to reflection users. Therefore a proposed solution is to check whether we're in a - * runtime reflection universe and if yes then to commence initialization. - */ - protected def needsInitialize(isFlagRelated: Boolean, mask: Long) = - !isInitialized && (flags & LOCKED) == 0 && shouldTriggerCompleter(this, if (infos ne null) infos.info else null, isFlagRelated, mask) - /** Was symbol's type updated during given phase? */ final def hasTypeAt(pid: Phase#Id): Boolean = { assert(isCompilerUniverse) @@ -1681,7 +1690,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * the annotations attached to member a definition (class, method, type, field). */ def annotations: List[AnnotationInfo] = { - if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. 
+ if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize _annotations } @@ -3510,6 +3520,17 @@ trait Symbols extends api.Symbols { self: SymbolTable => Statistics.newView("#symbols")(ids) + +// -------------- Completion -------------------------------------------------------- + + // is used to differentiate levels of thread-safety in `Symbol.isThreadsafe` + case class SymbolOps(isFlagRelated: Boolean, mask: Long) + val AllOps = SymbolOps(isFlagRelated = false, mask = 0L) + def FlagOps(mask: Long) = SymbolOps(isFlagRelated = true, mask = mask) + + private def relevantSymbols(syms: Seq[Symbol]) = syms.flatMap(sym => List(sym, sym.moduleClass, sym.sourceModule)) + def markFlagsCompleted(syms: Symbol*)(mask: Long): Unit = relevantSymbols(syms).foreach(_.markFlagsCompleted(mask)) + def markAllCompleted(syms: Symbol*): Unit = relevantSymbols(syms).foreach(_.markAllCompleted) } object SymbolsStats { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index ba7efc94a6..2d4e0b3b34 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1969,12 +1969,12 @@ trait Types def apply(value: Constant) = unique(new UniqueConstantType(value)) } - private var _volatileRecursions: Int = 0 - def volatileRecursions = _volatileRecursions - def volatileRecursions_=(value: Int) = _volatileRecursions = value - - private val _pendingVolatiles = new mutable.HashSet[Symbol] - def pendingVolatiles = _pendingVolatiles + /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected + * with synchronized, because they are accessed only from isVolatile, which is called only from + * Typer. 
+ */ + private var volatileRecursions: Int = 0 + private val pendingVolatiles = new mutable.HashSet[Symbol] class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) { require(args0 ne Nil, this) diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 2a19441476..42f794736a 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -275,6 +275,7 @@ abstract class UnPickler { def pflags = flags & PickledFlags def finishSym(sym: Symbol): Symbol = { + markFlagsCompleted(sym)(mask = AllFlags) sym.privateWithin = privateWithin sym.info = ( if (atEnd) { @@ -663,7 +664,7 @@ abstract class UnPickler { private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter { private val definedAtRunId = currentRunId private val p = phase - override def complete(sym: Symbol) : Unit = try { + protected def completeInternal(sym: Symbol) : Unit = try { val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType` if (p ne null) slowButSafeEnteringPhase(p) (sym setInfo tp) @@ -673,6 +674,10 @@ abstract class UnPickler { catch { case e: MissingRequirementError => throw toTypeError(e) } + override def complete(sym: Symbol) : Unit = { + completeInternal(sym) + if (!isCompilerUniverse) markAllCompleted(sym) + } override def load(sym: Symbol) { complete(sym) } } @@ -680,8 +685,9 @@ abstract class UnPickler { * of completed symbol to symbol at index `j`. 
*/ private class LazyTypeRefAndAlias(i: Int, j: Int) extends LazyTypeRef(i) { - override def complete(sym: Symbol) = try { - super.complete(sym) + override def completeInternal(sym: Symbol) = try { + super.completeInternal(sym) + var alias = at(j, readSymbol) if (alias.isOverloaded) alias = slowButSafeEnteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt)))) diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala index 222ae43d79..68e07dd319 100644 --- a/src/reflect/scala/reflect/macros/Evals.scala +++ b/src/reflect/scala/reflect/macros/Evals.scala @@ -17,13 +17,13 @@ trait Evals { * permitted by the shape of the arguments. * * Known issues: because of [[https://issues.scala-lang.org/browse/SI-5748 https://issues.scala-lang.org/browse/SI-5748]] - * trees being evaluated first need to undergo `resetAllAttrs`. Resetting symbols and types + * trees being evaluated first need to undergo `untypecheck`. Resetting symbols and types * mutates the tree in place, therefore the conventional approach is to `duplicate` the tree first. * * {{{ * scala> def impl(c: Context)(x: c.Expr[String]) = { - * | val x1 = c.Expr[String](c.resetAllAttrs(x.tree.duplicate)) - * | println(s"compile-time value is: \${c.eval(x1)}") + * | val x1 = c.Expr[String](c.untypecheck(x.tree.duplicate)) + * | println(s"compile-time value is: ${c.eval(x1)}") * | x * | } * impl: (c: Context)(x: c.Expr[String])c.Expr[String] diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 87de442921..6c077de1d2 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -68,12 +68,6 @@ trait Typers { */ def inferImplicitView(tree: Tree, from: Type, to: Type, silent: Boolean = true, withMacrosDisabled: Boolean = false, pos: Position = enclosingPosition): Tree - /** Recursively resets symbols and types in a given tree. 
- * WARNING: Don't use this API, go for [[untypecheck]] instead. - */ - @deprecated("Use `c.untypecheck` instead", "2.11.0") - def resetAllAttrs(tree: Tree): Tree - /** Recursively resets locally defined symbols and types in a given tree. * WARNING: Don't use this API, go for [[untypecheck]] instead. */ diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index d84e6aa737..bc5c8b2840 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -122,7 +122,7 @@ abstract class Universe extends scala.reflect.api.Universe { def setType(tp: Type): Tree /** Like `setType`, but if this is a previously empty TypeTree that - * fact is remembered so that resetAllAttrs will snap back. + * fact is remembered so that `untypecheck` will snap back. * * \@PP: Attempting to elaborate on the above, I find: If defineType * is called on a TypeTree whose type field is null or NoType, @@ -130,7 +130,8 @@ abstract class Universe extends scala.reflect.api.Universe { * ResetAttrsTraverser, which nulls out the type field of TypeTrees * for which wasEmpty is true, leaving the others alone. * - * resetAllAttrs is used in situations where some speculative + * `untypecheck` (or `resetAttrs` in compiler parlance) is used + * in situations where some speculative * typing of a tree takes place, fails, and the tree needs to be * returned to its former state to try again. 
So according to me: * using `defineType` instead of `setType` is how you communicate diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 7cc5176507..68c67bb1f8 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -42,7 +42,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni // overriden by ReflectGlobal def rootClassLoader: ClassLoader = this.getClass.getClassLoader - trait JavaClassCompleter extends FlagAssigningCompleter + trait JavaClassCompleter def runtimeMirror(cl: ClassLoader): Mirror = gilSynchronized { mirrors get cl match { @@ -63,10 +63,10 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private[reflect] lazy val runDefinitions = new definitions.RunDefinitions // only one "run" in the reflection universe import runDefinitions._ - override lazy val RootPackage = new RootPackage with SynchronizedTermSymbol - override lazy val RootClass = new RootClass with SynchronizedModuleClassSymbol - override lazy val EmptyPackage = new EmptyPackage with SynchronizedTermSymbol - override lazy val EmptyPackageClass = new EmptyPackageClass with SynchronizedModuleClassSymbol + override lazy val RootPackage = (new RootPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags) + override lazy val RootClass = (new RootClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags) + override lazy val EmptyPackage = (new EmptyPackage with SynchronizedTermSymbol).markFlagsCompleted(mask = AllFlags) + override lazy val EmptyPackageClass = (new EmptyPackageClass with SynchronizedModuleClassSymbol).markFlagsCompleted(mask = AllFlags) /** The lazy type for root. 
*/ @@ -575,6 +575,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni val bytes = ssig.getBytes val len = ByteCodecs.decode(bytes) unpickler.unpickle(bytes take len, 0, clazz, module, jclazz.getName) + markAllCompleted(clazz, module) case None => loadBytes[Array[String]]("scala.reflect.ScalaLongSignature") match { case Some(slsig) => @@ -583,6 +584,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni val len = ByteCodecs.decode(encoded) val decoded = encoded.take(len) unpickler.unpickle(decoded, 0, clazz, module, jclazz.getName) + markAllCompleted(clazz, module) case None => // class does not have a Scala signature; it's a Java class info("translating reflection info for Java " + jclazz) //debug @@ -605,6 +607,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni private def createTypeParameter(jtvar: jTypeVariable[_ <: GenericDeclaration]): TypeSymbol = { val tparam = sOwner(jtvar).newTypeParameter(newTypeName(jtvar.getName)) .setInfo(new TypeParamCompleter(jtvar)) + markFlagsCompleted(tparam)(mask = AllFlags) tparamCache enter (jtvar, tparam) tparam } @@ -617,6 +620,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni override def load(sym: Symbol) = complete(sym) override def complete(sym: Symbol) = { sym setInfo TypeBounds.upper(glb(jtvar.getBounds.toList map typeToScala map objToAny)) + markAllCompleted(sym) } } @@ -655,7 +659,16 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * @param module The Scala companion object for which info is copied * @param jclazz The Java class */ - private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAssigningCompleter { + private class FromJavaClassCompleter(clazz: Symbol, module: Symbol, jclazz: jClass[_]) extends LazyType with JavaClassCompleter with FlagAgnosticCompleter { + // 
one doesn't need to do non-trivial computations to assign flags for Java-based reflection artifacts + // therefore I'm moving flag-assigning logic from completion to construction + val flags = jclazz.scalaFlags + clazz setFlag (flags | JAVA) + if (module != NoSymbol) { + module setFlag (flags & PRIVATE | JAVA) + module.moduleClass setFlag (flags & PRIVATE | JAVA) + } + markFlagsCompleted(clazz, module)(mask = AllFlags) /** used to avoid cycles while initializing classes */ private var parentsLevel = 0 @@ -665,12 +678,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni override def load(sym: Symbol): Unit = { debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug assert(sym == clazz || (module != NoSymbol && (sym == module || sym == module.moduleClass)), sym) - val flags = jclazz.scalaFlags - clazz setFlag (flags | JAVA) - if (module != NoSymbol) { - module setFlag (flags & PRIVATE | JAVA) - module.moduleClass setFlag (flags & PRIVATE | JAVA) - } propagatePackageBoundary(jclazz, relatedSymbols: _*) copyAnnotations(clazz, jclazz) @@ -686,6 +693,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni override def complete(sym: Symbol): Unit = { load(sym) completeRest() + markAllCompleted(clazz, module) } def completeRest(): Unit = gilSynchronized { @@ -738,6 +746,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni class LazyPolyType(override val typeParams: List[Symbol]) extends LazyType with FlagAgnosticCompleter { override def complete(sym: Symbol) { completeRest() + markAllCompleted(clazz, module) } } } @@ -892,6 +901,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni val pkg = owner.newPackage(name) pkg.moduleClass setInfo new LazyPackageType pkg setInfoAndEnter pkg.moduleClass.tpe + markFlagsCompleted(pkg)(mask = AllFlags) info("made Scala "+pkg) pkg } else @@ -1074,6 +1084,7 @@ private[reflect] trait JavaMirrors 
extends internal.SymbolTable with api.JavaUni fieldCache.enter(jfield, field) propagatePackageBoundary(jfield, field) copyAnnotations(field, jfield) + markAllCompleted(field) field } @@ -1100,11 +1111,9 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni setMethType(meth, tparams, paramtpes, resulttpe) propagatePackageBoundary(jmeth.javaFlags, meth) copyAnnotations(meth, jmeth) - - if (jmeth.javaFlags.isVarargs) - meth modifyInfo arrayToRepeated - else - meth + if (jmeth.javaFlags.isVarargs) meth modifyInfo arrayToRepeated + markAllCompleted(meth) + meth } /** @@ -1127,6 +1136,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe)) propagatePackageBoundary(jconstr.javaFlags, constr) copyAnnotations(constr, jconstr) + markAllCompleted(constr) constr } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 8811b5513e..fb893cbff1 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -32,8 +32,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => // inaccessible: this._skolemizationLevel // inaccessible: this._undoLog // inaccessible: this._intersectionWitness - // inaccessible: this._volatileRecursions - // inaccessible: this._pendingVolatiles // inaccessible: this._subsametypeRecursions // inaccessible: this._pendingSubTypes // inaccessible: this._basetypeRecursions @@ -206,6 +204,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.NoSymbol this.CyclicReference // inaccessible: this.TypeHistory + this.SymbolOps this.TermName this.TypeName this.Liftable diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala index 30a3855d70..c56bc28d90 100644 --- 
a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala +++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala @@ -6,6 +6,7 @@ import internal.Flags import java.lang.{Class => jClass, Package => jPackage} import scala.collection.mutable import scala.reflect.runtime.ReflectionUtils.scalacShouldntLoadClass +import scala.reflect.internal.Flags._ private[reflect] trait SymbolLoaders { self: SymbolTable => @@ -17,6 +18,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => * is found, a package is created instead. */ class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter { + markFlagsCompleted(clazz, module)(mask = ~TopLevelPickledFlags) override def complete(sym: Symbol) = { debugInfo("completing "+sym+"/"+clazz.fullName) assert(sym == clazz || sym == module || sym == module.moduleClass) @@ -24,6 +26,8 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => val loadingMirror = mirrorThatLoaded(sym) val javaClass = loadingMirror.javaClass(clazz.javaClassName) loadingMirror.unpickleClass(clazz, module, javaClass) + // NOTE: can't mark as thread-safe here, because unpickleClass might decide to delegate to FromJavaClassCompleter + // if (!isCompilerUniverse) markAllCompleted(clazz, module) } } override def load(sym: Symbol) = complete(sym) @@ -64,6 +68,7 @@ private[reflect] trait SymbolLoaders { self: SymbolTable => sym setInfo new ClassInfoType(List(), new PackageScope(sym), sym) // override def safeToString = pkgClass.toString openPackageModule(sym) + markAllCompleted(sym) } } diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index ddbf3bd629..02155578f8 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -28,19 +28,4 @@ private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors w * in order to prevent memory leaks: 
http://groups.google.com/group/scala-internals/browse_thread/thread/eabcf3d406dab8b2. */ override def isCompilerUniverse = false - - /** Unlike compiler universes, reflective universes can auto-initialize symbols on flag requests. - * - * scalac wasn't designed with such auto-initialization in mind, and quite often it makes assumptions - * that flag requests won't cause initialization. Therefore enabling auto-init leads to cyclic errors. - * We could probably fix those, but at the moment it's too risky. - * - * Reflective universes share codebase with scalac, but their surface is much smaller, which means less assumptions. - * These assumptions are taken care of in this overriden `shouldTriggerCompleter` method. - */ - override protected def shouldTriggerCompleter(symbol: Symbol, completer: Type, isFlagRelated: Boolean, mask: Long) = - completer match { - case _: TopClassCompleter | _: JavaClassCompleter => !isFlagRelated || (mask & TopLevelPickledFlags) != 0 - case _ => super.shouldTriggerCompleter(symbol, completer, isFlagRelated, mask) - } } diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 1a232c8de1..f5e16c6640 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -4,6 +4,7 @@ package runtime import scala.reflect.io.AbstractFile import scala.collection.{ immutable, mutable } +import scala.reflect.internal.Flags._ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable => @@ -31,23 +32,105 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb trait SynchronizedSymbol extends Symbol { - def gilSynchronizedIfNotInited[T](body: => T): T = { - // TODO JZ desired, but prone to race conditions. We need the runtime reflection based - // type completers to establish a memory barrier upon initialization. Maybe a volatile - // write? 
We need to consult with the experts here. Until them, lock pessimistically. - // - // `run/reflection-sync-subtypes.scala` fails about 1/50 times otherwise. - // - // if (isFullyInitialized) body - // else gilSynchronized { body } + /** (Things written in this comment only applies to runtime reflection. Compile-time reflection, + * especially across phases and runs, is somewhat more complicated, but we won't be touching it, + * because at the moment we only care about synchronizing runtime reflection). + * + * As it has been noted on multiple occasions, generally speaking, reflection artifacts aren't thread-safe. + * Reasons for that differ from artifact to artifact. In some cases it's quite bad (e.g. types use a number + * of non-concurrent compiler caches, so we need to serialize certain operations on types in order to make + * sure that things stay deterministic). However, in case of symbols there's hope, because it's only during + * initializaton that symbols are thread-unsafe. After everything's set up, symbols become immutable + * (sans a few deterministic caches that can be populated simultaneously by multiple threads) and therefore thread-safe. + * + * Note that by saying "symbols become immutable" I mean literally that. In a very common case of PackageClassSymbol's, + * even when a symbol finishes its initialization and becomes immutable, its info forever remains mutable. + * Therefore even if we no longer need to synchronize a PackageClassSymbol after it's initialized, we still have to take + * care of its ClassInfoType (or, more precisely, of the underlying Scope), but that's done elsewhere, and + * here we don't need to worry about that. + * + * Okay, so now we simply check `Symbol.isInitialized` and if it's true, then everything's fine? Haha, nope! + * The thing is that some completers call sym.setInfo when still in-flight and then proceed with initialization + * (e.g. see LazyPackageType). 
Consequently, setInfo sets _validTo to current period, which means that after + * a call to setInfo isInitialized will start returning true. Unfortunately, this doesn't mean that info becomes + * ready to be used, because subsequent initialization might change the info. + * + * Therefore we need to somehow distinguish between initialized and really initialized symbol states. + * Okay, let's do it on per-completer basis. We have seven kinds of completers to worry about: + * 1) LazyPackageType that initializes packages and their underlying package classes + * 2) TopClassCompleter that initializes top-level Scala-based class-module companion pairs of static definitions + * 3) LazyTypeRef and LazyTypeRefAndAlias set up by TopClassCompleter that initialize (transitive members) of top-level classes/modules + * 4) FromJavaClassCompleter that does the same for both top-level and non-toplevel Java-based classes/modules + * 5) Fully-initialized signatures of non-class/module Java-based reflection artifacts + * 6) Importing completer that transfers metadata from one universe to another + * 7) Signatures of special symbols such as roots and symbolsNotPresentInBytecode + * + * The mechanisms underlying completion are quite complex, and it'd be only natural to suppose that over time we're going to overlook something. + * Wrt isThreadsafe we could have two wrong situations: false positives (isThreadsafe = true, but the symbol isn't actually threadsafe) + * and false negatives (isThreadsafe = false, but the symbol is actually threadsafe). However, even though both are wrong, only the former + * is actively malicious. Indeed, false positives might lead to races, inconsistent state and crashes, while the latter would only cause + * `initialize` to be called and a gil to be taken on every potentially auto-initializable operation. Unpleasant yes, but still robust. + * + * What makes me hopeful is that: + * 1) By default (e.g. 
if some new completion mechanism gets introduced for a special flavor of symbols and we forget to call markCompleted) + * isThreadsafe is always in false negative state, which is unpleasant but safe. + * 2) Calls to `markCompleted` which are the only potential source of erroneous behavior are few and are relatively easy to place: + * just put them just before your completer's `complete` returns, and you should be fine. + * + * upd. Actually, there's another problem of not keeping initialization mask up-to-date. If we're not careful enough, + * then it might so happen that getting a certain flag that the compiler assumes to be definitively set will spuriously + * return isThreadsafe(purpose = FlagsOp(<flag>)) = false and that will lead to spurious auto-initialization, + * which will cause an SO or a cyclic reference or some other crash. I've done my best to go through all possible completers + * and call `markFlagsCompleted` where appropriate, but again over time something might be overlooked, so to guard against that + * I'm only considering TopLevelPickledFlags to be sources of potential initialization. This ensures that such system flags as + * isMethod, isModule or isPackage are never going to auto-initialize. + */ + override def isThreadsafe(purpose: SymbolOps) = { + if (isCompilerUniverse) false + else if (_initialized) true + else purpose.isFlagRelated && (_initializationMask & purpose.mask & TopLevelPickledFlags) == 0 + } + + /** Communicates with completers declared in scala.reflect.runtime.SymbolLoaders + * about the status of initialization of the underlying symbol. + * + * Unfortunately, it's not as easy as just introducing the `markThreadsafe` method that would be called + * by the completers when they are really done (as opposed to `setInfo` that, as mentioned above, doesn't mean anything). 
+ * + * Since we also want to auto-initialize symbols when certain methods are being called (`Symbol.hasFlag` for example), + * we need to track the identity of the initializer, so as to block until initialization is complete if the caller + * comes from a different thread, but to skip auto-initialization if we're the initializing thread. + * + * Just a volatile var is fine, because: + * 1) Status can only be changed in a single-threaded fashion (this is enforced by gilSynchronized + * that effecively guards `Symbol.initialize`), which means that there can't be update conflicts. + * 2) If someone reads a stale value of status, then the worst thing that might happen is that this someone + * is going to spuriously call `initialize`, which is either a gil-protected operation (if the symbol isn't inited yet) + * or a no-op (if the symbol is already inited), and that is fine in both cases. + * + * upd. It looks like we also need to keep track of a mask of initialized flags to make sure + * that normal symbol initialization routines don't trigger auto-init in Symbol.flags-related routines (e.g. Symbol.getFlag). + * Due to the same reasoning as above, a single volatile var is enough for to store the mask. 
+ */ + @volatile private[this] var _initialized = false + @volatile private[this] var _initializationMask = TopLevelPickledFlags + override def markFlagsCompleted(mask: Long): this.type = { _initializationMask = _initializationMask & ~mask; this } + override def markAllCompleted(): this.type = { _initializationMask = 0L; _initialized = true; this } + + def gilSynchronizedIfNotThreadsafe[T](body: => T): T = { + // TODO: debug and fix the race that doesn't allow us uncomment this optimization + // if (isCompilerUniverse || isThreadsafe(purpose = AllOps)) body + // else gilSynchronized { body } gilSynchronized { body } } - override def validTo = gilSynchronizedIfNotInited { super.validTo } - override def info = gilSynchronizedIfNotInited { super.info } - override def rawInfo: Type = gilSynchronizedIfNotInited { super.rawInfo } + override def validTo = gilSynchronizedIfNotThreadsafe { super.validTo } + override def info = gilSynchronizedIfNotThreadsafe { super.info } + override def rawInfo: Type = gilSynchronizedIfNotThreadsafe { super.rawInfo } + override def typeSignature: Type = gilSynchronizedIfNotThreadsafe { super.typeSignature } + override def typeSignatureIn(site: Type): Type = gilSynchronizedIfNotThreadsafe { super.typeSignatureIn(site) } - override def typeParams: List[Symbol] = gilSynchronizedIfNotInited { + override def typeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe { if (isCompilerUniverse) super.typeParams else { if (isMonomorphicType) Nil @@ -63,7 +146,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb } } } - override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotInited { + override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotThreadsafe { if (isCompilerUniverse) super.unsafeTypeParams else { if (isMonomorphicType) Nil @@ -71,8 +154,6 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb } } - override def isStable: Boolean = gilSynchronized { 
super.isStable } - // ------ creators ------------------------------------------------------------------- override protected def createAbstractTypeSymbol(name: TypeName, pos: Position, newFlags: Long): AbstractTypeSymbol = @@ -126,7 +207,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb // we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible // unfortunately we cannot elide this lock, because the cache depends on `pre` private lazy val typeAsMemberOfLock = new Object - override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotInited { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } } + override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotThreadsafe { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } } } trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol @@ -135,7 +216,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb // unlike with typeConstructor, a lock is necessary here, because tpe calculation relies on // temporarily assigning NoType to tpeCache to detect cyclic reference errors private lazy val tpeLock = new Object - override def tpe_* : Type = gilSynchronizedIfNotInited { tpeLock.synchronized { super.tpe_* } } + override def tpe_* : Type = gilSynchronizedIfNotThreadsafe { tpeLock.synchronized { super.tpe_* } } } trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala index 83d471f91e..9bcf85dd6f 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala @@ -50,13 +50,6 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa private lazy val _intersectionWitness = 
mkThreadLocalStorage(perRunCaches.newWeakMap[List[Type], sWeakRef[Type]]()) override def intersectionWitness = _intersectionWitness.get - private lazy val _volatileRecursions = mkThreadLocalStorage(0) - override def volatileRecursions = _volatileRecursions.get - override def volatileRecursions_=(value: Int) = _volatileRecursions.set(value) - - private lazy val _pendingVolatiles = mkThreadLocalStorage(new mutable.HashSet[Symbol]) - override def pendingVolatiles = _pendingVolatiles.get - private lazy val _subsametypeRecursions = mkThreadLocalStorage(0) override def subsametypeRecursions = _subsametypeRecursions.get override def subsametypeRecursions_=(value: Int) = _subsametypeRecursions.set(value) diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check index 457f497f2f..703846ad62 100644 --- a/test/files/neg/stringinterpolation_macro-neg.check +++ b/test/files/neg/stringinterpolation_macro-neg.check @@ -1,61 +1,61 @@ -stringinterpolation_macro-neg.scala:8: error: too few parts +stringinterpolation_macro-neg.scala:13: error: there are no parts new StringContext().f() ^ -stringinterpolation_macro-neg.scala:9: error: too few arguments for interpolated string +stringinterpolation_macro-neg.scala:14: error: too few arguments for interpolated string new StringContext("", " is ", "%2d years old").f(s) ^ -stringinterpolation_macro-neg.scala:10: error: too many arguments for interpolated string +stringinterpolation_macro-neg.scala:15: error: too many arguments for interpolated string new StringContext("", " is ", "%2d years old").f(s, d, d) ^ -stringinterpolation_macro-neg.scala:11: error: too few arguments for interpolated string +stringinterpolation_macro-neg.scala:16: error: too few arguments for interpolated string new StringContext("", "").f() ^ -stringinterpolation_macro-neg.scala:14: error: type mismatch; +stringinterpolation_macro-neg.scala:19: error: type mismatch; found : String required: Boolean f"$s%b" 
^ -stringinterpolation_macro-neg.scala:15: error: type mismatch; +stringinterpolation_macro-neg.scala:20: error: type mismatch; found : String required: Char f"$s%c" ^ -stringinterpolation_macro-neg.scala:16: error: type mismatch; +stringinterpolation_macro-neg.scala:21: error: type mismatch; found : Double required: Char f"$f%c" ^ -stringinterpolation_macro-neg.scala:17: error: type mismatch; +stringinterpolation_macro-neg.scala:22: error: type mismatch; found : String required: Int f"$s%x" ^ -stringinterpolation_macro-neg.scala:18: error: type mismatch; +stringinterpolation_macro-neg.scala:23: error: type mismatch; found : Boolean required: Int f"$b%d" ^ -stringinterpolation_macro-neg.scala:19: error: type mismatch; +stringinterpolation_macro-neg.scala:24: error: type mismatch; found : String required: Int f"$s%d" ^ -stringinterpolation_macro-neg.scala:20: error: type mismatch; +stringinterpolation_macro-neg.scala:25: error: type mismatch; found : Double required: Int f"$f%o" ^ -stringinterpolation_macro-neg.scala:21: error: type mismatch; +stringinterpolation_macro-neg.scala:26: error: type mismatch; found : String required: Double f"$s%e" ^ -stringinterpolation_macro-neg.scala:22: error: type mismatch; +stringinterpolation_macro-neg.scala:27: error: type mismatch; found : Boolean required: Double f"$b%f" ^ -stringinterpolation_macro-neg.scala:27: error: type mismatch; +stringinterpolation_macro-neg.scala:32: error: type mismatch; found : String required: Int Note that implicit conversions are not applicable because they are ambiguous: @@ -64,7 +64,109 @@ Note that implicit conversions are not applicable because they are ambiguous: are possible conversion functions from String to Int f"$s%d" ^ -stringinterpolation_macro-neg.scala:30: error: illegal conversion character +stringinterpolation_macro-neg.scala:35: error: illegal conversion character 'i' f"$s%i" ^ -15 errors found +stringinterpolation_macro-neg.scala:38: error: Illegal flag '+' + f"$s%+ 0,(s" + ^ 
+stringinterpolation_macro-neg.scala:38: error: Illegal flag ' ' + f"$s%+ 0,(s" + ^ +stringinterpolation_macro-neg.scala:38: error: Illegal flag '0' + f"$s%+ 0,(s" + ^ +stringinterpolation_macro-neg.scala:38: error: Illegal flag ',' + f"$s%+ 0,(s" + ^ +stringinterpolation_macro-neg.scala:38: error: Illegal flag '(' + f"$s%+ 0,(s" + ^ +stringinterpolation_macro-neg.scala:39: error: Only '-' allowed for c conversion + f"$c%#+ 0,(c" + ^ +stringinterpolation_macro-neg.scala:40: error: # not allowed for d conversion + f"$d%#d" + ^ +stringinterpolation_macro-neg.scala:41: error: ',' only allowed for d conversion of integral types + f"$d%,x" + ^ +stringinterpolation_macro-neg.scala:42: error: only use '+' for BigInt conversions to o, x, X + f"$d%+ (x" + ^ +stringinterpolation_macro-neg.scala:42: error: only use ' ' for BigInt conversions to o, x, X + f"$d%+ (x" + ^ +stringinterpolation_macro-neg.scala:42: error: only use '(' for BigInt conversions to o, x, X + f"$d%+ (x" + ^ +stringinterpolation_macro-neg.scala:43: error: ',' not allowed for a, A + f"$f%,(a" + ^ +stringinterpolation_macro-neg.scala:43: error: '(' not allowed for a, A + f"$f%,(a" + ^ +stringinterpolation_macro-neg.scala:44: error: Only '-' allowed for date/time conversions + f"$t%#+ 0,(tT" + ^ +stringinterpolation_macro-neg.scala:47: error: precision not allowed + f"$c%.2c" + ^ +stringinterpolation_macro-neg.scala:48: error: precision not allowed + f"$d%.2d" + ^ +stringinterpolation_macro-neg.scala:49: error: precision not allowed + f"%.2%" + ^ +stringinterpolation_macro-neg.scala:50: error: precision not allowed + f"%.2n" + ^ +stringinterpolation_macro-neg.scala:51: error: precision not allowed + f"$f%.2a" + ^ +stringinterpolation_macro-neg.scala:52: error: precision not allowed + f"$t%.2tT" + ^ +stringinterpolation_macro-neg.scala:55: error: No last arg + f"%<s" + ^ +stringinterpolation_macro-neg.scala:56: error: No last arg + f"%<c" + ^ +stringinterpolation_macro-neg.scala:57: error: No last arg + 
f"%<tT" + ^ +stringinterpolation_macro-neg.scala:58: error: Argument index out of range + f"${8}%d ${9}%d%3$$d" + ^ +stringinterpolation_macro-neg.scala:59: error: Argument index out of range + f"${8}%d ${9}%d%0$$d" + ^ +stringinterpolation_macro-neg.scala:62: warning: Index is not this arg + f"${8}%d ${9}%1$$d" + ^ +stringinterpolation_macro-neg.scala:63: warning: Argument index ignored if '<' flag is present + f"$s%s $s%s %1$$<s" + ^ +stringinterpolation_macro-neg.scala:64: warning: Index is not this arg + f"$s%s $s%1$$s" + ^ +stringinterpolation_macro-neg.scala:67: error: type mismatch; + found : String + required: java.util.Formattable + f"$s%#s" + ^ +stringinterpolation_macro-neg.scala:70: error: 'G' doesn't seem to be a date or time conversion + f"$t%tG" + ^ +stringinterpolation_macro-neg.scala:71: error: Date/time conversion must have two characters + f"$t%t" + ^ +stringinterpolation_macro-neg.scala:72: error: Missing conversion operator in '%10.5'; use %% for literal %, %n for newline + f"$s%10.5" + ^ +stringinterpolation_macro-neg.scala:75: error: conversions must follow a splice; use %% for literal %, %n for newline + f"${d}random-leading-junk%d" + ^ +three warnings found +45 errors found diff --git a/test/files/neg/stringinterpolation_macro-neg.scala b/test/files/neg/stringinterpolation_macro-neg.scala index ac9d97d678..3869d42d66 100644 --- a/test/files/neg/stringinterpolation_macro-neg.scala +++ b/test/files/neg/stringinterpolation_macro-neg.scala @@ -3,6 +3,11 @@ object Test extends App { val d = 8 val b = false val f = 3.14159 + val c = 'c' + val t = new java.util.Date + val x = new java.util.Formattable { + def formatTo(ff: java.util.Formatter, g: Int, w: Int, p: Int): Unit = ff format "xxx" + } // 1) number of arguments new StringContext().f() @@ -28,4 +33,44 @@ object Test extends App { } f"$s%i" + + // 3) flag mismatches + f"$s%+ 0,(s" + f"$c%#+ 0,(c" + f"$d%#d" + f"$d%,x" + f"$d%+ (x" + f"$f%,(a" + f"$t%#+ 0,(tT" + + // 4) bad precisions + 
f"$c%.2c" + f"$d%.2d" + f"%.2%" + f"%.2n" + f"$f%.2a" + f"$t%.2tT" + + // 5) bad indexes + f"%<s" + f"%<c" + f"%<tT" + f"${8}%d ${9}%d%3$$d" + f"${8}%d ${9}%d%0$$d" + + // warnings + f"${8}%d ${9}%1$$d" + f"$s%s $s%s %1$$<s" + f"$s%s $s%1$$s" + + // 6) bad arg types + f"$s%#s" + + // 7) misunderstood conversions + f"$t%tG" + f"$t%t" + f"$s%10.5" + + // 8) other brain failures + f"${d}random-leading-junk%d" } diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check index d2c40f4df8..61c33f99b1 100644 --- a/test/files/neg/t7325.check +++ b/test/files/neg/t7325.check @@ -1,13 +1,13 @@ -t7325.scala:2: error: conversions must follow a splice; use %% for literal %, %n for newline +t7325.scala:2: error: Missing conversion operator in '%'; use %% for literal %, %n for newline println(f"%") ^ -t7325.scala:4: error: conversions must follow a splice; use %% for literal %, %n for newline +t7325.scala:4: error: Missing conversion operator in '%'; use %% for literal %, %n for newline println(f"%%%") ^ -t7325.scala:6: error: conversions must follow a splice; use %% for literal %, %n for newline +t7325.scala:6: error: Missing conversion operator in '%'; use %% for literal %, %n for newline println(f"%%%%%") ^ -t7325.scala:16: error: wrong conversion string +t7325.scala:16: error: Missing conversion operator in '%'; use %% for literal %, %n for newline println(f"${0}%") ^ t7325.scala:19: error: conversions must follow a splice; use %% for literal %, %n for newline diff --git a/test/files/pos/annotated-original/M_1.scala b/test/files/pos/annotated-original/M_1.scala index e312f9abbf..84a01bcce5 100644 --- a/test/files/pos/annotated-original/M_1.scala +++ b/test/files/pos/annotated-original/M_1.scala @@ -2,6 +2,6 @@ import language.experimental.macros import scala.reflect.macros.blackbox.Context object M { - def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.resetLocalAttrs(a.tree)) + def impl(c: Context)(a: c.Expr[Any]) = c.Expr[Any](c.untypecheck(a.tree)) def m(a: 
Any) = macro impl } diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala index b02864b994..fdf9c72c31 100644 --- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala +++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala @@ -44,7 +44,7 @@ object Macros { val typeOut = c.Expr[String](q"${ttag.tpe.toString}").splice def apply(_arg: T): U = c.Expr[U](b1)(ttag.asInstanceOf[c.WeakTypeTag[U]]).splice }) - val untyped = c.resetLocalAttrs(template.tree) + val untyped = c.untypecheck(template.tree) c.Expr[T => U](untyped) case _ => sys.error("Bad function type") diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala index 9f51248095..b38687c8b3 100644 --- a/test/files/pos/t7377/Macro_1.scala +++ b/test/files/pos/t7377/Macro_1.scala @@ -2,6 +2,6 @@ import language.experimental._ import scala.reflect.macros.blackbox.Context object M { - def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.resetLocalAttrs(expr.tree))) + def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typecheck(c.untypecheck(expr.tree))) def noop[A](expr: A): A = macro noopImpl[A] } diff --git a/test/files/pos/t7516/A_1.scala b/test/files/pos/t7516/A_1.scala index 3bba19966d..3bd477dcda 100644 --- a/test/files/pos/t7516/A_1.scala +++ b/test/files/pos/t7516/A_1.scala @@ -3,7 +3,7 @@ import scala.reflect._,macros._, scala.language.experimental.macros object A { def impl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = { val r = c.universe.reify { List(t.splice) } - c.Expr[List[T]]( c.resetLocalAttrs(r.tree) ) + c.Expr[List[T]]( c.untypecheck(r.tree) ) } def demo[T](t: T): List[T] = macro impl[T] } diff --git a/test/files/pos/t8064/Macro_1.scala b/test/files/pos/t8064/Macro_1.scala index dd42950b34..9f1e6955b4 100644 --- a/test/files/pos/t8064/Macro_1.scala +++ b/test/files/pos/t8064/Macro_1.scala @@ -5,6 +5,6 @@ object Macro { def apply(a: Any): 
Any = macro impl def impl(c: Context)(a: c.Tree): c.Tree = { - c.resetLocalAttrs(a) + c.untypecheck(a) } } diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala index 624479480d..f038d8714f 100644 --- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala +++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala @@ -3,7 +3,7 @@ import scala.reflect.macros.blackbox.Context object Impls { def foo(c: Context)(x: c.Expr[Int]) = { import c.universe._ - val x1 = c.Expr[Int](c.resetAllAttrs(x.tree)) + val x1 = c.Expr[Int](c.untypecheck(x.tree)) c.Expr[Int](Literal(Constant(c.eval(x1)))) } } diff --git a/test/pending/run/reflection-sync-potpourri.scala b/test/files/run/reflection-sync-potpourri.scala index 0ad5f2ab66..0c96974df7 100644 --- a/test/pending/run/reflection-sync-potpourri.scala +++ b/test/files/run/reflection-sync-potpourri.scala @@ -24,7 +24,7 @@ object Test extends App { override def run(): Unit = { val s1 = foo("42") val s2 = perms(diceRolls(i - 1)).map(x => force(x)).sorted.mkString(", ") - assert(s1 == "java.lang.String") + assert(s1 == "String" || s1 == "java.lang.String") assert(s2 == "java.lang.annotation.Annotation, java.lang.reflect.Method, scala.io.BufferedSource, scala.io.Codec") } }) diff --git a/test/files/run/stringinterpolation_macro-run.check b/test/files/run/stringinterpolation_macro-run.check index be62c5780b..ead61e76ac 100644 --- a/test/files/run/stringinterpolation_macro-run.check +++ b/test/files/run/stringinterpolation_macro-run.check @@ -46,6 +46,8 @@ S 120 120 120 + 0X4 +She is 4 feet tall. 
120 42 3.400000e+00 @@ -60,3 +62,6 @@ S 05/26/12 05/26/12 05/26/12 +% +7 7 9 +7 9 9 diff --git a/test/files/run/stringinterpolation_macro-run.scala b/test/files/run/stringinterpolation_macro-run.scala index 1138cd0860..ff779dd1d3 100644 --- a/test/files/run/stringinterpolation_macro-run.scala +++ b/test/files/run/stringinterpolation_macro-run.scala @@ -72,6 +72,14 @@ println(f"${120 : java.lang.Integer}%d") println(f"${120 : java.lang.Long}%d") println(f"${BigInt(120)}%d") println(f"${new java.math.BigInteger("120")}%d") +println(f"${4}%#10X") + +locally { + val fff = new java.util.Formattable { + def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4") + } + println(f"She is ${fff}%#s feet tall.") +} { implicit val strToShort = (s: String) => java.lang.Short.parseShort(s) @@ -103,4 +111,11 @@ println(f"${c.getTime.getTime}%TD") implicit val strToDate = (x: String) => c println(f"""${"1234"}%TD""") + + +// literals and arg indexes +println(f"%%") +println(f"${7}%d %<d ${9}%d") +println(f"${7}%d %2$$d ${9}%d") + } diff --git a/test/files/run/t7240/Macros_1.scala b/test/files/run/t7240/Macros_1.scala index 019ddf7cd6..c6e976038d 100644 --- a/test/files/run/t7240/Macros_1.scala +++ b/test/files/run/t7240/Macros_1.scala @@ -41,7 +41,7 @@ object Bakery { def constructor = Apply(Select(New(Ident(newTypeName("eval"))), nme.CONSTRUCTOR), List()) c.eval(c.Expr[Any]( - c.resetAllAttrs(Block(composeDSL(Literal(Constant(1))), constructor)))) + c.untypecheck(Block(composeDSL(Literal(Constant(1))), constructor)))) c.Expr[Any](Literal(Constant(1))) } diff --git a/test/files/run/t8245.scala b/test/files/run/t8245.scala new file mode 100644 index 0000000000..d44defbb9e --- /dev/null +++ b/test/files/run/t8245.scala @@ -0,0 +1,14 @@ +object Test { + def foo(o: Option[Int]): Int = { + lazy val i: Int = { + def local: Int = {if ("".isEmpty) return 42; -42} + assert(local == 42) + o.getOrElse(return -1) + } + i + 1 + } + + def main(args: Array[String]) { + 
assert(foo(None) == -1) + } +} diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala index 798c7adf2e..dcd4f63a4d 100644 --- a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala @@ -8,7 +8,8 @@ object DefinitionConstructionProps with TypeDefConstruction with ValDefConstruction with DefConstruction - with PackageConstruction { + with PackageConstruction + with ImportConstruction { property("SI-6842") = test { val x: Tree = q"val x: Int" assertEqAst(q"def f($x) = 0", "def f(x: Int) = 0") @@ -363,3 +364,30 @@ trait DefConstruction { self: QuasiquoteProperties => assertEqAst(q"def foo(implicit ..$xs) = x1 + x2", "def foo(implicit x1: Int, x2: Long) = x1 + x2") } } + +trait ImportConstruction { self: QuasiquoteProperties => + property("construct wildcard import") = test { + val sel = pq"_" + assert(q"import foo.$sel" ≈ q"import foo._") + } + + property("construct named import") = test { + val sel = pq"bar" + assert(q"import foo.$sel" ≈ q"import foo.bar") + } + + property("construct renaming import") = test { + val sel = pq"bar -> baz" + assert(q"import foo.$sel" ≈ q"import foo.{bar => baz}") + } + + property("construct unimport import") = test { + val sels = pq"poison -> _" :: pq"_" :: Nil + assert(q"import foo.{..$sels}" ≈ q"import foo.{poison => _, _}") + } + + property("construct mixed import") = test { + val sels = pq"a -> b" :: pq"c -> _" :: pq"_" :: Nil + assert(q"import foo.{..$sels}" ≈ q"import foo.{a => b, c => _, _}") + } +} diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala index 88e00c734b..e2d1757d48 100644 --- a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala @@ -10,6 +10,7 
@@ object DefinitionDeconstructionProps with ValVarDeconstruction with DefDeconstruction with PackageDeconstruction + with ImportDeconstruction trait TraitDeconstruction { self: QuasiquoteProperties => property("exhaustive trait matcher") = test { @@ -219,3 +220,55 @@ trait DefDeconstruction { self: QuasiquoteProperties => assert(impl.isEmpty) } } + +trait ImportDeconstruction { self: QuasiquoteProperties => + property("exhaustive import matcher") = test { + def matches(line: String) = { + val q"import $ref.{..$sels}" = parse(line) + } + matches("import foo.bar") + matches("import foo.{bar, baz}") + matches("import foo.{a => b, c => d}") + matches("import foo.{poision => _, _}") + matches("import foo.bar.baz._") + } + + property("extract import binding") = test { + val q"import $_.$sel" = q"import foo.bar" + val pq"bar" = sel + } + + property("extract import wildcard") = test { + val q"import $_.$sel" = q"import foo._" + val pq"_" = sel + } + + property("extract import rename") = test { + val q"import $_.$sel" = q"import foo.{bar => baz}" + val pq"bar -> baz" = sel + val pq"$left -> $right" = sel + val pq"bar" = left + val pq"baz" = right + } + + property("extract import unimport") = test { + val q"import $_.$sel" = q"import foo.{bar => _}" + val pq"bar -> _" = sel + val pq"$left -> $right" = sel + val pq"bar" = left + val pq"_" = right + } + + property("splice names into import selector") = forAll { + (expr: Tree, plain: TermName, oldname: TermName, newname: TermName, discard: TermName) => + + val Import(expr1, List( + ImportSelector(plain11, _, plain12, _), + ImportSelector(oldname1, _, newname1, _), + ImportSelector(discard1, _, wildcard, _))) = + q"import $expr.{$plain, $oldname => $newname, $discard => _}" + + expr1 ≈ expr && plain11 == plain12 && plain12 == plain && + oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD + } +} diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala 
b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala index 145e51ab68..058880a25c 100644 --- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala @@ -85,19 +85,6 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") { q"$fun[..$types]" ≈ (if (types.nonEmpty) TypeApply(fun, types) else fun) } - property("splice names into import selector") = forAll { - (expr: Tree, plain: Name, oldname: Name, newname: Name, discard: Name) => - - val Import(expr1, List( - ImportSelector(plain11, _, plain12, _), - ImportSelector(oldname1, _, newname1, _), - ImportSelector(discard1, _, wildcard, _))) = - q"import $expr.{$plain, $oldname => $newname, $discard => _}" - - expr1 ≈ expr && plain11 == plain12 && plain12 == plain && - oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD - } - property("splice trees into while loop") = forAll { (cond: Tree, body: Tree) => val LabelDef(_, List(), If(cond1, Block(List(body1), Apply(_, List())), Literal(Constant(())))) = q"while($cond) $body" body1 ≈ body && cond1 ≈ cond diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala index 0984032084..78b54a4e49 100644 --- a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala +++ b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala @@ -30,7 +30,7 @@ object TypeConstructionProps extends QuasiquoteProperties("type construction") } property("empty tq") = test { - val tt: TypeTree = tq" " + val tt: TypeTree = tq"" assert(tt.tpe == null) assert(tt.original == null) } diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala index 44f110a3d5..0fdcc19052 100644 --- a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala +++ 
b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala @@ -52,4 +52,10 @@ object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction assert(arglast ≈ tq"C") assert(restpe ≈ tq"D") } + + property("match empty type tree") = test { + val tq"" = TypeTree() + // matches because type tree isn't syntactic without original + val tq"" = tq"${typeOf[Int]}" + } } diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala index 2f501435e3..3afb47952c 100644 --- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala +++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala @@ -75,4 +75,11 @@ object TypecheckedProps extends QuasiquoteProperties("typechecked") { assert(f.original ≈ pq"Test.this.Cell") assert(args ≈ List(pq"v")) } -}
\ No newline at end of file + + property("extract inferred val type") = test { + val typechecked = typecheck(q"val x = 42") + val q"val x = 42" = typechecked + val q"val x: ${tq""} = 42" = typechecked + val q"val x: ${t: Type} = 42" = typechecked + } +} diff --git a/test/junit/scala/collection/SetMapConsistencyTest.scala b/test/junit/scala/collection/SetMapConsistencyTest.scala index 7bb8ca958b..0d6f43db06 100644 --- a/test/junit/scala/collection/SetMapConsistencyTest.scala +++ b/test/junit/scala/collection/SetMapConsistencyTest.scala @@ -478,7 +478,7 @@ class SetMapConsistencyTest { } @Test - def si8213() { + def testSI8213() { val am = new scala.collection.mutable.AnyRefMap[String, Int] for (i <- 0 until 1024) am += i.toString -> i am.getOrElseUpdate("1024", { am.clear; -1 }) @@ -488,4 +488,23 @@ class SetMapConsistencyTest { lm.getOrElseUpdate(1024, { lm.clear; -1 }) assert(lm == scala.collection.mutable.LongMap(1024L -> -1)) } + + // Mutating when an iterator is in the wild shouldn't produce random junk in the iterator + // Todo: test all sets/maps this way + @Test + def testSI8154() { + def f() = { + val xs = scala.collection.mutable.AnyRefMap[String, Int]("a" -> 1) + val it = xs.iterator + it.hasNext + xs.clear() + + if (it.hasNext) Some(it.next) + else None + } + assert(f() match { + case Some((a,b)) if (a==null || b==null) => false + case _ => true + }) + } } diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala index e673da5a29..b26c442599 100644 --- a/test/pending/run/idempotency-partial-functions.scala +++ b/test/pending/run/idempotency-partial-functions.scala @@ -22,7 +22,7 @@ object Test extends App { val tb = cm.mkToolBox() val tpartials = tb.typecheck(partials.tree) println(tpartials) - val rtpartials = tb.resetAllAttrs(tpartials) + val rtpartials = tb.untypecheck(tpartials) println(tb.eval(rtpartials)) } Test.main(null)
\ No newline at end of file |