author     Felix Mulder <felix.mulder@gmail.com>    2016-11-02 11:08:28 +0100
committer  Guillaume Martres <smarter@ubuntu.com>   2016-11-22 01:35:07 +0100
commit     8a61ff432543a29234193cd1f7c14abd3f3d31a0 (patch)
tree       a8147561d307af862c295cfc8100d271063bb0dd /compiler/src/dotty/tools/dotc/core
parent     6a455fe6da5ff9c741d91279a2dc6fe2fb1b472f (diff)
download   dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.gz
           dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.tar.bz2
           dotty-8a61ff432543a29234193cd1f7c14abd3f3d31a0.zip
Move compiler and compiler tests to compiler dir
Diffstat (limited to 'compiler/src/dotty/tools/dotc/core')
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Annotations.scala | 162
-rw-r--r--  compiler/src/dotty/tools/dotc/core/CheckRealizable.scala | 132
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Comments.scala | 459
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Constants.scala | 235
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Constraint.scala | 154
-rw-r--r--  compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala | 458
-rw-r--r--  compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala | 17
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Contexts.scala | 709
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Decorators.scala | 185
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Definitions.scala | 807
-rw-r--r--  compiler/src/dotty/tools/dotc/core/DenotTransformers.scala | 78
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Denotations.scala | 1217
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Flags.scala | 640
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Hashable.scala | 103
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Mode.scala | 89
-rw-r--r--  compiler/src/dotty/tools/dotc/core/NameOps.scala | 432
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Names.scala | 372
-rw-r--r--  compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala | 636
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Periods.scala | 159
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Phases.scala | 377
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Scopes.scala | 437
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Signature.scala | 103
-rw-r--r--  compiler/src/dotty/tools/dotc/core/StdNames.scala | 844
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Substituters.scala | 306
-rw-r--r--  compiler/src/dotty/tools/dotc/core/SymDenotations.scala | 2004
-rw-r--r--  compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala | 267
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Symbols.scala | 602
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeApplications.scala | 688
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeComparer.scala | 1502
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeErasure.scala | 514
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeOps.scala | 554
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala | 40
-rw-r--r--  compiler/src/dotty/tools/dotc/core/TyperState.scala | 210
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Types.overflow | 66
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Types.scala | 3865
-rw-r--r--  compiler/src/dotty/tools/dotc/core/Uniques.scala | 128
-rw-r--r--  compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala | 88
-rw-r--r--  compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala | 221
-rw-r--r--  compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala | 378
-rw-r--r--  compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala | 1100
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala | 53
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala | 101
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala | 79
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala | 39
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala | 188
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala | 553
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala | 30
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala | 71
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala | 122
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala | 141
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala | 95
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala | 188
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala | 641
-rw-r--r--  compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala | 1161
-rw-r--r--  compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala | 299
-rw-r--r--  compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala | 1260
56 files changed, 26359 insertions, 0 deletions
diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala
new file mode 100644
index 000000000..0e8e5a1f0
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala
@@ -0,0 +1,162 @@
+package dotty.tools.dotc
+package core
+
+import Symbols._, Types._, util.Positions._, Contexts._, Constants._, ast.tpd._
+import config.ScalaVersion
+import StdNames._
+import dotty.tools.dotc.ast.{tpd, untpd}
+
+object Annotations {
+
+ abstract class Annotation {
+ def tree(implicit ctx: Context): Tree
+ def symbol(implicit ctx: Context): Symbol =
+ if (tree.symbol.isConstructor) tree.symbol.owner
+ else tree.tpe.typeSymbol
+ def matches(cls: Symbol)(implicit ctx: Context): Boolean = symbol.derivesFrom(cls)
+ def appliesToModule: Boolean = true // for now; see remark in SymDenotations
+
+ def derivedAnnotation(tree: Tree)(implicit ctx: Context) =
+ if (tree eq this.tree) this else Annotation(tree)
+
+ def arguments(implicit ctx: Context) = ast.tpd.arguments(tree)
+ def argument(i: Int)(implicit ctx: Context): Option[Tree] = {
+ val args = arguments
+ if (i < args.length) Some(args(i)) else None
+ }
+ def argumentConstant(i: Int)(implicit ctx: Context): Option[Constant] =
+ for (ConstantType(c) <- argument(i) map (_.tpe)) yield c
+
+ def ensureCompleted(implicit ctx: Context): Unit = tree
+ }
+
+ case class ConcreteAnnotation(t: Tree) extends Annotation {
+ def tree(implicit ctx: Context): Tree = t
+ }
+
+ abstract case class LazyAnnotation(sym: Symbol) extends Annotation {
+ private var myTree: Tree = null
+ def tree(implicit ctx: Context) = {
+ if (myTree == null) myTree = complete(ctx)
+ myTree
+ }
+ def complete(implicit ctx: Context): Tree
+ override def symbol(implicit ctx: Context): Symbol = sym
+ }
+
+ /** An annotation indicating the body of a right-hand side,
+ * typically of an inline method. Treated specially in
+ * pickling/unpickling and TypeTreeMaps
+ */
+ abstract class BodyAnnotation extends Annotation {
+ override def symbol(implicit ctx: Context) = defn.BodyAnnot
+ override def derivedAnnotation(tree: Tree)(implicit ctx: Context) =
+ if (tree eq this.tree) this else ConcreteBodyAnnotation(tree)
+ override def arguments(implicit ctx: Context) = Nil
+ override def ensureCompleted(implicit ctx: Context) = ()
+ }
+
+ case class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation {
+ def tree(implicit ctx: Context) = body
+ }
+
+ case class LazyBodyAnnotation(bodyExpr: Context => Tree) extends BodyAnnotation {
+ private var evaluated = false
+ private var myBody: Tree = _
+ def tree(implicit ctx: Context) = {
+ if (evaluated) assert(myBody != null)
+ else {
+ evaluated = true
+ myBody = bodyExpr(ctx)
+ }
+ myBody
+ }
+ def isEvaluated = evaluated
+ }
+
+ object Annotation {
+
+ def apply(tree: Tree) = ConcreteAnnotation(tree)
+
+ def apply(cls: ClassSymbol)(implicit ctx: Context): Annotation =
+ apply(cls, Nil)
+
+ def apply(cls: ClassSymbol, arg: Tree)(implicit ctx: Context): Annotation =
+ apply(cls, arg :: Nil)
+
+ def apply(cls: ClassSymbol, arg1: Tree, arg2: Tree)(implicit ctx: Context): Annotation =
+ apply(cls, arg1 :: arg2 :: Nil)
+
+ def apply(cls: ClassSymbol, args: List[Tree])(implicit ctx: Context): Annotation =
+ apply(cls.typeRef, args)
+
+ def apply(atp: Type, arg: Tree)(implicit ctx: Context): Annotation =
+ apply(atp, arg :: Nil)
+
+ def apply(atp: Type, arg1: Tree, arg2: Tree)(implicit ctx: Context): Annotation =
+ apply(atp, arg1 :: arg2 :: Nil)
+
+ def apply(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation =
+ apply(New(atp, args))
+
+ private def resolveConstructor(atp: Type, args:List[Tree])(implicit ctx: Context): Tree = {
+ val targs = atp.argTypes
+ tpd.applyOverloaded(New(atp withoutArgs targs), nme.CONSTRUCTOR, args, targs, atp, isAnnotConstructor = true)
+ }
+
+ def applyResolve(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation = {
+ apply(resolveConstructor(atp, args))
+ }
+
+ def deferred(sym: Symbol, treeFn: Context => Tree)(implicit ctx: Context): Annotation =
+ new LazyAnnotation(sym) {
+ def complete(implicit ctx: Context) = treeFn(ctx)
+ }
+
+ def deferred(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation =
+ deferred(atp.classSymbol, implicit ctx => New(atp, args))
+
+ def deferredResolve(atp: Type, args: List[Tree])(implicit ctx: Context): Annotation =
+ deferred(atp.classSymbol, implicit ctx => resolveConstructor(atp, args))
+
+ def makeAlias(sym: TermSymbol)(implicit ctx: Context) =
+ apply(defn.AliasAnnot, List(
+ ref(TermRef.withSigAndDenot(sym.owner.thisType, sym.name, sym.signature, sym))))
+
+ def makeChild(sym: Symbol)(implicit ctx: Context) =
+ deferred(defn.ChildAnnot,
+ implicit ctx => New(defn.ChildAnnotType.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil))
+
+ def makeSourceFile(path: String)(implicit ctx: Context) =
+ apply(defn.SourceFileAnnot, Literal(Constant(path)))
+ }
+
+ def ThrowsAnnotation(cls: ClassSymbol)(implicit ctx: Context) = {
+ val tref = cls.typeRef
+ Annotation(defn.ThrowsAnnotType.appliedTo(tref), Ident(tref))
+ }
+
+ /** A decorator that provides queries for specific annotations
+ * of a symbol.
+ */
+ implicit class AnnotInfo(val sym: Symbol) extends AnyVal {
+
+ def isDeprecated(implicit ctx: Context) =
+ sym.hasAnnotation(defn.DeprecatedAnnot)
+
+ def deprecationMessage(implicit ctx: Context) =
+ for (annot <- sym.getAnnotation(defn.DeprecatedAnnot);
+ arg <- annot.argumentConstant(0))
+ yield arg.stringValue
+
+ def migrationVersion(implicit ctx: Context) =
+ for (annot <- sym.getAnnotation(defn.MigrationAnnot);
+ arg <- annot.argumentConstant(1))
+ yield ScalaVersion.parse(arg.stringValue)
+
+ def migrationMessage(implicit ctx: Context) =
+ for (annot <- sym.getAnnotation(defn.MigrationAnnot);
+ arg <- annot.argumentConstant(0))
+ yield ScalaVersion.parse(arg.stringValue)
+ }
+}
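
The `LazyAnnotation` and `LazyBodyAnnotation` classes above defer building the annotation tree until it is first requested and then cache the result, so completion runs at most once (`LazyBodyAnnotation` uses an `evaluated` flag rather than a null check so that even a null body can be cached). A minimal standalone sketch of that memoization pattern; the `Ann`/`LazyAnn` names are illustrative only and not part of the compiler API:

// Sketch: lazy completion with caching, in the spirit of LazyAnnotation.tree.
object LazyCompletionDemo {
  abstract class Ann {
    def payload: String                            // stands in for Annotation.tree
    def ensureCompleted(): Unit = { payload; () }  // forcing, like ensureCompleted
  }

  class LazyAnn(complete: () => String) extends Ann {
    private var cached: String = null
    def payload: String = {
      if (cached == null) cached = complete()      // evaluate the thunk at most once
      cached
    }
  }

  def main(args: Array[String]): Unit = {
    var evaluations = 0
    val ann = new LazyAnn(() => { evaluations += 1; "tree" })
    ann.ensureCompleted()
    ann.payload
    assert(evaluations == 1)                       // the second access hits the cache
    println(s"payload = ${ann.payload}, completed $evaluations time(s)")
  }
}
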
diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
new file mode 100644
index 000000000..78ec685fc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala
@@ -0,0 +1,132 @@
+package dotty.tools
+package dotc
+package core
+
+import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
+import SymDenotations._, Denotations.SingleDenotation
+import util.Positions._
+import Decorators._
+import StdNames._
+import Annotations._
+import collection.mutable
+import ast.tpd._
+
+/** Realizability status */
+object CheckRealizable {
+
+ abstract class Realizability(val msg: String) {
+ def andAlso(other: => Realizability) =
+ if (this == Realizable) other else this
+ def mapError(f: Realizability => Realizability) =
+ if (this == Realizable) this else f(this)
+ }
+
+ object Realizable extends Realizability("")
+
+ object NotConcrete extends Realizability(" is not a concrete type")
+
+ object NotStable extends Realizability(" is not a stable reference")
+
+ class NotFinal(sym: Symbol)(implicit ctx: Context)
+ extends Realizability(i" refers to nonfinal $sym")
+
+ class HasProblemBounds(typ: SingleDenotation)(implicit ctx: Context)
+ extends Realizability(i" has a member $typ with possibly conflicting bounds ${typ.info.bounds.lo} <: ... <: ${typ.info.bounds.hi}")
+
+ class HasProblemField(fld: SingleDenotation, problem: Realizability)(implicit ctx: Context)
+ extends Realizability(i" has a member $fld which is not a legal path\n since ${fld.symbol.name}: ${fld.info}${problem.msg}")
+
+ class ProblemInUnderlying(tp: Type, problem: Realizability)(implicit ctx: Context)
+ extends Realizability(i"s underlying type ${tp}${problem.msg}") {
+ assert(problem != Realizable)
+ }
+
+ def realizability(tp: Type)(implicit ctx: Context) =
+ new CheckRealizable().realizability(tp)
+
+ def boundsRealizability(tp: Type)(implicit ctx: Context) =
+ new CheckRealizable().boundsRealizability(tp)
+}
+
+/** Compute realizability status */
+class CheckRealizable(implicit ctx: Context) {
+ import CheckRealizable._
+
+ /** A set of all fields that have already been checked. Used
+ * to avoid infinite recursions when analyzing recursive types.
+ */
+ private val checkedFields: mutable.Set[Symbol] = mutable.LinkedHashSet[Symbol]()
+
+ /** Is symbol's definition a lazy val?
+ * (note we exclude modules here, because their realizability is ensured separately)
+ */
+ private def isLateInitialized(sym: Symbol) = sym.is(Lazy, butNot = Module)
+
+ /** The realizability status of the given type `tp` */
+ def realizability(tp: Type): Realizability = tp.dealias match {
+ case tp: TermRef =>
+ val sym = tp.symbol
+ if (sym.is(Stable)) realizability(tp.prefix)
+ else {
+ val r =
+ if (!sym.isStable) NotStable
+ else if (!isLateInitialized(sym)) realizability(tp.prefix)
+ else if (!sym.isEffectivelyFinal) new NotFinal(sym)
+ else realizability(tp.info).mapError(r => new ProblemInUnderlying(tp.info, r))
+ if (r == Realizable) sym.setFlag(Stable)
+ r
+ }
+ case _: SingletonType | NoPrefix =>
+ Realizable
+ case tp =>
+ def isConcrete(tp: Type): Boolean = tp.dealias match {
+ case tp: TypeRef => tp.symbol.isClass
+ case tp: TypeProxy => isConcrete(tp.underlying)
+ case tp: AndOrType => isConcrete(tp.tp1) && isConcrete(tp.tp2)
+ case _ => false
+ }
+ if (!isConcrete(tp)) NotConcrete
+ else boundsRealizability(tp).andAlso(memberRealizability(tp))
+ }
+
+ /** `Realizable` if `tp` has good bounds, a `HasProblemBounds` instance
+ * pointing to a bad bounds member otherwise.
+ */
+ private def boundsRealizability(tp: Type) = {
+ def hasBadBounds(mbr: SingleDenotation) = {
+ val bounds = mbr.info.bounds
+ !(bounds.lo <:< bounds.hi)
+ }
+ tp.nonClassTypeMembers.find(hasBadBounds) match {
+ case Some(mbr) => new HasProblemBounds(mbr)
+ case _ => Realizable
+ }
+ }
+
+ /** `Realizable` if all of `tp`'s non-struct fields have realizable types,
+ * a `HasProblemField` instance pointing to a bad field otherwise.
+ */
+ private def memberRealizability(tp: Type) = {
+ def checkField(sofar: Realizability, fld: SingleDenotation): Realizability =
+ sofar andAlso {
+ if (checkedFields.contains(fld.symbol) || fld.symbol.is(Private | Mutable | Lazy))
+ // if field is private it cannot be part of a visible path
+ // if field is mutable it cannot be part of a path
+ // if field is lazy it does not need to be initialized when the owning object is
+ // so in all cases the field does not influence realizability of the enclosing object.
+ Realizable
+ else {
+ checkedFields += fld.symbol
+ realizability(fld.info).mapError(r => new HasProblemField(fld, r))
+ }
+ }
+ if (ctx.settings.strict.value)
+ // check fields only under strict mode for now.
+ // Reason: An embedded field could well be nullable, which means it
+ // should not be part of a path and need not be checked; but we cannot recognize
+ // this situation until we have a typesystem that tracks nullability.
+ ((Realizable: Realizability) /: tp.fields)(checkField)
+ else
+ Realizable
+ }
+}
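
Realizability results compose with `andAlso`, which keeps the first failure and only evaluates the next check while everything so far is `Realizable`. A small self-contained sketch of that first-failure-wins combinator; the status names below are illustrative, not the compiler's:

// Sketch: first-failure-wins composition, as in Realizability.andAlso.
object RealizabilityDemo {
  sealed abstract class Status(val msg: String) {
    def andAlso(other: => Status): Status =
      if (this == Ok) other else this   // keep the first failure, skip later checks
  }
  case object Ok extends Status("")
  case object NotConcrete extends Status(" is not a concrete type")
  case object NotStable extends Status(" is not a stable reference")

  def main(args: Array[String]): Unit = {
    val r = Ok andAlso NotConcrete andAlso NotStable
    assert(r == NotConcrete)            // the NotStable check is never reached
    println("first failure wins:" + r.msg)
  }
}
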
diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala
new file mode 100644
index 000000000..1e623db4d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Comments.scala
@@ -0,0 +1,459 @@
+package dotty.tools
+package dotc
+package core
+
+import ast.{ untpd, tpd }
+import Decorators._, Symbols._, Contexts._, Flags.EmptyFlags
+import util.SourceFile
+import util.Positions._
+import util.CommentParsing._
+import util.Property.Key
+import parsing.Parsers.Parser
+import reporting.diagnostic.messages.ProperDefinitionNotFound
+
+object Comments {
+ val ContextDoc = new Key[ContextDocstrings]
+
+ /** Decorator for getting docbase out of context */
+ implicit class CommentsContext(val ctx: Context) extends AnyVal {
+ def docCtx: Option[ContextDocstrings] = ctx.property(ContextDoc)
+ }
+
+ /** Context for Docstrings, contains basic functionality for getting
+ * docstrings via `Symbol` and expanding templates
+ */
+ class ContextDocstrings {
+ import scala.collection.mutable
+
+ private[this] val _docstrings: mutable.Map[Symbol, Comment] =
+ mutable.Map.empty
+
+ val templateExpander = new CommentExpander
+
+ def docstrings: Map[Symbol, Comment] = _docstrings.toMap
+
+ def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym)
+
+ def addDocstring(sym: Symbol, doc: Option[Comment]): Unit =
+ doc.map(d => _docstrings += (sym -> d))
+ }
+
+ /** A `Comment` contains the unformatted docstring as well as a position
+ *
+ * The `Comment` contains functionality to create versions of itself without
+ * `@usecase` sections as well as functionality to map the `raw` docstring
+ */
+ abstract case class Comment(pos: Position, raw: String) { self =>
+ def isExpanded: Boolean
+
+ def usecases: List[UseCase]
+
+ val isDocComment = raw.startsWith("/**")
+
+ def expand(f: String => String): Comment = new Comment(pos, f(raw)) {
+ val isExpanded = true
+ val usecases = self.usecases
+ }
+
+ def withUsecases(implicit ctx: Context): Comment = new Comment(pos, stripUsecases) {
+ val isExpanded = self.isExpanded
+ val usecases = parseUsecases
+ }
+
+ private[this] lazy val stripUsecases: String =
+ removeSections(raw, "@usecase", "@define")
+
+ private[this] def parseUsecases(implicit ctx: Context): List[UseCase] =
+ if (!raw.startsWith("/**"))
+ List.empty[UseCase]
+ else
+ tagIndex(raw)
+ .filter { startsWithTag(raw, _, "@usecase") }
+ .map { case (start, end) => decomposeUseCase(start, end) }
+
+ /** Turns a usecase section into a UseCase, with code changed to:
+ * {{{
+ * // From:
+ * def foo: A
+ * // To:
+ * def foo: A = ???
+ * }}}
+ */
+ private[this] def decomposeUseCase(start: Int, end: Int)(implicit ctx: Context): UseCase = {
+ def subPos(start: Int, end: Int) =
+ if (pos == NoPosition) NoPosition
+ else {
+ val start1 = pos.start + start
+ val end1 = pos.end + end
+ pos withStart start1 withPoint start1 withEnd end1
+ }
+
+ val codeStart = skipWhitespace(raw, start + "@usecase".length)
+ val codeEnd = skipToEol(raw, codeStart)
+ val code = raw.substring(codeStart, codeEnd) + " = ???"
+ val codePos = subPos(codeStart, codeEnd)
+ val commentStart = skipLineLead(raw, codeEnd + 1) min end
+ val commentStr = "/** " + raw.substring(commentStart, end) + "*/"
+ val commentPos = subPos(commentStart, end)
+
+ UseCase(Comment(commentPos, commentStr), code, codePos)
+ }
+ }
+
+ object Comment {
+ def apply(pos: Position, raw: String, expanded: Boolean = false, usc: List[UseCase] = Nil)(implicit ctx: Context): Comment =
+ new Comment(pos, raw) {
+ val isExpanded = expanded
+ val usecases = usc
+ }
+ }
+
+ abstract case class UseCase(comment: Comment, code: String, codePos: Position) {
+ /** Set by typer */
+ var tpdCode: tpd.DefDef = _
+
+ def untpdCode: untpd.Tree
+ }
+
+ object UseCase {
+ def apply(comment: Comment, code: String, codePos: Position)(implicit ctx: Context) =
+ new UseCase(comment, code, codePos) {
+ val untpdCode = {
+ val tree = new Parser(new SourceFile("<usecase>", code)).localDef(codePos.start, EmptyFlags)
+
+ tree match {
+ case tree: untpd.DefDef =>
+ val newName = (tree.name.show + "$" + codePos + "$doc").toTermName
+ untpd.DefDef(newName, tree.tparams, tree.vparamss, tree.tpt, tree.rhs)
+ case _ =>
+ ctx.error(ProperDefinitionNotFound(), codePos)
+ tree
+ }
+ }
+ }
+ }
+
+ /**
+ * Port of DocComment.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+ class CommentExpander {
+ import dotc.config.Printers.dottydoc
+ import scala.collection.mutable
+
+ def expand(sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val parent = if (site != NoSymbol) site else sym
+ defineVariables(parent)
+ expandedDocComment(sym, parent)
+ }
+
+ /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
+ *
+ * @param sym The symbol for which doc comment is returned
+ * @param site The class for which doc comments are generated
+ * @throws ExpansionLimitExceeded when more than 10 successive expansions
+ * of the same string are done, which is
+ * interpreted as a recursive variable definition.
+ */
+ def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(implicit ctx: Context): String = {
+ // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
+ val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym
+ else site
+ expandVariables(cookedDocComment(sym, docStr), sym, parent)
+ }
+
+ private def template(raw: String): String =
+ removeSections(raw, "@define")
+
+ private def defines(raw: String): List[String] = {
+ val sections = tagIndex(raw)
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ defines map { case (start, end) => raw.substring(start, end) }
+ }
+
+ private def replaceInheritDocToInheritdoc(docStr: String): String =
+ docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")
+
+ /** The cooked doc comment of an overridden symbol */
+ protected def superComment(sym: Symbol)(implicit ctx: Context): Option[String] =
+ allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "")
+
+ private val cookedDocComments = mutable.HashMap[Symbol, String]()
+
+ /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
+ * missing sections of an inherited doc comment.
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the doc comment of the overridden version is copied instead.
+ */
+ def cookedDocComment(sym: Symbol, docStr: String = "")(implicit ctx: Context): String = cookedDocComments.getOrElseUpdate(sym, {
+ var ownComment =
+ if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("")
+ else template(docStr)
+ ownComment = replaceInheritDocToInheritdoc(ownComment)
+
+ superComment(sym) match {
+ case None =>
+ // SI-8210 - The warning would be a false negative when this symbol is a setter
+ if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter)
+ dottydoc.println(s"${sym.pos}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
+ ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
+ case Some(sc) =>
+ if (ownComment == "") sc
+ else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
+ }
+ })
+
+ private def isMovable(str: String, sec: (Int, Int)): Boolean =
+ startsWithTag(str, sec, "@param") ||
+ startsWithTag(str, sec, "@tparam") ||
+ startsWithTag(str, sec, "@return")
+
+ def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcParams = paramDocs(src, "@param", srcSections)
+ val dstParams = paramDocs(dst, "@param", dstSections)
+ val srcTParams = paramDocs(src, "@tparam", srcSections)
+ val dstTParams = paramDocs(dst, "@tparam", dstSections)
+ val out = new StringBuilder
+ var copied = 0
+ var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
+
+ if (copyFirstPara) {
+ val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
+ (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
+ out append src.substring(0, eop).trim
+ copied = 3
+ tocopy = 3
+ }
+
+ def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
+ case Some((start, end)) =>
+ if (end > tocopy) tocopy = end
+ case None =>
+ srcSec match {
+ case Some((start1, end1)) => {
+ out append dst.substring(copied, tocopy).trim
+ out append "\n"
+ copied = tocopy
+ out append src.substring(start1, end1).trim
+ }
+ case None =>
+ }
+ }
+
+ //TODO: enable this once you know how to get `sym.paramss`
+ /*
+ for (params <- sym.paramss; param <- params)
+ mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
+ for (tparam <- sym.typeParams)
+ mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)
+
+ mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
+ mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
+ */
+
+ if (out.length == 0) dst
+ else {
+ out append dst.substring(copied)
+ out.toString
+ }
+ }
+
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
+ */
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
+ else {
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
+
+ val out = new StringBuilder
+
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
+
+ def getParentSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ dottydoc.println(s"""${sym.pos}: the """" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.")
+ "<invalid inheritdoc annotation>"
+ }
+
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
+ }
+ }
+
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
+
+ // Append sections
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
+
+ out.append("*/")
+ out.toString
+ }
+
+ protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val expandLimit = 10
+
+ def expandInternal(str: String, depth: Int): String = {
+ if (depth >= expandLimit)
+ throw new ExpansionLimitExceeded(str)
+
+ val out = new StringBuilder
+ var copied, idx = 0
+ // excluding variables written as \$foo so we can use them when
+ // necessary to document things like Symbol#decode
+ def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\'
+ while (idx < str.length) {
+ if ((str charAt idx) != '$' || isEscaped)
+ idx += 1
+ else {
+ val vstart = idx
+ idx = skipVariable(str, idx + 1)
+ def replaceWith(repl: String) = {
+ out append str.substring(copied, vstart)
+ out append repl
+ copied = idx
+ }
+ variableName(str.substring(vstart + 1, idx)) match {
+ case "super" =>
+ superComment(sym) foreach { sc =>
+ val superSections = tagIndex(sc)
+ replaceWith(sc.substring(3, startTag(sc, superSections)))
+ for (sec @ (start, end) <- superSections)
+ if (!isMovable(sc, sec)) out append sc.substring(start, end)
+ }
+ case "" => idx += 1
+ case vname =>
+ lookupVariable(vname, site) match {
+ case Some(replacement) => replaceWith(replacement)
+ case None =>
+ dottydoc.println(s"Variable $vname undefined in comment for $sym in $site")
+ }
+ }
+ }
+ }
+ if (out.length == 0) str
+ else {
+ out append str.substring(copied)
+ expandInternal(out.toString, depth + 1)
+ }
+ }
+
+ // We suppressed expanding \$ throughout the recursion, and now we
+ // need to replace \$ with $ so it looks as intended.
+ expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$")
+ }
+
+ def defineVariables(sym: Symbol)(implicit ctx: Context) = {
+ val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r
+
+ val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("")
+ defs(sym) ++= defines(raw).map {
+ str => {
+ val start = skipWhitespace(str, "@define".length)
+ val (key, value) = str.splitAt(skipVariable(str, start))
+ key.drop(start) -> value
+ }
+ } map {
+ case (key, Trim(value)) =>
+ variableName(key) -> value.replaceAll("\\s+\\*+$", "")
+ }
+ }
+
+ /** Maps symbols to the variable -> replacement maps that are defined
+ * in their doc comments
+ */
+ private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()
+
+ /** Lookup definition of variable.
+ *
+ * @param vble The variable for which a definition is searched
+ * @param site The class for which doc comments are generated
+ */
+ def lookupVariable(vble: String, site: Symbol)(implicit ctx: Context): Option[String] = site match {
+ case NoSymbol => None
+ case _ =>
+ val searchList =
+ if (site.flags.is(Flags.Module)) site :: site.info.baseClasses
+ else site.info.baseClasses
+
+ searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
+ }
+ }
+
+ /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the position of the doc comment of the overridden version is returned instead.
+ */
+ def docCommentPos(sym: Symbol)(implicit ctx: Context): Position =
+ ctx.docCtx.flatMap(_.docstring(sym).map(_.pos)).getOrElse(NoPosition)
+
+ /** A version which doesn't consider self types, as a temporary measure:
+ * an infinite loop has broken out between superComment and cookedDocComment
+ * since r23926.
+ */
+ private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = {
+ if (!sym.owner.isClass) Nil
+ else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..`
+ //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
+ }
+
+ class ExpansionLimitExceeded(str: String) extends Exception
+ }
+}
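
`expandVariables` above scans the raw comment for `$name` occurrences, substitutes each one from the `@define` map (or the `$super` comment), leaves `\$` escapes alone, and retries until a fixed point, giving up after `expandLimit` nested expansions. A simplified, self-contained sketch of a single expansion pass over a plain map; the map, the identifier rule, and the names here are assumptions for illustration, while the real lookup walks base classes via `lookupVariable`:

// Sketch: one pass of doc-comment variable expansion.
// Replaces $name with its definition; \$ escapes are left untouched.
object VariableExpansionDemo {
  def expandOnce(str: String, defs: Map[String, String]): String = {
    val out = new StringBuilder
    var idx = 0
    var copied = 0
    def isEscaped(i: Int) = i > 0 && str.charAt(i - 1) == '\\'
    while (idx < str.length) {
      if (str.charAt(idx) != '$' || isEscaped(idx)) idx += 1
      else {
        val vstart = idx
        idx += 1
        while (idx < str.length && str.charAt(idx).isLetterOrDigit) idx += 1
        val name = str.substring(vstart + 1, idx)
        defs.get(name) match {
          case Some(repl) =>
            out.append(str.substring(copied, vstart)).append(repl)
            copied = idx
          case None => // unknown variables are left in place
        }
      }
    }
    out.append(str.substring(copied)).toString
  }

  def main(args: Array[String]): Unit = {
    val defs = Map("coll" -> "List")
    val doc = "Returns a new $coll; price: \\$10."
    println(expandOnce(doc, defs)) // Returns a new List; price: \$10.
  }
}
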
diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala
new file mode 100644
index 000000000..1892e4bdc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Constants.scala
@@ -0,0 +1,235 @@
+package dotty.tools.dotc
+package core
+
+import Types._, Symbols._, Contexts._
+import printing.Printer
+
+object Constants {
+
+ final val NoTag = 0
+ final val UnitTag = 1
+ final val BooleanTag = 2
+ final val ByteTag = 3
+ final val ShortTag = 4
+ final val CharTag = 5
+ final val IntTag = 6
+ final val LongTag = 7
+ final val FloatTag = 8
+ final val DoubleTag = 9
+ final val StringTag = 10
+ final val NullTag = 11
+ final val ClazzTag = 12
+ // For supporting java enumerations inside java annotations (see ClassfileParser)
+ final val EnumTag = 13
+
+ case class Constant(value: Any) extends printing.Showable {
+ import java.lang.Double.doubleToRawLongBits
+ import java.lang.Float.floatToRawIntBits
+
+ val tag: Int = value match {
+ case null => NullTag
+ case x: Unit => UnitTag
+ case x: Boolean => BooleanTag
+ case x: Byte => ByteTag
+ case x: Short => ShortTag
+ case x: Int => IntTag
+ case x: Long => LongTag
+ case x: Float => FloatTag
+ case x: Double => DoubleTag
+ case x: String => StringTag
+ case x: Char => CharTag
+ case x: Type => ClazzTag
+ case x: Symbol => EnumTag
+ case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass)
+ }
+
+ def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue
+ def isShortRange: Boolean = isIntRange && Short.MinValue <= intValue && intValue <= Short.MaxValue
+ def isCharRange: Boolean = isIntRange && Char.MinValue <= intValue && intValue <= Char.MaxValue
+ def isIntRange: Boolean = ByteTag <= tag && tag <= IntTag
+ def isLongRange: Boolean = ByteTag <= tag && tag <= LongTag
+ def isFloatRange: Boolean = ByteTag <= tag && tag <= FloatTag
+ def isNumeric: Boolean = ByteTag <= tag && tag <= DoubleTag
+ def isNonUnitAnyVal = BooleanTag <= tag && tag <= DoubleTag
+ def isAnyVal = UnitTag <= tag && tag <= DoubleTag
+
+ def tpe(implicit ctx: Context): Type = tag match {
+ case UnitTag => defn.UnitType
+ case BooleanTag => defn.BooleanType
+ case ByteTag => defn.ByteType
+ case ShortTag => defn.ShortType
+ case CharTag => defn.CharType
+ case IntTag => defn.IntType
+ case LongTag => defn.LongType
+ case FloatTag => defn.FloatType
+ case DoubleTag => defn.DoubleType
+ case StringTag => defn.StringType
+ case NullTag => defn.NullType
+ case ClazzTag => defn.ClassType(typeValue)
+ case EnumTag => defn.EnumType(symbolValue)
+ }
+
+ /** We need the equals method to take account of tags as well as values.
+ */
+ override def equals(other: Any): Boolean = other match {
+ case that: Constant =>
+ this.tag == that.tag && equalHashValue == that.equalHashValue
+ case _ => false
+ }
+
+ def isNaN = value match {
+ case f: Float => f.isNaN
+ case d: Double => d.isNaN
+ case _ => false
+ }
+
+ def booleanValue: Boolean =
+ if (tag == BooleanTag) value.asInstanceOf[Boolean]
+ else throw new Error("value " + value + " is not a boolean")
+
+ def byteValue: Byte = tag match {
+ case ByteTag => value.asInstanceOf[Byte]
+ case ShortTag => value.asInstanceOf[Short].toByte
+ case CharTag => value.asInstanceOf[Char].toByte
+ case IntTag => value.asInstanceOf[Int].toByte
+ case LongTag => value.asInstanceOf[Long].toByte
+ case FloatTag => value.asInstanceOf[Float].toByte
+ case DoubleTag => value.asInstanceOf[Double].toByte
+ case _ => throw new Error("value " + value + " is not a Byte")
+ }
+
+ def shortValue: Short = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toShort
+ case ShortTag => value.asInstanceOf[Short]
+ case CharTag => value.asInstanceOf[Char].toShort
+ case IntTag => value.asInstanceOf[Int].toShort
+ case LongTag => value.asInstanceOf[Long].toShort
+ case FloatTag => value.asInstanceOf[Float].toShort
+ case DoubleTag => value.asInstanceOf[Double].toShort
+ case _ => throw new Error("value " + value + " is not a Short")
+ }
+
+ def charValue: Char = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toChar
+ case ShortTag => value.asInstanceOf[Short].toChar
+ case CharTag => value.asInstanceOf[Char]
+ case IntTag => value.asInstanceOf[Int].toChar
+ case LongTag => value.asInstanceOf[Long].toChar
+ case FloatTag => value.asInstanceOf[Float].toChar
+ case DoubleTag => value.asInstanceOf[Double].toChar
+ case _ => throw new Error("value " + value + " is not a Char")
+ }
+
+ def intValue: Int = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toInt
+ case ShortTag => value.asInstanceOf[Short].toInt
+ case CharTag => value.asInstanceOf[Char].toInt
+ case IntTag => value.asInstanceOf[Int]
+ case LongTag => value.asInstanceOf[Long].toInt
+ case FloatTag => value.asInstanceOf[Float].toInt
+ case DoubleTag => value.asInstanceOf[Double].toInt
+ case _ => throw new Error("value " + value + " is not an Int")
+ }
+
+ def longValue: Long = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toLong
+ case ShortTag => value.asInstanceOf[Short].toLong
+ case CharTag => value.asInstanceOf[Char].toLong
+ case IntTag => value.asInstanceOf[Int].toLong
+ case LongTag => value.asInstanceOf[Long]
+ case FloatTag => value.asInstanceOf[Float].toLong
+ case DoubleTag => value.asInstanceOf[Double].toLong
+ case _ => throw new Error("value " + value + " is not a Long")
+ }
+
+ def floatValue: Float = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toFloat
+ case ShortTag => value.asInstanceOf[Short].toFloat
+ case CharTag => value.asInstanceOf[Char].toFloat
+ case IntTag => value.asInstanceOf[Int].toFloat
+ case LongTag => value.asInstanceOf[Long].toFloat
+ case FloatTag => value.asInstanceOf[Float]
+ case DoubleTag => value.asInstanceOf[Double].toFloat
+ case _ => throw new Error("value " + value + " is not a Float")
+ }
+
+ def doubleValue: Double = tag match {
+ case ByteTag => value.asInstanceOf[Byte].toDouble
+ case ShortTag => value.asInstanceOf[Short].toDouble
+ case CharTag => value.asInstanceOf[Char].toDouble
+ case IntTag => value.asInstanceOf[Int].toDouble
+ case LongTag => value.asInstanceOf[Long].toDouble
+ case FloatTag => value.asInstanceOf[Float].toDouble
+ case DoubleTag => value.asInstanceOf[Double]
+ case _ => throw new Error("value " + value + " is not a Double")
+ }
+
+ /** Convert constant value to conform to given type.
+ */
+ def convertTo(pt: Type)(implicit ctx: Context): Constant = {
+ def classBound(pt: Type): Type = pt.dealias.stripTypeVar match {
+ case tref: TypeRef if !tref.symbol.isClass => classBound(tref.info.bounds.lo)
+ case param: PolyParam =>
+ ctx.typerState.constraint.entry(param) match {
+ case TypeBounds(lo, hi) =>
+ if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound
+ else classBound(lo)
+ case NoType => classBound(param.binder.paramBounds(param.paramNum).lo)
+ case inst => classBound(inst)
+ }
+ case pt => pt
+ }
+ val target = classBound(pt).typeSymbol
+ if (target == tpe.typeSymbol)
+ this
+ else if ((target == defn.ByteClass) && isByteRange)
+ Constant(byteValue)
+ else if (target == defn.ShortClass && isShortRange)
+ Constant(shortValue)
+ else if (target == defn.CharClass && isCharRange)
+ Constant(charValue)
+ else if (target == defn.IntClass && isIntRange)
+ Constant(intValue)
+ else if (target == defn.LongClass && isLongRange)
+ Constant(longValue)
+ else if (target == defn.FloatClass && isFloatRange)
+ Constant(floatValue)
+ else if (target == defn.DoubleClass && isNumeric)
+ Constant(doubleValue)
+ else
+ null
+ }
+
+ def stringValue: String = value.toString
+
+ def toText(printer: Printer) = printer.toText(this)
+
+ def typeValue: Type = value.asInstanceOf[Type]
+ def symbolValue: Symbol = value.asInstanceOf[Symbol]
+
+ /**
+ * Consider two `NaN`s to be identical, despite non-equality
+ * Consider -0d to be distinct from 0d, despite equality
+ *
+ * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`)
+ * to avoid treating different encodings of `NaN` as the same constant.
+ * You probably can't express different `NaN` varieties as compile time
+ * constants in regular Scala code, but it is conceivable that you could
+ * conjure them with a macro.
+ */
+ private def equalHashValue: Any = value match {
+ case f: Float => floatToRawIntBits(f)
+ case d: Double => doubleToRawLongBits(d)
+ case v => v
+ }
+
+ override def hashCode: Int = {
+ import scala.util.hashing.MurmurHash3._
+ val seed = 17
+ var h = seed
+ h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide.
+ h = mix(h, equalHashValue.##)
+ finalizeHash(h, length = 2)
+ }
+ }
+}
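
`equalHashValue` compares floating-point constants by their raw bit patterns, so two identical `NaN` literals compare equal while `0.0` and `-0.0` stay distinct; primitive `==` gives the opposite answer in both cases, which is exactly what the comment above warns about, and `hashCode` additionally mixes in the tag so `0`, `0L`, `0f` and `0d` do not collide. A standalone sketch of the bit-pattern comparison:

// Sketch: bit-pattern equality for floating-point constants,
// in the spirit of Constant.equalHashValue.
object RawBitsEqualityDemo {
  import java.lang.Double.doubleToRawLongBits

  def constEquals(a: Double, b: Double): Boolean =
    doubleToRawLongBits(a) == doubleToRawLongBits(b)

  def main(args: Array[String]): Unit = {
    assert(constEquals(Double.NaN, Double.NaN))      // identical NaN encodings compare equal
    assert(!constEquals(0.0, -0.0))                  // signed zeros stay distinct
    assert(0.0 == -0.0 && Double.NaN != Double.NaN)  // ...unlike primitive ==
    println("bit-pattern equality unifies NaN and separates signed zeros")
  }
}
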
diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala
new file mode 100644
index 000000000..c99b748b7
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala
@@ -0,0 +1,154 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._
+import util.SimpleMap
+import collection.mutable
+import printing.{Printer, Showable}
+import printing.Texts._
+import config.Config
+import config.Printers.constr
+
+/** Constraint over undetermined type parameters. Constraints are built
+ * over values of the following types:
+ *
+ * - PolyType A constraint constrains the type parameters of a set of PolyTypes
+ * - PolyParam The parameters of the constrained polytypes
+ * - TypeVar Every constrained parameter might be associated with a TypeVar
+ * that has the PolyParam as origin.
+ */
+abstract class Constraint extends Showable {
+
+ type This <: Constraint
+
+ /** Does the constraint's domain contain the type parameters of `pt`? */
+ def contains(pt: PolyType): Boolean
+
+ /** Does the constraint's domain contain the type parameter `param`? */
+ def contains(param: PolyParam): Boolean
+
+ /** Does this constraint contain the type variable `tvar` and is it uninstantiated? */
+ def contains(tvar: TypeVar): Boolean
+
+ /** The constraint entry for given type parameter `param`, or NoType if `param` is not part of
+ * the constraint domain. Note: Low level, implementation dependent.
+ */
+ def entry(param: PolyParam): Type
+
+ /** The type variable corresponding to parameter `param`, or
+ * NoType, if `param` is not constrained or is not paired with a type variable.
+ */
+ def typeVarOfParam(param: PolyParam): Type
+
+ /** Is it known that `param1 <:< param2`? */
+ def isLess(param1: PolyParam, param2: PolyParam): Boolean
+
+ /** The parameters that are known to be smaller wrt <: than `param` */
+ def lower(param: PolyParam): List[PolyParam]
+
+ /** The parameters that are known to be greater wrt <: than `param` */
+ def upper(param: PolyParam): List[PolyParam]
+
+ /** lower(param) \ lower(butNot) */
+ def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam]
+
+ /** upper(param) \ upper(butNot) */
+ def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam]
+
+ /** The constraint bounds for given type parameter `param`.
+ * Poly params that are known to be smaller or greater than `param`
+ * are not contained in the return bounds.
+ * @pre `param` is part of the constraint domain.
+ */
+ def nonParamBounds(param: PolyParam): TypeBounds
+
+ /** The lower bound of `param` including all known-to-be-smaller parameters */
+ def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type
+
+ /** The upper bound of `param` including all known-to-be-greater parameters */
+ def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type
+
+ /** The bounds of `param` including all known-to-be-smaller and -greater parameters */
+ def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds
+
+ /** A new constraint which is derived from this constraint by adding
+ * entries for all type parameters of `poly`.
+ * @param tvars A list of type variables associated with the params,
+ * or Nil if the constraint will just be checked for
+ * satisfiability but will not be solved to give instances of
+ * type variables.
+ */
+ def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This
+
+ /** A new constraint which is derived from this constraint by updating
+ * the entry for parameter `param` to `tp`.
+ * `tp` can be one of the following:
+ *
+ * - A TypeBounds value, indicating new constraint bounds
+ * - Another type, indicating a solution for the parameter
+ *
+ * @pre `this contains param`.
+ */
+ def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This
+
+ /** A constraint that includes the relationship `p1 <: p2`.
+ * `<:` relationships between parameters ("edges") are propagated, but
+ * non-parameter bounds are left alone.
+ */
+ def addLess(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This
+
+ /** A constraint resulting from adding p2 = p1 to this constraint, and at the same
+ * time transferring all bounds of p2 to p1
+ */
+ def unify(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This
+
+ /** A new constraint which is derived from this constraint by removing
+ * the type parameter `param` from the domain and replacing all top-level occurrences
+ * of the parameter elsewhere in the constraint by type `tp`, or a conservative
+ * approximation of it if that is needed to avoid cycles.
+ * Occurrences nested inside a refinement or prefix are not affected.
+ */
+ def replace(param: PolyParam, tp: Type)(implicit ctx: Context): This
+
+ /** Narrow one of the bounds of type parameter `param`.
+ * If `isUpper` is true, ensure that `param <: bound`, otherwise ensure
+ * that `param >: bound`.
+ */
+ def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This
+
+ /** Is entry associated with `pt` removable? This is the case if
+ * all type parameters of the entry are associated with type variables
+ * which have their `inst` fields set.
+ */
+ def isRemovable(pt: PolyType): Boolean
+
+ /** A new constraint with all entries coming from `pt` removed. */
+ def remove(pt: PolyType)(implicit ctx: Context): This
+
+ /** The polytypes constrained by this constraint */
+ def domainPolys: List[PolyType]
+
+ /** The polytype parameters constrained by this constraint */
+ def domainParams: List[PolyParam]
+
+ /** Check whether predicate holds for all parameters in constraint */
+ def forallParams(p: PolyParam => Boolean): Boolean
+
+ /** Perform operation `op` on all typevars in this constraint. */
+ def foreachTypeVar(op: TypeVar => Unit): Unit
+
+ /** The uninstantiated typevars of this constraint */
+ def uninstVars: collection.Seq[TypeVar]
+
+ /** The weakest constraint that subsumes both this constraint and `other` */
+ def & (other: Constraint)(implicit ctx: Context): Constraint
+
+ /** Check that no constrained parameter contains itself as a bound */
+ def checkNonCyclic()(implicit ctx: Context): Unit
+
+ /** Check that constraint only refers to PolyParams bound by itself */
+ def checkClosed()(implicit ctx: Context): Unit
+}
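
`addLess`, `isLess`, `lower` and `upper` above describe an ordering between constrained parameters that is kept transitively closed: recording `p1 <: p2` makes everything already below `p1` also lie below everything already above `p2`. A toy standalone sketch of that bookkeeping, with strings standing in for `PolyParam`s; this is purely illustrative and not the `OrderingConstraint` implementation:

// Toy sketch: a transitively closed "<:" ordering between parameters.
object ToyOrderingConstraint {
  final case class ParamOrdering(lowerMap: Map[String, Set[String]],
                                 upperMap: Map[String, Set[String]]) {
    def lower(p: String): Set[String] = lowerMap.getOrElse(p, Set.empty)
    def upper(p: String): Set[String] = upperMap.getOrElse(p, Set.empty)
    def isLess(p1: String, p2: String): Boolean = upper(p1).contains(p2)

    /** Record p1 <: p2; everything below p1 becomes below everything above p2. */
    def addLess(p1: String, p2: String): ParamOrdering = {
      val below = lower(p1) + p1
      val above = upper(p2) + p2
      val newLower = above.foldLeft(lowerMap)((m, u) => m.updated(u, m.getOrElse(u, Set.empty) ++ below))
      val newUpper = below.foldLeft(upperMap)((m, l) => m.updated(l, m.getOrElse(l, Set.empty) ++ above))
      ParamOrdering(newLower, newUpper)
    }
  }

  def main(args: Array[String]): Unit = {
    val c = ParamOrdering(Map.empty, Map.empty).addLess("A", "B").addLess("B", "C")
    assert(c.isLess("A", "C"))   // transitivity: A <: B and B <: C give A <: C
    println("upper(A) = " + c.upper("A"))
  }
}
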
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
new file mode 100644
index 000000000..0e155b9e1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -0,0 +1,458 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._
+import Decorators._
+import config.Config
+import config.Printers.{constr, typr}
+import TypeApplications.EtaExpansion
+import collection.mutable
+
+/** Methods for adding constraints and solving them.
+ *
+ * What goes into a Constraint as opposed to ConstraintHandling?
+ *
+ * Constraint code is purely functional: Operations get constraints and produce new ones.
+ * Constraint code does not have access to a type-comparer. Anything regarding lubs and glbs has to be done
+ * elsewhere.
+ *
+ * By comparison: Constraint handlers are parts of type comparers and can use their functionality.
+ * Constraint handlers update the current constraint as a side effect.
+ */
+trait ConstraintHandling {
+
+ implicit val ctx: Context
+
+ protected def isSubType(tp1: Type, tp2: Type): Boolean
+ protected def isSameType(tp1: Type, tp2: Type): Boolean
+
+ val state: TyperState
+ import state.constraint
+
+ private var addConstraintInvocations = 0
+
+ /** If the constraint is frozen we cannot add new bounds to the constraint. */
+ protected var frozenConstraint = false
+
+ protected var alwaysFluid = false
+
+ /** Perform `op` in a mode where all attempts to set `frozenConstraint` to true are ignored */
+ def fluidly[T](op: => T): T = {
+ val saved = alwaysFluid
+ alwaysFluid = true
+ try op finally alwaysFluid = saved
+ }
+
+ /** The set of polytypes currently being compared. Used as an optimization:
+ * when empty, there is no need to do an expensive `pruneLambdaParams`.
+ */
+ protected var comparedPolyTypes: Set[PolyType] = Set.empty
+
+ private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean =
+ !constraint.contains(param) || {
+ def occursIn(bound: Type): Boolean = {
+ val b = bound.dealias
+ (b eq param) || {
+ b match {
+ case b: AndOrType => occursIn(b.tp1) || occursIn(b.tp2)
+ case b: TypeVar => occursIn(b.origin)
+ case _ => false
+ }
+ }
+ }
+ if (Config.checkConstraintsSeparated)
+ assert(!occursIn(bound), s"$param occurs in $bound")
+ val c1 = constraint.narrowBound(param, bound, isUpper)
+ (c1 eq constraint) || {
+ constraint = c1
+ val TypeBounds(lo, hi) = constraint.entry(param)
+ isSubType(lo, hi)
+ }
+ }
+
+ protected def addUpperBound(param: PolyParam, bound: Type): Boolean = {
+ def description = i"constraint $param <: $bound to\n$constraint"
+ if (bound.isRef(defn.NothingClass) && ctx.typerState.isGlobalCommittable) {
+ def msg = s"!!! instantiated to Nothing: $param, constraint = ${constraint.show}"
+ if (Config.failOnInstantiationToNothing) assert(false, msg)
+ else ctx.log(msg)
+ }
+ constr.println(i"adding $description")
+ val lower = constraint.lower(param)
+ val res =
+ addOneBound(param, bound, isUpper = true) &&
+ lower.forall(addOneBound(_, bound, isUpper = true))
+ constr.println(i"added $description = $res")
+ res
+ }
+
+ protected def addLowerBound(param: PolyParam, bound: Type): Boolean = {
+ def description = i"constraint $param >: $bound to\n$constraint"
+ constr.println(i"adding $description")
+ val upper = constraint.upper(param)
+ val res =
+ addOneBound(param, bound, isUpper = false) &&
+ upper.forall(addOneBound(_, bound, isUpper = false))
+ constr.println(i"added $description = $res")
+ res
+ }
+
+ protected def addLess(p1: PolyParam, p2: PolyParam): Boolean = {
+ def description = i"ordering $p1 <: $p2 to\n$constraint"
+ val res =
+ if (constraint.isLess(p2, p1)) unify(p2, p1)
+ else {
+ val down1 = p1 :: constraint.exclusiveLower(p1, p2)
+ val up2 = p2 :: constraint.exclusiveUpper(p2, p1)
+ val lo1 = constraint.nonParamBounds(p1).lo
+ val hi2 = constraint.nonParamBounds(p2).hi
+ constr.println(i"adding $description down1 = $down1, up2 = $up2")
+ constraint = constraint.addLess(p1, p2)
+ down1.forall(addOneBound(_, hi2, isUpper = true)) &&
+ up2.forall(addOneBound(_, lo1, isUpper = false))
+ }
+ constr.println(i"added $description = $res")
+ res
+ }
+
+ /** Make p2 = p1, transfer all bounds of p2 to p1
+ * @pre less(p1)(p2)
+ */
+ private def unify(p1: PolyParam, p2: PolyParam): Boolean = {
+ constr.println(s"unifying $p1 $p2")
+ assert(constraint.isLess(p1, p2))
+ val down = constraint.exclusiveLower(p2, p1)
+ val up = constraint.exclusiveUpper(p1, p2)
+ constraint = constraint.unify(p1, p2)
+ val bounds = constraint.nonParamBounds(p1)
+ val lo = bounds.lo
+ val hi = bounds.hi
+ isSubType(lo, hi) &&
+ down.forall(addOneBound(_, hi, isUpper = true)) &&
+ up.forall(addOneBound(_, lo, isUpper = false))
+ }
+
+ final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
+ val saved = frozenConstraint
+ frozenConstraint = !alwaysFluid
+ try isSubType(tp1, tp2)
+ finally frozenConstraint = saved
+ }
+
+ final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
+ val saved = frozenConstraint
+ frozenConstraint = !alwaysFluid
+ try isSameType(tp1, tp2)
+ finally frozenConstraint = saved
+ }
+
+ /** Test whether the lower bounds of all parameters in this
+ * constraint are a solution to the constraint.
+ */
+ protected final def isSatisfiable: Boolean =
+ constraint.forallParams { param =>
+ val TypeBounds(lo, hi) = constraint.entry(param)
+ isSubType(lo, hi) || {
+ ctx.log(i"sub fail $lo <:< $hi")
+ false
+ }
+ }
+
+ /** Solve constraint set for given type parameter `param`.
+ * If `fromBelow` is true the parameter is approximated by its lower bound,
+ * otherwise it is approximated by its upper bound. However, any occurrences
+ * of the parameter in a refinement somewhere in the bound are removed. Also
+ * wildcard types in bounds are approximated by their upper or lower bounds.
+ * (Such occurrences can arise for F-bounded types).
+ * The constraint is left unchanged.
+ * @return the instantiating type
+ * @pre `param` is in the constraint's domain.
+ */
+ final def approximation(param: PolyParam, fromBelow: Boolean): Type = {
+ val avoidParam = new TypeMap {
+ override def stopAtStatic = true
+ def apply(tp: Type) = mapOver {
+ tp match {
+ case tp: RefinedType if param occursIn tp.refinedInfo => tp.parent
+ case tp: WildcardType =>
+ val bounds = tp.optBounds.orElse(TypeBounds.empty).bounds
+ // Try to instantiate the wildcard to a type that is known to conform to it.
+ // This means:
+ // If fromBelow is true, we minimize the type overall
+ // Hence, if variance < 0, pick the maximal safe type: bounds.lo
+ // (i.e. the whole bounds range is over the type)
+ // if variance > 0, pick the minimal safe type: bounds.hi
+ // (i.e. the whole bounds range is under the type)
+ // if variance == 0, pick bounds.lo anyway (this is arbitrary but in line with
+ // the principle that we pick the smaller type when in doubt).
+ // If fromBelow is false, we maximize the type overall and reverse the bounds
+ // if variance != 0. For variance == 0, we still minimize.
+ // In summary we pick the bound given by this table:
+ //
+ // variance | -1 0 1
+ // ------------------------
+ // from below | lo lo hi
+ // from above | hi lo lo
+ //
+ if (variance == 0 || fromBelow == (variance < 0)) bounds.lo else bounds.hi
+ case _ => tp
+ }
+ }
+ }
+ assert(constraint.contains(param))
+ val bound = if (fromBelow) constraint.fullLowerBound(param) else constraint.fullUpperBound(param)
+ val inst = avoidParam(bound)
+ typr.println(s"approx ${param.show}, from below = $fromBelow, bound = ${bound.show}, inst = ${inst.show}")
+ inst
+ }
+
+ /** The instance type of `param` in the current constraint (which contains `param`).
+ * If `fromBelow` is true, the instance type is the lub of the parameter's
+ * lower bounds; otherwise it is the glb of its upper bounds. However,
+ * a lower bound instantiation can be a singleton type only if the upper bound
+ * is also a singleton type.
+ */
+ def instanceType(param: PolyParam, fromBelow: Boolean): Type = {
+ def upperBound = constraint.fullUpperBound(param)
+ def isSingleton(tp: Type): Boolean = tp match {
+ case tp: SingletonType => true
+ case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2)
+ case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2)
+ case _ => false
+ }
+ def isFullyDefined(tp: Type): Boolean = tp match {
+ case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
+ case tp: TypeProxy => isFullyDefined(tp.underlying)
+ case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
+ case _ => true
+ }
+ def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match {
+ case tp: OrType => true
+ case tp: RefinedOrRecType => isOrType(tp.parent)
+ case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
+ case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
+ case _ => false
+ }
+
+ // First, solve the constraint.
+ var inst = approximation(param, fromBelow)
+
+ // Then, approximate by (1.) - (3.) and simplify as follows.
+ // 1. If instance is from below and is a singleton type, yet
+ // upper bound is not a singleton type, widen the instance.
+ if (fromBelow && isSingleton(inst) && !isSingleton(upperBound))
+ inst = inst.widen
+
+ inst = inst.simplified
+
+ // 2. If instance is from below and is a fully-defined union type, yet upper bound
+ // is not a union type, approximate the union type from above by an intersection
+ // of all common base types.
+ if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound))
+ inst = ctx.harmonizeUnion(inst)
+
+ // 3. If instance is from below, and upper bound has open named parameters
+ // make sure the instance has all named parameters of the bound.
+ if (fromBelow) inst = inst.widenToNamedTypeParams(param.namedTypeParams)
+ inst
+ }
+
+  /** Constraint `c1` subsumes constraint `c2`, if, taking `c2` as the current constraint,
+   *  we have for all poly params `p` defined in `c2` with bounds `p >: L2 <: U2`:
+ *
+ * c1 defines p with bounds p >: L1 <: U1, and
+ * L2 <: L1, and
+ * U1 <: U2
+ *
+ * Both `c1` and `c2` are required to derive from constraint `pre`, possibly
+ * narrowing it with further bounds.
+ */
+ protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint): Boolean =
+ if (c2 eq pre) true
+ else if (c1 eq pre) false
+ else {
+ val saved = constraint
+ try
+ c2.forallParams(p =>
+ c1.contains(p) &&
+ c2.upper(p).forall(c1.isLess(p, _)) &&
+ isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)))
+ finally constraint = saved
+ }
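+  // Illustrative example (not from the original sources): if `pre` has `p >: Nothing <: Any`,
+  // `c2` narrows this to `p >: Nothing <: Seq[Int]`, and `c1` narrows it further to
+  // `p >: List[Int] <: Seq[Int]`, then `subsumes(c1, c2, pre)` holds, since each bound of
+  // `c1` lies within the corresponding bound of `c2`.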
+
+ /** The current bounds of type parameter `param` */
+ final def bounds(param: PolyParam): TypeBounds = {
+ val e = constraint.entry(param)
+ if (e.exists) e.bounds else param.binder.paramBounds(param.paramNum)
+ }
+
+ /** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
+ * and propagate all bounds.
+ * @param tvars See Constraint#add
+ */
+ def addToConstraint(pt: PolyType, tvars: List[TypeVar]): Unit =
+ assert {
+ checkPropagated(i"initialized $pt") {
+ constraint = constraint.add(pt, tvars)
+ pt.paramNames.indices.forall { i =>
+ val param = PolyParam(pt, i)
+ val bounds = constraint.nonParamBounds(param)
+ val lower = constraint.lower(param)
+ val upper = constraint.upper(param)
+ if (lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) ||
+ upper.nonEmpty && !bounds.hi.isRef(defn.AnyClass)) constr.println(i"INIT*** $pt")
+ lower.forall(addOneBound(_, bounds.hi, isUpper = true)) &&
+ upper.forall(addOneBound(_, bounds.lo, isUpper = false))
+ }
+ }
+ }
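+  // Sketch of a typical call site (hypothetical, not part of this file): when a polymorphic
+  // method is applied, fresh type variables are created for the parameters of its PolyType
+  // and registered here, after which subtype checks narrow their bounds.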
+
+ /** Can `param` be constrained with new bounds? */
+ final def canConstrain(param: PolyParam): Boolean =
+ !frozenConstraint && (constraint contains param)
+
+ /** Add constraint `param <: bound` if `fromBelow` is false, `param >: bound` otherwise.
+ * `bound` is assumed to be in normalized form, as specified in `firstTry` and
+ * `secondTry` of `TypeComparer`. In particular, it should not be an alias type,
+   *  lazy ref, typevar, wildcard type, or error type. In addition, upper bounds may
+ * not be AndTypes and lower bounds may not be OrTypes. This is assured by the
+ * way isSubType is organized.
+ */
+ protected def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean = {
+ def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint"
+ //checkPropagated(s"adding $description")(true) // DEBUG in case following fails
+ checkPropagated(s"added $description") {
+ addConstraintInvocations += 1
+
+ /** When comparing lambdas we might get constraints such as
+ * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter
+ * and `X0` is a lambda parameter. The constraint for `A` is not allowed
+ * to refer to such a lambda parameter because the lambda parameter is
+ * not visible where `A` is defined. Consequently, we need to
+ * approximate the bound so that the lambda parameter does not appear in it.
+ * If `tp` is an upper bound, we need to approximate with something smaller,
+ * otherwise something larger.
+ * Test case in pos/i94-nada.scala. This test crashes with an illegal instance
+ * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is
+ * missing.
+ */
+ def pruneLambdaParams(tp: Type) =
+ if (comparedPolyTypes.nonEmpty) {
+ val approx = new ApproximatingTypeMap {
+ def apply(t: Type): Type = t match {
+ case t @ PolyParam(pt: PolyType, n) if comparedPolyTypes contains pt =>
+ val effectiveVariance = if (fromBelow) -variance else variance
+ val bounds = pt.paramBounds(n)
+ if (effectiveVariance > 0) bounds.lo
+ else if (effectiveVariance < 0) bounds.hi
+ else NoType
+ case _ =>
+ mapOver(t)
+ }
+ }
+ approx(tp)
+ }
+ else tp
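+      // Illustrative example: when adding the bound `A <: List[X0]` while a lambda with
+      // parameter `X0` is being compared, the map above replaces `X0` by one of its declared
+      // bounds, chosen by the effective variance, so the bound recorded for `A` no longer
+      // mentions the lambda parameter.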
+
+ def addParamBound(bound: PolyParam) =
+ if (fromBelow) addLess(bound, param) else addLess(param, bound)
+
+ /** Drop all constrained parameters that occur at the toplevel in `bound` and
+ * handle them by `addLess` calls.
+ * The preconditions make sure that such parameters occur only
+ * in one of two ways:
+ *
+ * 1.
+ *
+ * P <: Ts1 | ... | Tsm (m > 0)
+       *     Tsi = T1 & ... & Tn     (n >= 0)
+ * Some of the Ti are constrained parameters
+ *
+ * 2.
+ *
+ * Ts1 & ... & Tsm <: P (m > 0)
+ * Tsi = T1 | ... | Tn (n >= 0)
+ * Some of the Ti are constrained parameters
+ *
+ * In each case we cannot leave the parameter in place,
+ * because that would risk making a parameter later a subtype or supertype
+ * of a bound where the parameter occurs again at toplevel, which leads to cycles
+ * in the subtyping test. So we intentionally narrow the constraint by
+ * recording an isLess relationship instead (even though this is not implied
+ * by the bound).
+ *
+ * Narrowing a constraint is better than widening it, because narrowing leads
+ * to incompleteness (which we face anyway, see for instance eitherIsSubType)
+ * but widening leads to unsoundness.
+ *
+ * A test case that demonstrates the problem is i864.scala.
+ * Turn Config.checkConstraintsSeparated on to get an accurate diagnostic
+ * of the cycle when it is created.
+ *
+ * @return The pruned type if all `addLess` calls succeed, `NoType` otherwise.
+ */
+ def prune(bound: Type): Type = bound match {
+ case bound: AndOrType =>
+ val p1 = prune(bound.tp1)
+ val p2 = prune(bound.tp2)
+ if (p1.exists && p2.exists) bound.derivedAndOrType(p1, p2)
+ else NoType
+ case bound: TypeVar if constraint contains bound.origin =>
+ prune(bound.underlying)
+ case bound: PolyParam =>
+ constraint.entry(bound) match {
+ case NoType => pruneLambdaParams(bound)
+ case _: TypeBounds =>
+ if (!addParamBound(bound)) NoType
+ else if (fromBelow) defn.NothingType
+ else defn.AnyType
+ case inst =>
+ prune(inst)
+ }
+ case _ =>
+ pruneLambdaParams(bound)
+ }
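+      // Illustrative example: when adding an upper bound `Q | String` for `param`, where `Q`
+      // is itself a constrained parameter, the PolyParam case above records `param <: Q` via
+      // `addLess` and replaces `Q` by `Any`, so the upper bound actually recorded is
+      // `Any | String`.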
+
+ try bound match {
+ case bound: PolyParam if constraint contains bound =>
+ addParamBound(bound)
+ case _ =>
+ val pbound = prune(bound)
+ pbound.exists && (
+ if (fromBelow) addLowerBound(param, pbound) else addUpperBound(param, pbound))
+ }
+ finally addConstraintInvocations -= 1
+ }
+ }
+
+ /** Instantiate `param` to `tp` if the constraint stays satisfiable */
+ protected def tryInstantiate(param: PolyParam, tp: Type): Boolean = {
+ val saved = constraint
+ constraint =
+ if (addConstraint(param, tp, fromBelow = true) &&
+ addConstraint(param, tp, fromBelow = false)) constraint.replace(param, tp)
+ else saved
+ constraint ne saved
+ }
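+  // Illustrative behaviour: `tryInstantiate(param, tp)` replaces `param` by `tp` only if `tp`
+  // can be added both as a lower and as an upper bound of `param`; otherwise the previous
+  // constraint is restored and the method returns false.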
+
+ /** Check that constraint is fully propagated. See comment in Config.checkConstraintsPropagated */
+ def checkPropagated(msg: => String)(result: Boolean): Boolean = {
+ if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) {
+ val saved = frozenConstraint
+ frozenConstraint = true
+ for (p <- constraint.domainParams) {
+ def check(cond: => Boolean, q: PolyParam, ordering: String, explanation: String): Unit =
+ assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg")
+ for (u <- constraint.upper(p))
+ check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated")
+ for (l <- constraint.lower(p)) {
+ check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated")
+ check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing")
+ }
+ }
+ frozenConstraint = saved
+ }
+ result
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
new file mode 100644
index 000000000..e0f659cc6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
@@ -0,0 +1,17 @@
+package dotty.tools.dotc
+package core
+
+import Contexts._
+import config.Printers.typr
+
+trait ConstraintRunInfo { self: RunInfo =>
+ private var maxSize = 0
+ private var maxConstraint: Constraint = _
+ def recordConstraintSize(c: Constraint, size: Int) =
+ if (size > maxSize) {
+ maxSize = size
+ maxConstraint = c
+ }
+ def printMaxConstraint()(implicit ctx: Context) =
+ if (maxSize > 0) typr.println(s"max constraint = ${maxConstraint.show}")
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala
new file mode 100644
index 000000000..639c4d111
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala
@@ -0,0 +1,709 @@
+package dotty.tools
+package dotc
+package core
+
+import interfaces.CompilerCallback
+import Decorators._
+import Periods._
+import Names._
+import Phases._
+import Types._
+import Symbols._
+import Scopes._
+import NameOps._
+import Uniques._
+import SymDenotations._
+import Comments._
+import Flags.ParamAccessor
+import util.Positions._
+import ast.Trees._
+import ast.untpd
+import util.{FreshNameCreator, SimpleMap, SourceFile, NoSource}
+import typer.{Implicits, ImplicitRunInfo, ImportInfo, Inliner, NamerContextOps, SearchHistory, TypeAssigner, Typer}
+import Implicits.ContextualImplicits
+import config.Settings._
+import config.Config
+import reporting._
+import collection.mutable
+import collection.immutable.BitSet
+import printing._
+import config.{Settings, ScalaSettings, Platform, JavaPlatform}
+import language.implicitConversions
+import DenotTransformers.DenotTransformer
+import util.Property.Key
+import xsbti.AnalysisCallback
+
+object Contexts {
+
+ /** A context is passed basically everywhere in dotc.
+ * This is convenient but carries the risk of captured contexts in
+ * objects that turn into space leaks. To combat this risk, here are some
+ * conventions to follow:
+ *
+ * - Never let an implicit context be an argument of a class whose instances
+ * live longer than the context.
+ * - Classes that need contexts for their initialization take an explicit parameter
+ * named `initctx`. They pass initctx to all positions where it is needed
+   *    (and these positions should all be part of the initialization sequence of the class).
+ * - Classes that need contexts that survive initialization are instead passed
+ * a "condensed context", typically named `cctx` (or they create one). Condensed contexts
+ * just add some basic information to the context base without the
+ * risk of capturing complete trees.
+ * - To make sure these rules are kept, it would be good to do a sanity
+ * check using bytecode inspection with javap or scalap: Keep track
+ * of all class fields of type context; allow them only in whitelisted
+ * classes (which should be short-lived).
+ */
+ abstract class Context extends Periods
+ with Substituters
+ with TypeOps
+ with Phases
+ with Printers
+ with Symbols
+ with SymDenotations
+ with Reporting
+ with NamerContextOps
+ with Cloneable { thiscontext =>
+ implicit def ctx: Context = this
+
+ /** The context base at the root */
+ val base: ContextBase
+
+ /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */
+ def outersIterator = new Iterator[Context] {
+ var current = thiscontext
+ def hasNext = current != NoContext
+ def next = { val c = current; current = current.outer; c }
+ }
+
+ /** The outer context */
+ private[this] var _outer: Context = _
+ protected def outer_=(outer: Context) = _outer = outer
+ def outer: Context = _outer
+
+ /** The compiler callback implementation, or null if no callback will be called. */
+ private[this] var _compilerCallback: CompilerCallback = _
+ protected def compilerCallback_=(callback: CompilerCallback) =
+ _compilerCallback = callback
+ def compilerCallback: CompilerCallback = _compilerCallback
+
+ /** The sbt callback implementation if we are run from sbt, null otherwise */
+ private[this] var _sbtCallback: AnalysisCallback = _
+ protected def sbtCallback_=(callback: AnalysisCallback) =
+ _sbtCallback = callback
+ def sbtCallback: AnalysisCallback = _sbtCallback
+
+    /** The current period */
+ private[this] var _period: Period = _
+ protected def period_=(period: Period) = {
+ assert(period.firstPhaseId == period.lastPhaseId, period)
+ _period = period
+ }
+ def period: Period = _period
+
+    /** The current mode */
+ private[this] var _mode: Mode = _
+ protected def mode_=(mode: Mode) = _mode = mode
+ def mode: Mode = _mode
+
+    /** The current typer state */
+ private[this] var _typerState: TyperState = _
+ protected def typerState_=(typerState: TyperState) = _typerState = typerState
+ def typerState: TyperState = _typerState
+
+ /** The current plain printer */
+ private[this] var _printerFn: Context => Printer = _
+ protected def printerFn_=(printerFn: Context => Printer) = _printerFn = printerFn
+ def printerFn: Context => Printer = _printerFn
+
+ /** The current owner symbol */
+ private[this] var _owner: Symbol = _
+ protected def owner_=(owner: Symbol) = _owner = owner
+ def owner: Symbol = _owner
+
+ /** The current settings values */
+ private[this] var _sstate: SettingsState = _
+ protected def sstate_=(sstate: SettingsState) = _sstate = sstate
+ def sstate: SettingsState = _sstate
+
+    /** The current compilation unit */
+ private[this] var _compilationUnit: CompilationUnit = _
+ protected def compilationUnit_=(compilationUnit: CompilationUnit) = _compilationUnit = compilationUnit
+ def compilationUnit: CompilationUnit = _compilationUnit
+
+ /** The current tree */
+ private[this] var _tree: Tree[_ >: Untyped]= _
+ protected def tree_=(tree: Tree[_ >: Untyped]) = _tree = tree
+ def tree: Tree[_ >: Untyped] = _tree
+
+ /** The current scope */
+ private[this] var _scope: Scope = _
+ protected def scope_=(scope: Scope) = _scope = scope
+ def scope: Scope = _scope
+
+ /** The current type assigner or typer */
+ private[this] var _typeAssigner: TypeAssigner = _
+ protected def typeAssigner_=(typeAssigner: TypeAssigner) = _typeAssigner = typeAssigner
+ def typeAssigner: TypeAssigner = _typeAssigner
+ def typer: Typer = _typeAssigner.asInstanceOf[Typer]
+
+ /** The currently active import info */
+ private[this] var _importInfo: ImportInfo = _
+ protected def importInfo_=(importInfo: ImportInfo) = _importInfo = importInfo
+ def importInfo: ImportInfo = _importInfo
+
+ /** The current compiler-run specific Info */
+ private[this] var _runInfo: RunInfo = _
+ protected def runInfo_=(runInfo: RunInfo) = _runInfo = runInfo
+ def runInfo: RunInfo = _runInfo
+
+    /** An optional diagnostics buffer that is used by some checking code
+ * to provide more information in the buffer if it exists.
+ */
+ private var _diagnostics: Option[StringBuilder] = _
+ protected def diagnostics_=(diagnostics: Option[StringBuilder]) = _diagnostics = diagnostics
+ def diagnostics: Option[StringBuilder] = _diagnostics
+
+ /** The current bounds in force for type parameters appearing in a GADT */
+ private var _gadt: GADTMap = _
+ protected def gadt_=(gadt: GADTMap) = _gadt = gadt
+ def gadt: GADTMap = _gadt
+
+    /** The current fresh name creator */
+ private[this] var _freshNames: FreshNameCreator = _
+ protected def freshNames_=(freshNames: FreshNameCreator) = _freshNames = freshNames
+ def freshNames: FreshNameCreator = _freshNames
+
+ def freshName(prefix: String = ""): String = freshNames.newName(prefix)
+ def freshName(prefix: Name): String = freshName(prefix.toString)
+
+ /** A map in which more contextual properties can be stored */
+ private var _moreProperties: Map[Key[Any], Any] = _
+ protected def moreProperties_=(moreProperties: Map[Key[Any], Any]) = _moreProperties = moreProperties
+ def moreProperties: Map[Key[Any], Any] = _moreProperties
+
+ def property[T](key: Key[T]): Option[T] =
+ moreProperties.get(key).asInstanceOf[Option[T]]
+
+ private var _typeComparer: TypeComparer = _
+ protected def typeComparer_=(typeComparer: TypeComparer) = _typeComparer = typeComparer
+ def typeComparer: TypeComparer = {
+ if (_typeComparer.ctx ne this)
+ _typeComparer = _typeComparer.copyIn(this)
+ _typeComparer
+ }
+
+ /** Number of findMember calls on stack */
+ private[core] var findMemberCount: Int = 0
+
+    /** List of names which have a findMember call on the stack,
+     *  recorded once Config.LogPendingFindMemberThreshold is reached.
+ */
+ private[core] var pendingMemberSearches: List[Name] = Nil
+
+ /** The new implicit references that are introduced by this scope */
+ private var implicitsCache: ContextualImplicits = null
+ def implicits: ContextualImplicits = {
+ if (implicitsCache == null )
+ implicitsCache = {
+ val implicitRefs: List[TermRef] =
+ if (isClassDefContext)
+ try owner.thisType.implicitMembers
+ catch {
+ case ex: CyclicReference => Nil
+ }
+ else if (isImportContext) importInfo.importedImplicits
+ else if (isNonEmptyScopeContext) scope.implicitDecls
+ else Nil
+ val outerImplicits =
+ if (isImportContext && importInfo.hiddenRoot.exists)
+ outer.implicits exclude importInfo.hiddenRoot
+ else
+ outer.implicits
+ if (implicitRefs.isEmpty) outerImplicits
+ else new ContextualImplicits(implicitRefs, outerImplicits)(this)
+ }
+ implicitsCache
+ }
+
+ /** The history of implicit searches that are currently active */
+ private var _searchHistory: SearchHistory = null
+ protected def searchHistory_= (searchHistory: SearchHistory) = _searchHistory = searchHistory
+ def searchHistory: SearchHistory = _searchHistory
+
+    /** These fields are used to cache contexts created in withPhase.
+     *  phasedCtx is the first context with an altered phase that was ever requested.
+     *  phasedCtxs is an array indexed by phaseId; contexts are created only on
+     *  request and cached in this array.
+     */
+ private var phasedCtx: Context = _
+ private var phasedCtxs: Array[Context] = _
+
+ /** This context at given phase.
+     *  This method will always return a phase period equal to phaseId, and thus never returns a squashed phase.
+ */
+ final def withPhase(phaseId: PhaseId): Context =
+ if (this.phaseId == phaseId) this
+ else if (phasedCtx.phaseId == phaseId) phasedCtx
+ else if (phasedCtxs != null && phasedCtxs(phaseId) != null) phasedCtxs(phaseId)
+ else {
+ val ctx1 = fresh.setPhase(phaseId)
+ if (phasedCtx eq this) phasedCtx = ctx1
+ else {
+ if (phasedCtxs == null) phasedCtxs = new Array[Context](base.phases.length)
+ phasedCtxs(phaseId) = ctx1
+ }
+ ctx1
+ }
+
+ final def withPhase(phase: Phase): Context =
+ withPhase(phase.id)
+
+ final def withPhaseNoLater(phase: Phase) =
+ if (phase.exists && ctx.phase.id > phase.id) withPhase(phase) else ctx
+
+    /** If the `YtraceContextCreation` setting is on, the top of the stack trace where this context
+ * was created, otherwise `null`.
+ */
+ private var creationTrace: Array[StackTraceElement] = _
+
+ private def setCreationTrace() =
+ if (this.settings.YtraceContextCreation.value)
+ creationTrace = (new Throwable).getStackTrace().take(20)
+
+ /** Print all enclosing context's creation stacktraces */
+ def printCreationTraces() = {
+ println("=== context creation trace =======")
+ for (ctx <- outersIterator) {
+ println(s">>>>>>>>> $ctx")
+ if (ctx.creationTrace != null) println(ctx.creationTrace.mkString("\n"))
+ }
+ println("=== end context creation trace ===")
+ }
+
+ /** The current reporter */
+ def reporter: Reporter = typerState.reporter
+
+ /** Is this a context for the members of a class definition? */
+ def isClassDefContext: Boolean =
+ owner.isClass && (owner ne outer.owner)
+
+ /** Is this a context that introduces an import clause? */
+ def isImportContext: Boolean =
+ (this ne NoContext) && (this.importInfo ne outer.importInfo)
+
+ /** Is this a context that introduces a non-empty scope? */
+ def isNonEmptyScopeContext: Boolean =
+ (this.scope ne outer.scope) && this.scope.nonEmpty
+
+ /** Leave message in diagnostics buffer if it exists */
+ def diagnose(str: => String) =
+ for (sb <- diagnostics) {
+ sb.setLength(0)
+ sb.append(str)
+ }
+
+ /** The next outer context whose tree is a template or package definition */
+ def enclTemplate: Context = {
+ var c = this
+ while (c != NoContext && !c.tree.isInstanceOf[Template[_]] && !c.tree.isInstanceOf[PackageDef[_]])
+ c = c.outer
+ c
+ }
+
+ /** The context for a supercall. This context is used for elaborating
+ * the parents of a class and their arguments.
+ * The context is computed from the current class context. It has
+ *
+ * - as owner: The primary constructor of the class
+ * - as outer context: The context enclosing the class context
+ * - as scope: The parameter accessors in the class context
+ * - with additional mode: InSuperCall
+ *
+ * The reasons for this peculiar choice of attributes are as follows:
+ *
+ * - The constructor must be the owner, because that's where any local methods or closures
+ * should go.
+ * - The context may not see any class members (inherited or defined), and should
+ * instead see definitions defined in the outer context which might be shadowed by
+ * such class members. That's why the outer context must be the outer context of the class.
+ * - At the same time the context should see the parameter accessors of the current class,
+ * that's why they get added to the local scope. An alternative would have been to have the
+ * context see the constructor parameters instead, but then we'd need a final substitution step
+ * from constructor parameters to class parameter accessors.
+ */
+ def superCallContext: Context = {
+ val locals = newScopeWith(owner.asClass.paramAccessors: _*)
+ superOrThisCallContext(owner.primaryConstructor, locals)
+ }
+
+ /** The context for the arguments of a this(...) constructor call.
+ * The context is computed from the local auxiliary constructor context.
+ * It has
+ *
+ * - as owner: The auxiliary constructor
+ * - as outer context: The context enclosing the enclosing class context
+ * - as scope: The parameters of the auxiliary constructor.
+ */
+ def thisCallArgContext: Context = {
+ assert(owner.isClassConstructor)
+ val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next
+ superOrThisCallContext(owner, constrCtx.scope)
+ .setTyperState(typerState)
+ .setGadt(gadt)
+ }
+
+ /** The super- or this-call context with given owner and locals. */
+ private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = {
+ var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next
+ classCtx.outer.fresh.setOwner(owner)
+ .setScope(locals)
+ .setMode(classCtx.mode | Mode.InSuperCall)
+ }
+
+ /** The context of expression `expr` seen as a member of a statement sequence */
+ def exprContext(stat: Tree[_ >: Untyped], exprOwner: Symbol) =
+ if (exprOwner == this.owner) this
+ else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext
+ else ctx.fresh.setOwner(exprOwner)
+
+ /** The current source file; will be derived from current
+ * compilation unit.
+ */
+ def source: SourceFile =
+ if (compilationUnit == null) NoSource else compilationUnit.source
+
+ /** Does current phase use an erased types interpretation? */
+ def erasedTypes: Boolean = phase.erasedTypes
+
+ /** Is the debug option set? */
+ def debug: Boolean = base.settings.debug.value
+
+ /** Is the verbose option set? */
+ def verbose: Boolean = base.settings.verbose.value
+
+ /** Should use colors when printing? */
+ def useColors: Boolean =
+ base.settings.color.value == "always"
+
+ /** A condensed context containing essential information of this but
+ * no outer contexts except the initial context.
+ private var _condensed: CondensedContext = null
+ def condensed: CondensedContext = {
+ if (_condensed eq outer.condensed)
+ _condensed = base.initialCtx.fresh
+ .withPeriod(period)
+ .withNewMode(mode)
+ // typerState and its constraint is not preserved in condensed
+ // reporter is always ThrowingReporter
+ .withPrinterFn(printerFn)
+ .withOwner(owner)
+ .withSettings(sstate)
+ // tree is not preserved in condensed
+ .withRunInfo(runInfo)
+ .withDiagnostics(diagnostics)
+ .withMoreProperties(moreProperties)
+ _condensed
+ }
+ */
+
+ protected def init(outer: Context): this.type = {
+ this.outer = outer
+ this.implicitsCache = null
+ this.phasedCtx = this
+ this.phasedCtxs = null
+ setCreationTrace()
+ this
+ }
+
+ /** A fresh clone of this context. */
+ def fresh: FreshContext = clone.asInstanceOf[FreshContext].init(this)
+
+ final def withOwner(owner: Symbol): Context =
+ if (owner ne this.owner) fresh.setOwner(owner) else this
+
+ override def toString =
+ "Context(\n" +
+ (outersIterator map ( ctx => s" owner = ${ctx.owner}, scope = ${ctx.scope}") mkString "\n")
+ }
+
+ /** A condensed context provides only a small memory footprint over
+ * a Context base, and therefore can be stored without problems in
+ * long-lived objects.
+ abstract class CondensedContext extends Context {
+ override def condensed = this
+ }
+ */
+
+ /** A fresh context allows selective modification
+ * of its attributes using the with... methods.
+ */
+ abstract class FreshContext extends Context {
+ def setPeriod(period: Period): this.type = { this.period = period; this }
+ def setMode(mode: Mode): this.type = { this.mode = mode; this }
+ def setCompilerCallback(callback: CompilerCallback): this.type = { this.compilerCallback = callback; this }
+ def setSbtCallback(callback: AnalysisCallback): this.type = { this.sbtCallback = callback; this }
+ def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this }
+ def setReporter(reporter: Reporter): this.type = setTyperState(typerState.withReporter(reporter))
+ def setNewTyperState: this.type = setTyperState(typerState.fresh(isCommittable = true))
+ def setExploreTyperState: this.type = setTyperState(typerState.fresh(isCommittable = false))
+ def setPrinterFn(printer: Context => Printer): this.type = { this.printerFn = printer; this }
+ def setOwner(owner: Symbol): this.type = { assert(owner != NoSymbol); this.owner = owner; this }
+ def setSettings(sstate: SettingsState): this.type = { this.sstate = sstate; this }
+ def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { this.compilationUnit = compilationUnit; this }
+ def setTree(tree: Tree[_ >: Untyped]): this.type = { this.tree = tree; this }
+ def setScope(scope: Scope): this.type = { this.scope = scope; this }
+ def setNewScope: this.type = { this.scope = newScope; this }
+ def setTypeAssigner(typeAssigner: TypeAssigner): this.type = { this.typeAssigner = typeAssigner; this }
+ def setTyper(typer: Typer): this.type = { this.scope = typer.scope; setTypeAssigner(typer) }
+ def setImportInfo(importInfo: ImportInfo): this.type = { this.importInfo = importInfo; this }
+ def setRunInfo(runInfo: RunInfo): this.type = { this.runInfo = runInfo; this }
+ def setDiagnostics(diagnostics: Option[StringBuilder]): this.type = { this.diagnostics = diagnostics; this }
+ def setGadt(gadt: GADTMap): this.type = { this.gadt = gadt; this }
+ def setTypeComparerFn(tcfn: Context => TypeComparer): this.type = { this.typeComparer = tcfn(this); this }
+ def setSearchHistory(searchHistory: SearchHistory): this.type = { this.searchHistory = searchHistory; this }
+ def setFreshNames(freshNames: FreshNameCreator): this.type = { this.freshNames = freshNames; this }
+ def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = { this.moreProperties = moreProperties; this }
+
+ def setProperty[T](key: Key[T], value: T): this.type =
+ setMoreProperties(moreProperties.updated(key, value))
+
+ def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid))
+ def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end))
+
+ def setSetting[T](setting: Setting[T], value: T): this.type =
+ setSettings(setting.updateIn(sstate, value))
+
+ def setFreshGADTBounds: this.type = { this.gadt = new GADTMap(gadt.bounds); this }
+
+ def setDebug = setSetting(base.settings.debug, true)
+ }
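+  // Typical usage (sketch): configuration is chained on a fresh copy, leaving the original
+  // context untouched, e.g. `ctx.fresh.setOwner(sym).setNewScope.setMode(Mode.Pattern)`.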
+
+ implicit class ModeChanges(val c: Context) extends AnyVal {
+ final def withModeBits(mode: Mode): Context =
+ if (mode != c.mode) c.fresh.setMode(mode) else c
+
+ final def addMode(mode: Mode): Context = withModeBits(c.mode | mode)
+ final def maskMode(mode: Mode): Context = withModeBits(c.mode & mode)
+ final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode)
+ }
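+  // For example (illustrative): `ctx.addMode(Mode.Pattern)` returns a fresh context with the
+  // Pattern bit added, or `ctx` itself if the bit is already set.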
+
+ implicit class FreshModeChanges(val c: FreshContext) extends AnyVal {
+ final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode)
+ final def maskMode(mode: Mode): c.type = c.setMode(c.mode & mode)
+ final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode)
+ }
+
+ /** A class defining the initial context with given context base
+ * and set of possible settings.
+ */
+ private class InitialContext(val base: ContextBase, settings: SettingGroup) extends FreshContext {
+ outer = NoContext
+ period = InitialPeriod
+ mode = Mode.None
+ typerState = new TyperState(new ConsoleReporter())
+ printerFn = new RefinedPrinter(_)
+ owner = NoSymbol
+ sstate = settings.defaultState
+ tree = untpd.EmptyTree
+ typeAssigner = TypeAssigner
+ runInfo = new RunInfo(this)
+ diagnostics = None
+ freshNames = new FreshNameCreator.Default
+ moreProperties = Map.empty
+ typeComparer = new TypeComparer(this)
+ searchHistory = new SearchHistory(0, Map())
+ gadt = new GADTMap(SimpleMap.Empty)
+ }
+
+ @sharable object NoContext extends Context {
+ val base = null
+ override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null)(this)
+ }
+
+ /** A context base defines state and associated methods that exist once per
+ * compiler run.
+ */
+ class ContextBase extends ContextState
+ with Denotations.DenotationsBase
+ with Phases.PhasesBase {
+
+ /** The applicable settings */
+ val settings = new ScalaSettings
+
+ /** The initial context */
+ val initialCtx: Context = new InitialContext(this, settings)
+
+ /** The symbol loaders */
+ val loaders = new SymbolLoaders
+
+ /** The platform, initialized by `initPlatform()`. */
+ private var _platform: Platform = _
+
+ /** The platform */
+ def platform: Platform = {
+ if (_platform == null) {
+ throw new IllegalStateException(
+ "initialize() must be called before accessing platform")
+ }
+ _platform
+ }
+
+ protected def newPlatform(implicit ctx: Context): Platform =
+ new JavaPlatform
+
+ /** The loader that loads the members of _root_ */
+ def rootLoader(root: TermSymbol)(implicit ctx: Context): SymbolLoader = platform.rootLoader(root)
+
+    // Set up some phases to get started
+ usePhases(List(SomePhase))
+
+ /** The standard definitions */
+ val definitions = new Definitions
+
+ /** Initializes the `ContextBase` with a starting context.
+ * This initializes the `platform` and the `definitions`.
+ */
+ def initialize()(implicit ctx: Context): Unit = {
+ _platform = newPlatform
+ definitions.init()
+ }
+
+ def squashed(p: Phase): Phase = {
+ allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase)
+ }
+ }
+
+ /** The essential mutable state of a context base, collected into a common class */
+ class ContextState {
+ // Symbols state
+
+ /** A counter for unique ids */
+ private[core] var _nextId = 0
+
+ def nextId = { _nextId += 1; _nextId }
+
+ /** A map from a superclass id to the typeref of the class that has it */
+ private[core] var classOfId = new Array[ClassSymbol](Config.InitialSuperIdsSize)
+
+    /** A map from the typeref of a class to its superclass id */
+ private[core] val superIdOfClass = new mutable.AnyRefMap[ClassSymbol, Int]
+
+ /** The last allocated superclass id */
+ private[core] var lastSuperId = -1
+
+ /** Allocate and return next free superclass id */
+ private[core] def nextSuperId: Int = {
+ lastSuperId += 1
+ if (lastSuperId >= classOfId.length) {
+ val tmp = new Array[ClassSymbol](classOfId.length * 2)
+ classOfId.copyToArray(tmp)
+ classOfId = tmp
+ }
+ lastSuperId
+ }
+
+ // Types state
+ /** A table for hash consing unique types */
+ private[core] val uniques = new util.HashSet[Type](Config.initialUniquesCapacity) {
+ override def hash(x: Type): Int = x.hash
+ }
+
+ /** A table for hash consing unique refined types */
+ private[dotc] val uniqueRefinedTypes = new RefinedUniques
+
+ /** A table for hash consing unique named types */
+ private[core] val uniqueNamedTypes = new NamedTypeUniques
+
+ /** A table for hash consing unique type bounds */
+ private[core] val uniqueTypeAliases = new TypeAliasUniques
+
+ private def uniqueSets = Map(
+ "uniques" -> uniques,
+ "uniqueRefinedTypes" -> uniqueRefinedTypes,
+ "uniqueNamedTypes" -> uniqueNamedTypes,
+ "uniqueTypeAliases" -> uniqueTypeAliases)
+
+ /** A map that associates label and size of all uniques sets */
+ def uniquesSizes: Map[String, Int] = uniqueSets.mapValues(_.size)
+
+ /** The number of recursive invocation of underlying on a NamedType
+ * during a controlled operation.
+ */
+ private[core] var underlyingRecursions: Int = 0
+
+ /** The set of named types on which a currently active invocation
+ * of underlying during a controlled operation exists. */
+ private[core] val pendingUnderlying = new mutable.HashSet[Type]
+
+ /** A flag that some unsafe nonvariant instantiation was encountered
+     *  in this run. Used as a shortcut to avoid scans of types in
+ * Typer.typedSelect.
+ */
+ private[dotty] var unsafeNonvariant: RunId = NoRunId
+
+ // Phases state
+
+ private[core] var phasesPlan: List[List[Phase]] = _
+
+ /** Phases by id */
+ private[core] var phases: Array[Phase] = _
+
+ /** Phases with consecutive Transforms grouped into a single phase, Empty array if squashing is disabled */
+ private[core] var squashedPhases: Array[Phase] = Array.empty[Phase]
+
+ /** Next denotation transformer id */
+ private[core] var nextDenotTransformerId: Array[Int] = _
+
+ private[core] var denotTransformers: Array[DenotTransformer] = _
+
+ // Printers state
+ /** Number of recursive invocations of a show method on current stack */
+ private[dotc] var toTextRecursions = 0
+
+ // Reporters state
+ private[dotc] var indent = 0
+
+ protected[dotc] val indentTab = " "
+
+ def reset() = {
+ for ((_, set) <- uniqueSets) set.clear()
+ for (i <- 0 until classOfId.length) classOfId(i) = null
+ superIdOfClass.clear()
+ lastSuperId = -1
+ }
+
+ // Test that access is single threaded
+
+    /** The thread on which `checkSingleThreaded` was invoked last */
+ @sharable private var thread: Thread = null
+
+ /** Check that we are on the same thread as before */
+ def checkSingleThreaded() =
+ if (thread == null) thread = Thread.currentThread()
+ else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase")
+ }
+
+ object Context {
+
+ /** Implicit conversion that injects all printer operations into a context */
+ implicit def toPrinter(ctx: Context): Printer = ctx.printer
+
+    /** Implicit conversion that injects all ContextBase members into a context */
+ implicit def toBase(ctx: Context): ContextBase = ctx.base
+
+ // @sharable val theBase = new ContextBase // !!! DEBUG, so that we can use a minimal context for reporting even in code that normally cannot access a context
+ }
+
+ /** Info that changes on each compiler run */
+ class RunInfo(initctx: Context) extends ImplicitRunInfo with ConstraintRunInfo {
+ implicit val ctx: Context = initctx
+ }
+
+ class GADTMap(initBounds: SimpleMap[Symbol, TypeBounds]) {
+ private var myBounds = initBounds
+ def setBounds(sym: Symbol, b: TypeBounds): Unit =
+ myBounds = myBounds.updated(sym, b)
+ def bounds = myBounds
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala
new file mode 100644
index 000000000..a105741f5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala
@@ -0,0 +1,185 @@
+package dotty.tools.dotc
+package core
+
+import annotation.tailrec
+import Symbols._
+import Contexts._, Names._, Phases._, printing.Texts._, printing.Printer, printing.Showable
+import util.Positions.Position, util.SourcePosition
+import collection.mutable.ListBuffer
+import dotty.tools.dotc.transform.TreeTransforms._
+import ast.tpd._
+import scala.language.implicitConversions
+import printing.Formatting._
+
+/** This object provides useful implicit decorators for types defined elsewhere */
+object Decorators {
+
+ /** Turns Strings into PreNames, adding toType/TermName methods */
+ implicit class StringDecorator(val s: String) extends AnyVal with PreName {
+ def toTypeName: TypeName = typeName(s)
+ def toTermName: TermName = termName(s)
+ def toText(printer: Printer): Text = Str(s)
+ }
+
+ /** Implements a findSymbol method on iterators of Symbols that
+ * works like find but avoids Option, replacing None with NoSymbol.
+ */
+ implicit class SymbolIteratorDecorator(val it: Iterator[Symbol]) extends AnyVal {
+ final def findSymbol(p: Symbol => Boolean): Symbol = {
+ while (it.hasNext) {
+ val sym = it.next
+ if (p(sym)) return sym
+ }
+ NoSymbol
+ }
+ }
+
+ final val MaxFilterRecursions = 1000
+
+ /** Implements filterConserve, zipWithConserve methods
+ * on lists that avoid duplication of list nodes where feasible.
+ */
+ implicit class ListDecorator[T](val xs: List[T]) extends AnyVal {
+
+ final def mapconserve[U](f: T => U): List[U] = {
+ @tailrec
+ def loop(mapped: ListBuffer[U], unchanged: List[U], pending: List[T]): List[U] =
+ if (pending.isEmpty) {
+ if (mapped eq null) unchanged
+ else mapped.prependToList(unchanged)
+ } else {
+ val head0 = pending.head
+ val head1 = f(head0)
+
+ if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef])
+ loop(mapped, unchanged, pending.tail)
+ else {
+ val b = if (mapped eq null) new ListBuffer[U] else mapped
+ var xc = unchanged
+ while (xc ne pending) {
+ b += xc.head
+ xc = xc.tail
+ }
+ b += head1
+ val tail0 = pending.tail
+ loop(b, tail0.asInstanceOf[List[U]], tail0)
+ }
+ }
+ loop(null, xs.asInstanceOf[List[U]], xs)
+ }
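+    // For example (illustrative): `xs.mapconserve(x => x)` returns `xs` itself rather than a
+    // copy, because every mapped element is reference-identical to the original one.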
+
+ /** Like `xs filter p` but returns list `xs` itself - instead of a copy -
+ * if `p` is true for all elements and `xs` is not longer
+ * than `MaxFilterRecursions`.
+ */
+ def filterConserve(p: T => Boolean): List[T] = {
+ def loop(xs: List[T], nrec: Int): List[T] = xs match {
+ case Nil => xs
+ case x :: xs1 =>
+ if (nrec < MaxFilterRecursions) {
+ val ys1 = loop(xs1, nrec + 1)
+ if (p(x))
+ if (ys1 eq xs1) xs else x :: ys1
+ else
+ ys1
+ } else xs filter p
+ }
+ loop(xs, 0)
+ }
+
+ /** Like `(xs, ys).zipped.map(f)`, but returns list `xs` itself
+ * - instead of a copy - if function `f` maps all elements of
+ * `xs` to themselves. Also, it is required that `ys` is at least
+ * as long as `xs`.
+ */
+ def zipWithConserve[U](ys: List[U])(f: (T, U) => T): List[T] =
+ if (xs.isEmpty) xs
+ else {
+ val x1 = f(xs.head, ys.head)
+ val xs1 = xs.tail.zipWithConserve(ys.tail)(f)
+ if ((x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) &&
+ (xs1 eq xs.tail)) xs
+ else x1 :: xs1
+ }
+
+ def foldRightBN[U](z: => U)(op: (T, => U) => U): U = xs match {
+ case Nil => z
+ case x :: xs1 => op(x, xs1.foldRightBN(z)(op))
+ }
+
+ final def hasSameLengthAs[U](ys: List[U]): Boolean = {
+ @tailrec def loop(xs: List[T], ys: List[U]): Boolean =
+ if (xs.isEmpty) ys.isEmpty
+ else ys.nonEmpty && loop(xs.tail, ys.tail)
+ loop(xs, ys)
+ }
+
+ /** Union on lists seen as sets */
+ def | (ys: List[T]): List[T] = xs ++ (ys filterNot (xs contains _))
+
+ /** Intersection on lists seen as sets */
+ def & (ys: List[T]): List[T] = xs filter (ys contains _)
+ }
+
+ implicit class ListOfListDecorator[T](val xss: List[List[T]]) extends AnyVal {
+ def nestedMap[U](f: T => U): List[List[U]] = xss map (_ map f)
+ def nestedMapconserve[U](f: T => U): List[List[U]] = xss mapconserve (_ mapconserve f)
+ }
+
+ implicit class TextToString(val text: Text) extends AnyVal {
+ def show(implicit ctx: Context) = text.mkString(ctx.settings.pageWidth.value)
+ }
+
+ /** Test whether a list of strings representing phases contains
+ * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the
+ * exact meaning of "contains" here.
+ */
+ implicit class PhaseListDecorator(val names: List[String]) extends AnyVal {
+ def containsPhase(phase: Phase): Boolean = phase match {
+ case phase: TreeTransformer => phase.miniPhases.exists(containsPhase)
+ case _ =>
+ names exists { name =>
+ name == "all" || {
+ val strippedName = name.stripSuffix("+")
+ val logNextPhase = name ne strippedName
+ phase.phaseName.startsWith(strippedName) ||
+ (logNextPhase && phase.prev.phaseName.startsWith(strippedName))
+ }
+ }
+ }
+ }
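+  // For example (illustrative): `List("erasure+").containsPhase(p)` holds for the erasure
+  // phase and for the phase immediately following it, while `List("all")` matches any phase.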
+
+ implicit def sourcePos(pos: Position)(implicit ctx: Context): SourcePosition = {
+ def recur(inlinedCalls: List[Tree], pos: Position): SourcePosition = inlinedCalls match {
+ case inlinedCall :: rest =>
+ sourceFile(inlinedCall).atPos(pos).withOuter(recur(rest, inlinedCall.pos))
+ case empty =>
+ ctx.source.atPos(pos)
+ }
+ recur(enclosingInlineds, pos)
+ }
+
+ implicit class StringInterpolators(val sc: StringContext) extends AnyVal {
+
+ /** General purpose string formatting */
+ def i(args: Any*)(implicit ctx: Context): String =
+ new StringFormatter(sc).assemble(args)
+
+ /** Formatting for error messages: Like `i` but suppress follow-on
+ * error messages after the first one if some of their arguments are "non-sensical".
+ */
+ def em(args: Any*)(implicit ctx: Context): String =
+ new ErrorMessageFormatter(sc).assemble(args)
+
+ /** Formatting with added explanations: Like `em`, but add explanations to
+ * give more info about type variables and to disambiguate where needed.
+ */
+ def ex(args: Any*)(implicit ctx: Context): String =
+ explained2(implicit ctx => em(args: _*))
+
+ /** Formatter that adds syntax highlighting to all interpolated values */
+ def hl(args: Any*)(implicit ctx: Context): String =
+ new SyntaxFormatter(sc).assemble(args).stripMargin
+ }
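+  // Typical usage (sketch): `i"cannot prove $tp1 <:< $tp2"` renders the interpolated values
+  // with the current context's printer; `em"..."` is like `i` but suppresses follow-on error
+  // messages whose arguments are non-sensical, and `hl"..."` adds syntax highlighting.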
+}
+
diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala
new file mode 100644
index 000000000..4b090d9b1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala
@@ -0,0 +1,807 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._, Denotations._, SymDenotations._, StdNames._, Names._
+import Flags._, Scopes._, Decorators._, NameOps._, util.Positions._, Periods._
+import unpickleScala2.Scala2Unpickler.ensureConstructor
+import scala.annotation.{ switch, meta }
+import scala.collection.{ mutable, immutable }
+import PartialFunction._
+import collection.mutable
+import scala.reflect.api.{ Universe => ApiUniverse }
+
+object Definitions {
+ val MaxTupleArity, MaxAbstractFunctionArity = 22
+ val MaxFunctionArity = 30
+ // Awaiting a definite solution that drops the limit altogether, 30 gives a safety
+ // margin over the previous 22, so that treecopiers in miniphases are allowed to
+ // temporarily create larger closures. This is needed in lambda lift where large closures
+ // are first formed by treecopiers before they are split apart into parameters and
+ // environment in the lambdalift transform itself.
+}
+
+/** A class defining symbols and types of standard definitions
+ *
+ * Note: There's a much nicer design possible once we have implicit functions.
+ * The idea is explored to some degree in branch wip-definitions (#929): Instead of a type
+ * and a separate symbol definition, we produce in one line an implicit function from
+ * Context to Symbol, and possibly also the corresponding type. This cuts down on all
+ * the duplication encountered here.
+ *
+ * wip-definitions tries to do the same with an implicit conversion from a SymbolPerRun
+ * type to a symbol type. The problem with that is universal equality. Comparisons will
+ * not trigger the conversion and will therefore likely return false results.
+ *
+ * So the branch is put on hold, until we have implicit functions, which will always
+ * automatically be dereferenced.
+ */
+class Definitions {
+ import Definitions._
+
+ private implicit var ctx: Context = _
+
+ private def newSymbol[N <: Name](owner: Symbol, name: N, flags: FlagSet, info: Type) =
+ ctx.newSymbol(owner, name, flags | Permanent, info)
+
+ private def newClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, infoFn: ClassSymbol => Type) =
+ ctx.newClassSymbol(owner, name, flags | Permanent, infoFn).entered
+
+ private def newCompleteClassSymbol(owner: Symbol, name: TypeName, flags: FlagSet, parents: List[TypeRef], decls: Scope = newScope) =
+ ctx.newCompleteClassSymbol(owner, name, flags | Permanent, parents, decls).entered
+
+ private def newTopClassSymbol(name: TypeName, flags: FlagSet, parents: List[TypeRef]) =
+ completeClass(newCompleteClassSymbol(ScalaPackageClass, name, flags, parents))
+
+ private def newTypeField(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
+ scope.enter(newSymbol(cls, name, flags, TypeBounds.empty))
+
+ private def newTypeParam(cls: ClassSymbol, name: TypeName, flags: FlagSet, scope: MutableScope) =
+ newTypeField(cls, name, flags | ClassTypeParamCreationFlags, scope)
+
+ private def newSyntheticTypeParam(cls: ClassSymbol, scope: MutableScope, paramFlags: FlagSet, suffix: String = "T0") =
+ newTypeParam(cls, suffix.toTypeName.expandedName(cls), ExpandedName | paramFlags, scope)
+
+ // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only
+ // implemented in Dotty and not in Scala 2.
+ // See <http://docs.scala-lang.org/sips/pending/repeated-byname.html>.
+ private def specialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = {
+ val completer = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val cls = denot.asClass.classSymbol
+ val paramDecls = newScope
+ val typeParam = newSyntheticTypeParam(cls, paramDecls, paramFlags)
+ def instantiate(tpe: Type) =
+ if (tpe.typeParams.nonEmpty) tpe.appliedTo(typeParam.typeRef)
+ else tpe
+ val parents = parentConstrs.toList map instantiate
+ val parentRefs: List[TypeRef] = ctx.normalizeToClassRefs(parents, cls, paramDecls)
+ denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parentRefs, paramDecls)
+ }
+ }
+ newClassSymbol(ScalaPackageClass, name, EmptyFlags, completer)
+ }
+
+ private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = EmptyFlags): TermSymbol =
+ newSymbol(cls, name.encode, flags | Method, info).entered.asTerm
+
+ private def newAliasType(name: TypeName, tpe: Type, flags: FlagSet = EmptyFlags): TypeSymbol = {
+ val sym = newSymbol(ScalaPackageClass, name, flags, TypeAlias(tpe))
+ ScalaPackageClass.currentPackageDecls.enter(sym)
+ sym
+ }
+
+ private def newPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int,
+ resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags) = {
+ val tparamNames = tpnme.syntheticTypeParamNames(typeParamCount)
+ val tparamBounds = tparamNames map (_ => TypeBounds.empty)
+ val ptype = PolyType(tparamNames)(_ => tparamBounds, resultTypeFn)
+ newMethod(cls, name, ptype, flags)
+ }
+
+ private def newT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ newPolyMethod(cls, name, 1, resultTypeFn, flags)
+
+ private def newT1EmptyParamsMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) =
+ newPolyMethod(cls, name, 1, pt => MethodType(Nil, resultTypeFn(pt)), flags)
+
+ private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef] = {
+ val arr = new Array[TypeRef](arity + 1)
+ for (i <- countFrom to arity) arr(i) = ctx.requiredClassRef(name + i)
+ arr
+ }
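+  // For example (illustrative): `mkArityArray("scala.Function", 2, 0)` yields an array whose
+  // slots 0 to 2 hold class references for `scala.Function0`, `scala.Function1` and
+  // `scala.Function2`.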
+
+ private def completeClass(cls: ClassSymbol): ClassSymbol = {
+ ensureConstructor(cls, EmptyScope)
+ if (cls.linkedClass.exists) cls.linkedClass.info = NoType
+ cls
+ }
+
+ lazy val RootClass: ClassSymbol = ctx.newPackageSymbol(
+ NoSymbol, nme.ROOT, (root, rootcls) => ctx.rootLoader(root)).moduleClass.asClass
+ lazy val RootPackage: TermSymbol = ctx.newSymbol(
+ NoSymbol, nme.ROOTPKG, PackageCreationFlags, TypeRef(NoPrefix, RootClass))
+
+ lazy val EmptyPackageVal = ctx.newPackageSymbol(
+ RootClass, nme.EMPTY_PACKAGE, (emptypkg, emptycls) => ctx.rootLoader(emptypkg)).entered
+ lazy val EmptyPackageClass = EmptyPackageVal.moduleClass.asClass
+
+ /** A package in which we can place all methods that are interpreted specially by the compiler */
+ lazy val OpsPackageVal = ctx.newCompletePackageSymbol(RootClass, nme.OPS_PACKAGE).entered
+ lazy val OpsPackageClass = OpsPackageVal.moduleClass.asClass
+
+ lazy val ScalaPackageVal = ctx.requiredPackage("scala")
+ lazy val ScalaMathPackageVal = ctx.requiredPackage("scala.math")
+ lazy val ScalaPackageClass = ScalaPackageVal.moduleClass.asClass
+ lazy val JavaPackageVal = ctx.requiredPackage("java")
+ lazy val JavaLangPackageVal = ctx.requiredPackage("java.lang")
+ // fundamental modules
+ lazy val SysPackage = ctx.requiredModule("scala.sys.package")
+ lazy val Sys_errorR = SysPackage.moduleClass.requiredMethodRef(nme.error)
+ def Sys_error(implicit ctx: Context) = Sys_errorR.symbol
+
+ /** The `scalaShadowing` package is used to safely modify classes and
+ * objects in scala so that they can be used from dotty. They will
+ * be visible as members of the `scala` package, replacing any objects
+ * or classes with the same name. But their binary artifacts are
+ * in `scalaShadowing` so they don't clash with the same-named `scala`
+ * members at runtime.
+ */
+ lazy val ScalaShadowingPackageVal = ctx.requiredPackage("scalaShadowing")
+ lazy val ScalaShadowingPackageClass = ScalaShadowingPackageVal.moduleClass.asClass
+
+ /** Note: We cannot have same named methods defined in Object and Any (and AnyVal, for that matter)
+ * because after erasure the Any and AnyVal references get remapped to the Object methods
+ * which would result in a double binding assertion failure.
+ * Instead we do the following:
+ *
+ * - Have some methods exist only in Any, and remap them with the Erasure denotation
+ * transformer to be owned by Object.
+ * - Have other methods exist only in Object.
+ * To achieve this, we synthesize all Any and Object methods; Object methods no longer get
+ * loaded from a classfile.
+ *
+ * There's a remaining question about `getClass`. In Scala2.x `getClass` was handled by compiler magic.
+   *  This is deemed too cumbersome for Dotty and therefore right now `getClass` gets no special treatment;
+ * it's just a method on `Any` which returns the raw type `java.lang.Class`. An alternative
+ * way to get better `getClass` typing would be to treat `getClass` as a method of a generic
+ * decorator which gets remapped in a later phase to Object#getClass. Then we could give it
+ * the right type without changing the typechecker:
+ *
+ * implicit class AnyGetClass[T](val x: T) extends AnyVal {
+ * def getClass: java.lang.Class[T] = ???
+ * }
+ */
+ lazy val AnyClass: ClassSymbol = completeClass(newCompleteClassSymbol(ScalaPackageClass, tpnme.Any, Abstract, Nil))
+ def AnyType = AnyClass.typeRef
+ lazy val AnyValClass: ClassSymbol = completeClass(newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyVal, Abstract, List(AnyClass.typeRef)))
+ def AnyValType = AnyValClass.typeRef
+
+ lazy val Any_== = newMethod(AnyClass, nme.EQ, methOfAny(BooleanType), Final)
+ lazy val Any_!= = newMethod(AnyClass, nme.NE, methOfAny(BooleanType), Final)
+ lazy val Any_equals = newMethod(AnyClass, nme.equals_, methOfAny(BooleanType))
+ lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
+ lazy val Any_toString = newMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
+ lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
+ lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final)
+ lazy val Any_isInstanceOf = newT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
+ lazy val Any_asInstanceOf = newT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final)
+
+ def AnyMethods = List(Any_==, Any_!=, Any_equals, Any_hashCode,
+ Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf)
+
+ lazy val ObjectClass: ClassSymbol = {
+ val cls = ctx.requiredClass("java.lang.Object")
+ assert(!cls.isCompleted, "race for completing java.lang.Object")
+ cls.info = ClassInfo(cls.owner.thisType, cls, AnyClass.typeRef :: Nil, newScope)
+
+    // The companion object doesn't really exist; setting its info to `NoType` is the
+    // general technique to indicate that. We need to set it here, before a completion
+    // attempt tries to load Object's classfile, which would cause issue #1648.
+ val companion = JavaLangPackageVal.info.decl(nme.Object).symbol
+ companion.info = NoType // to indicate that it does not really exist
+
+ completeClass(cls)
+ }
+ def ObjectType = ObjectClass.typeRef
+
+ lazy val AnyRefAlias: TypeSymbol = newAliasType(tpnme.AnyRef, ObjectType)
+ def AnyRefType = AnyRefAlias.typeRef
+
+ lazy val Object_eq = newMethod(ObjectClass, nme.eq, methOfAnyRef(BooleanType), Final)
+ lazy val Object_ne = newMethod(ObjectClass, nme.ne, methOfAnyRef(BooleanType), Final)
+ lazy val Object_synchronized = newPolyMethod(ObjectClass, nme.synchronized_, 1,
+ pt => MethodType(List(PolyParam(pt, 0)), PolyParam(pt, 0)), Final)
+ lazy val Object_clone = newMethod(ObjectClass, nme.clone_, MethodType(Nil, ObjectType), Protected)
+ lazy val Object_finalize = newMethod(ObjectClass, nme.finalize_, MethodType(Nil, UnitType), Protected)
+ lazy val Object_notify = newMethod(ObjectClass, nme.notify_, MethodType(Nil, UnitType))
+ lazy val Object_notifyAll = newMethod(ObjectClass, nme.notifyAll_, MethodType(Nil, UnitType))
+ lazy val Object_wait = newMethod(ObjectClass, nme.wait_, MethodType(Nil, UnitType))
+ lazy val Object_waitL = newMethod(ObjectClass, nme.wait_, MethodType(LongType :: Nil, UnitType))
+ lazy val Object_waitLI = newMethod(ObjectClass, nme.wait_, MethodType(LongType :: IntType :: Nil, UnitType))
+
+ def ObjectMethods = List(Object_eq, Object_ne, Object_synchronized, Object_clone,
+ Object_finalize, Object_notify, Object_notifyAll, Object_wait, Object_waitL, Object_waitLI)
+
+ /** Dummy method needed by elimByName */
+ lazy val dummyApply = newPolyMethod(
+ OpsPackageClass, nme.dummyApply, 1,
+ pt => MethodType(List(FunctionOf(Nil, PolyParam(pt, 0))), PolyParam(pt, 0)))
+
+ /** Method representing a throw */
+ lazy val throwMethod = newMethod(OpsPackageClass, nme.THROWkw,
+ MethodType(List(ThrowableType), NothingType))
+
+ lazy val NothingClass: ClassSymbol = newCompleteClassSymbol(
+ ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyClass.typeRef))
+ def NothingType = NothingClass.typeRef
+ lazy val NullClass: ClassSymbol = newCompleteClassSymbol(
+ ScalaPackageClass, tpnme.Null, AbstractFinal, List(ObjectClass.typeRef))
+ def NullType = NullClass.typeRef
+
+ lazy val ScalaPredefModuleRef = ctx.requiredModuleRef("scala.Predef")
+ def ScalaPredefModule(implicit ctx: Context) = ScalaPredefModuleRef.symbol
+
+ lazy val Predef_conformsR = ScalaPredefModule.requiredMethodRef("$conforms")
+ def Predef_conforms(implicit ctx: Context) = Predef_conformsR.symbol
+ lazy val Predef_classOfR = ScalaPredefModule.requiredMethodRef("classOf")
+ def Predef_classOf(implicit ctx: Context) = Predef_classOfR.symbol
+
+ lazy val ScalaRuntimeModuleRef = ctx.requiredModuleRef("scala.runtime.ScalaRunTime")
+ def ScalaRuntimeModule(implicit ctx: Context) = ScalaRuntimeModuleRef.symbol
+ def ScalaRuntimeClass(implicit ctx: Context) = ScalaRuntimeModule.moduleClass.asClass
+
+ def runtimeMethodRef(name: PreName) = ScalaRuntimeModule.requiredMethodRef(name)
+ def ScalaRuntime_dropR(implicit ctx: Context) = runtimeMethodRef(nme.drop)
+ def ScalaRuntime_drop(implicit ctx: Context) = ScalaRuntime_dropR.symbol
+
+ lazy val BoxesRunTimeModuleRef = ctx.requiredModuleRef("scala.runtime.BoxesRunTime")
+ def BoxesRunTimeModule(implicit ctx: Context) = BoxesRunTimeModuleRef.symbol
+ def BoxesRunTimeClass(implicit ctx: Context) = BoxesRunTimeModule.moduleClass.asClass
+ lazy val ScalaStaticsModuleRef = ctx.requiredModuleRef("scala.runtime.Statics")
+ def ScalaStaticsModule(implicit ctx: Context) = ScalaStaticsModuleRef.symbol
+ def ScalaStaticsClass(implicit ctx: Context) = ScalaStaticsModule.moduleClass.asClass
+
+ def staticsMethodRef(name: PreName) = ScalaStaticsModule.requiredMethodRef(name)
+ def staticsMethod(name: PreName) = ScalaStaticsModule.requiredMethod(name)
+
+ lazy val DottyPredefModuleRef = ctx.requiredModuleRef("dotty.DottyPredef")
+ def DottyPredefModule(implicit ctx: Context) = DottyPredefModuleRef.symbol
+
+ def Predef_eqAny(implicit ctx: Context) = DottyPredefModule.requiredMethod(nme.eqAny)
+
+ lazy val DottyArraysModuleRef = ctx.requiredModuleRef("dotty.runtime.Arrays")
+ def DottyArraysModule(implicit ctx: Context) = DottyArraysModuleRef.symbol
+ def newGenericArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newGenericArray")
+ def newArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newArray")
+
+ lazy val NilModuleRef = ctx.requiredModuleRef("scala.collection.immutable.Nil")
+ def NilModule(implicit ctx: Context) = NilModuleRef.symbol
+
+ lazy val SingletonClass: ClassSymbol =
+ // needed as a synthetic class because Scala 2.x refers to it in classfiles
+ // but does not define it as an explicit class.
+ newCompleteClassSymbol(
+ ScalaPackageClass, tpnme.Singleton, PureInterfaceCreationFlags | Final,
+ List(AnyClass.typeRef), EmptyScope)
+
+ lazy val SeqType: TypeRef = ctx.requiredClassRef("scala.collection.Seq")
+ def SeqClass(implicit ctx: Context) = SeqType.symbol.asClass
+
+ lazy val Seq_applyR = SeqClass.requiredMethodRef(nme.apply)
+ def Seq_apply(implicit ctx: Context) = Seq_applyR.symbol
+ lazy val Seq_headR = SeqClass.requiredMethodRef(nme.head)
+ def Seq_head(implicit ctx: Context) = Seq_headR.symbol
+
+ lazy val ArrayType: TypeRef = ctx.requiredClassRef("scala.Array")
+ def ArrayClass(implicit ctx: Context) = ArrayType.symbol.asClass
+ lazy val Array_applyR = ArrayClass.requiredMethodRef(nme.apply)
+ def Array_apply(implicit ctx: Context) = Array_applyR.symbol
+ lazy val Array_updateR = ArrayClass.requiredMethodRef(nme.update)
+ def Array_update(implicit ctx: Context) = Array_updateR.symbol
+ lazy val Array_lengthR = ArrayClass.requiredMethodRef(nme.length)
+ def Array_length(implicit ctx: Context) = Array_lengthR.symbol
+ lazy val Array_cloneR = ArrayClass.requiredMethodRef(nme.clone_)
+ def Array_clone(implicit ctx: Context) = Array_cloneR.symbol
+ lazy val ArrayConstructorR = ArrayClass.requiredMethodRef(nme.CONSTRUCTOR)
+ def ArrayConstructor(implicit ctx: Context) = ArrayConstructorR.symbol
+ lazy val ArrayModuleType = ctx.requiredModuleRef("scala.Array")
+ def ArrayModule(implicit ctx: Context) = ArrayModuleType.symbol.moduleClass.asClass
+
+
+ lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", BoxedUnitType, java.lang.Void.TYPE, UnitEnc)
+ def UnitClass(implicit ctx: Context) = UnitType.symbol.asClass
+ lazy val BooleanType = valueTypeRef("scala.Boolean", BoxedBooleanType, java.lang.Boolean.TYPE, BooleanEnc)
+ def BooleanClass(implicit ctx: Context) = BooleanType.symbol.asClass
+ lazy val Boolean_notR = BooleanClass.requiredMethodRef(nme.UNARY_!)
+ def Boolean_! = Boolean_notR.symbol
+ lazy val Boolean_andR = BooleanClass.requiredMethodRef(nme.ZAND) // ### harmonize required... calls
+ def Boolean_&& = Boolean_andR.symbol
+ lazy val Boolean_orR = BooleanClass.requiredMethodRef(nme.ZOR)
+ def Boolean_|| = Boolean_orR.symbol
+
+ lazy val ByteType: TypeRef = valueTypeRef("scala.Byte", BoxedByteType, java.lang.Byte.TYPE, ByteEnc)
+ def ByteClass(implicit ctx: Context) = ByteType.symbol.asClass
+ lazy val ShortType: TypeRef = valueTypeRef("scala.Short", BoxedShortType, java.lang.Short.TYPE, ShortEnc)
+ def ShortClass(implicit ctx: Context) = ShortType.symbol.asClass
+ lazy val CharType: TypeRef = valueTypeRef("scala.Char", BoxedCharType, java.lang.Character.TYPE, CharEnc)
+ def CharClass(implicit ctx: Context) = CharType.symbol.asClass
+ lazy val IntType: TypeRef = valueTypeRef("scala.Int", BoxedIntType, java.lang.Integer.TYPE, IntEnc)
+ def IntClass(implicit ctx: Context) = IntType.symbol.asClass
+ lazy val Int_minusR = IntClass.requiredMethodRef(nme.MINUS, List(IntType))
+ def Int_- = Int_minusR.symbol
+ lazy val Int_plusR = IntClass.requiredMethodRef(nme.PLUS, List(IntType))
+ def Int_+ = Int_plusR.symbol
+ lazy val Int_divR = IntClass.requiredMethodRef(nme.DIV, List(IntType))
+ def Int_/ = Int_divR.symbol
+ lazy val Int_mulR = IntClass.requiredMethodRef(nme.MUL, List(IntType))
+ def Int_* = Int_mulR.symbol
+ lazy val Int_eqR = IntClass.requiredMethodRef(nme.EQ, List(IntType))
+ def Int_== = Int_eqR.symbol
+ lazy val Int_geR = IntClass.requiredMethodRef(nme.GE, List(IntType))
+ def Int_>= = Int_geR.symbol
+ lazy val Int_leR = IntClass.requiredMethodRef(nme.LE, List(IntType))
+ def Int_<= = Int_leR.symbol
+ lazy val LongType: TypeRef = valueTypeRef("scala.Long", BoxedLongType, java.lang.Long.TYPE, LongEnc)
+ def LongClass(implicit ctx: Context) = LongType.symbol.asClass
+ lazy val Long_XOR_Long = LongType.member(nme.XOR).requiredSymbol(
+ x => (x is Method) && (x.info.firstParamTypes.head isRef defn.LongClass)
+ )
+ lazy val Long_LSR_Int = LongType.member(nme.LSR).requiredSymbol(
+ x => (x is Method) && (x.info.firstParamTypes.head isRef defn.IntClass)
+ )
+ lazy val FloatType: TypeRef = valueTypeRef("scala.Float", BoxedFloatType, java.lang.Float.TYPE, FloatEnc)
+ def FloatClass(implicit ctx: Context) = FloatType.symbol.asClass
+ lazy val DoubleType: TypeRef = valueTypeRef("scala.Double", BoxedDoubleType, java.lang.Double.TYPE, DoubleEnc)
+ def DoubleClass(implicit ctx: Context) = DoubleType.symbol.asClass
+
+ lazy val BoxedUnitType: TypeRef = ctx.requiredClassRef("scala.runtime.BoxedUnit")
+ def BoxedUnitClass(implicit ctx: Context) = BoxedUnitType.symbol.asClass
+
+ def BoxedUnit_UNIT(implicit ctx: Context) = BoxedUnitClass.linkedClass.requiredValue("UNIT")
+
+ lazy val BoxedBooleanType: TypeRef = ctx.requiredClassRef("java.lang.Boolean")
+ def BoxedBooleanClass(implicit ctx: Context) = BoxedBooleanType.symbol.asClass
+ lazy val BoxedByteType: TypeRef = ctx.requiredClassRef("java.lang.Byte")
+ def BoxedByteClass(implicit ctx: Context) = BoxedByteType.symbol.asClass
+ lazy val BoxedShortType: TypeRef = ctx.requiredClassRef("java.lang.Short")
+ def BoxedShortClass(implicit ctx: Context) = BoxedShortType.symbol.asClass
+ lazy val BoxedCharType: TypeRef = ctx.requiredClassRef("java.lang.Character")
+ def BoxedCharClass(implicit ctx: Context) = BoxedCharType.symbol.asClass
+ lazy val BoxedIntType: TypeRef = ctx.requiredClassRef("java.lang.Integer")
+ def BoxedIntClass(implicit ctx: Context) = BoxedIntType.symbol.asClass
+ lazy val BoxedLongType: TypeRef = ctx.requiredClassRef("java.lang.Long")
+ def BoxedLongClass(implicit ctx: Context) = BoxedLongType.symbol.asClass
+ lazy val BoxedFloatType: TypeRef = ctx.requiredClassRef("java.lang.Float")
+ def BoxedFloatClass(implicit ctx: Context) = BoxedFloatType.symbol.asClass
+ lazy val BoxedDoubleType: TypeRef = ctx.requiredClassRef("java.lang.Double")
+ def BoxedDoubleClass(implicit ctx: Context) = BoxedDoubleType.symbol.asClass
+
+ lazy val BoxedBooleanModule = ctx.requiredModule("java.lang.Boolean")
+ lazy val BoxedByteModule = ctx.requiredModule("java.lang.Byte")
+ lazy val BoxedShortModule = ctx.requiredModule("java.lang.Short")
+ lazy val BoxedCharModule = ctx.requiredModule("java.lang.Character")
+ lazy val BoxedIntModule = ctx.requiredModule("java.lang.Integer")
+ lazy val BoxedLongModule = ctx.requiredModule("java.lang.Long")
+ lazy val BoxedFloatModule = ctx.requiredModule("java.lang.Float")
+ lazy val BoxedDoubleModule = ctx.requiredModule("java.lang.Double")
+ lazy val BoxedUnitModule = ctx.requiredModule("java.lang.Void")
+
+ lazy val ByNameParamClass2x = specialPolyClass(tpnme.BYNAME_PARAM_CLASS, Covariant, Seq(AnyType))
+ lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN, EmptyFlags, Seq(AnyType))
+
+ lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS, Covariant, Seq(ObjectType, SeqType))
+
+ // fundamental classes
+ lazy val StringClass = ctx.requiredClass("java.lang.String")
+ def StringType: Type = StringClass.typeRef
+ lazy val StringModule = StringClass.linkedClass
+
+ lazy val String_+ = newMethod(StringClass, nme.raw.PLUS, methOfAny(StringType), Final)
+ lazy val String_valueOf_Object = StringModule.info.member(nme.valueOf).suchThat(_.info.firstParamTypes match {
+ case List(pt) => (pt isRef AnyClass) || (pt isRef ObjectClass)
+ case _ => false
+ }).symbol
+
+ lazy val JavaCloneableClass = ctx.requiredClass("java.lang.Cloneable")
+ lazy val NullPointerExceptionClass = ctx.requiredClass("java.lang.NullPointerException")
+ lazy val ClassClass = ctx.requiredClass("java.lang.Class")
+ lazy val BoxedNumberClass = ctx.requiredClass("java.lang.Number")
+ lazy val ThrowableClass = ctx.requiredClass("java.lang.Throwable")
+ lazy val ClassCastExceptionClass = ctx.requiredClass("java.lang.ClassCastException")
+ lazy val JavaSerializableClass = ctx.requiredClass("java.lang.Serializable")
+ lazy val ComparableClass = ctx.requiredClass("java.lang.Comparable")
+
+ // in scalac modified to have Any as parent
+
+ lazy val SerializableType: TypeRef = ctx.requiredClassRef("scala.Serializable")
+ def SerializableClass(implicit ctx: Context) = SerializableType.symbol.asClass
+ lazy val StringBuilderType: TypeRef = ctx.requiredClassRef("scala.collection.mutable.StringBuilder")
+ def StringBuilderClass(implicit ctx: Context) = StringBuilderType.symbol.asClass
+ lazy val MatchErrorType: TypeRef = ctx.requiredClassRef("scala.MatchError")
+ def MatchErrorClass(implicit ctx: Context) = MatchErrorType.symbol.asClass
+
+ lazy val StringAddType: TypeRef = ctx.requiredClassRef("scala.runtime.StringAdd")
+ def StringAddClass(implicit ctx: Context) = StringAddType.symbol.asClass
+
+ lazy val StringAdd_plusR = StringAddClass.requiredMethodRef(nme.raw.PLUS)
+ def StringAdd_+(implicit ctx: Context) = StringAdd_plusR.symbol
+
+ lazy val PartialFunctionType: TypeRef = ctx.requiredClassRef("scala.PartialFunction")
+ def PartialFunctionClass(implicit ctx: Context) = PartialFunctionType.symbol.asClass
+ lazy val AbstractPartialFunctionType: TypeRef = ctx.requiredClassRef("scala.runtime.AbstractPartialFunction")
+ def AbstractPartialFunctionClass(implicit ctx: Context) = AbstractPartialFunctionType.symbol.asClass
+ lazy val SymbolType: TypeRef = ctx.requiredClassRef("scala.Symbol")
+ def SymbolClass(implicit ctx: Context) = SymbolType.symbol.asClass
+ lazy val DynamicType: TypeRef = ctx.requiredClassRef("scala.Dynamic")
+ def DynamicClass(implicit ctx: Context) = DynamicType.symbol.asClass
+ lazy val OptionType: TypeRef = ctx.requiredClassRef("scala.Option")
+ def OptionClass(implicit ctx: Context) = OptionType.symbol.asClass
+ lazy val ProductType: TypeRef = ctx.requiredClassRef("scala.Product")
+ def ProductClass(implicit ctx: Context) = ProductType.symbol.asClass
+ lazy val Product_canEqualR = ProductClass.requiredMethodRef(nme.canEqual_)
+ def Product_canEqual(implicit ctx: Context) = Product_canEqualR.symbol
+ lazy val Product_productArityR = ProductClass.requiredMethodRef(nme.productArity)
+ def Product_productArity(implicit ctx: Context) = Product_productArityR.symbol
+ lazy val Product_productPrefixR = ProductClass.requiredMethodRef(nme.productPrefix)
+ def Product_productPrefix(implicit ctx: Context) = Product_productPrefixR.symbol
+ lazy val LanguageModuleRef = ctx.requiredModule("scala.language")
+ def LanguageModuleClass(implicit ctx: Context) = LanguageModuleRef.symbol.moduleClass.asClass
+ lazy val NonLocalReturnControlType: TypeRef = ctx.requiredClassRef("scala.runtime.NonLocalReturnControl")
+
+ lazy val ClassTagType = ctx.requiredClassRef("scala.reflect.ClassTag")
+ def ClassTagClass(implicit ctx: Context) = ClassTagType.symbol.asClass
+ def ClassTagModule(implicit ctx: Context) = ClassTagClass.companionModule
+
+ lazy val EqType = ctx.requiredClassRef("scala.Eq")
+ def EqClass(implicit ctx: Context) = EqType.symbol.asClass
+
+ // Annotation base classes
+ lazy val AnnotationType = ctx.requiredClassRef("scala.annotation.Annotation")
+ def AnnotationClass(implicit ctx: Context) = AnnotationType.symbol.asClass
+ lazy val ClassfileAnnotationType = ctx.requiredClassRef("scala.annotation.ClassfileAnnotation")
+ def ClassfileAnnotationClass(implicit ctx: Context) = ClassfileAnnotationType.symbol.asClass
+ lazy val StaticAnnotationType = ctx.requiredClassRef("scala.annotation.StaticAnnotation")
+ def StaticAnnotationClass(implicit ctx: Context) = StaticAnnotationType.symbol.asClass
+
+ // Annotation classes
+ lazy val AliasAnnotType = ctx.requiredClassRef("scala.annotation.internal.Alias")
+ def AliasAnnot(implicit ctx: Context) = AliasAnnotType.symbol.asClass
+ lazy val AnnotationDefaultAnnotType = ctx.requiredClassRef("scala.annotation.internal.AnnotationDefault")
+ def AnnotationDefaultAnnot(implicit ctx: Context) = AnnotationDefaultAnnotType.symbol.asClass
+ lazy val BodyAnnotType = ctx.requiredClassRef("scala.annotation.internal.Body")
+ def BodyAnnot(implicit ctx: Context) = BodyAnnotType.symbol.asClass
+ lazy val ChildAnnotType = ctx.requiredClassRef("scala.annotation.internal.Child")
+ def ChildAnnot(implicit ctx: Context) = ChildAnnotType.symbol.asClass
+ lazy val CovariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.CovariantBetween")
+ def CovariantBetweenAnnot(implicit ctx: Context) = CovariantBetweenAnnotType.symbol.asClass
+ lazy val ContravariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.ContravariantBetween")
+ def ContravariantBetweenAnnot(implicit ctx: Context) = ContravariantBetweenAnnotType.symbol.asClass
+ lazy val DeprecatedAnnotType = ctx.requiredClassRef("scala.deprecated")
+ def DeprecatedAnnot(implicit ctx: Context) = DeprecatedAnnotType.symbol.asClass
+ lazy val ImplicitNotFoundAnnotType = ctx.requiredClassRef("scala.annotation.implicitNotFound")
+ def ImplicitNotFoundAnnot(implicit ctx: Context) = ImplicitNotFoundAnnotType.symbol.asClass
+ lazy val InlineAnnotType = ctx.requiredClassRef("scala.inline")
+ def InlineAnnot(implicit ctx: Context) = InlineAnnotType.symbol.asClass
+ lazy val InlineParamAnnotType = ctx.requiredClassRef("scala.annotation.internal.InlineParam")
+ def InlineParamAnnot(implicit ctx: Context) = InlineParamAnnotType.symbol.asClass
+ lazy val InvariantBetweenAnnotType = ctx.requiredClassRef("scala.annotation.internal.InvariantBetween")
+ def InvariantBetweenAnnot(implicit ctx: Context) = InvariantBetweenAnnotType.symbol.asClass
+ lazy val MigrationAnnotType = ctx.requiredClassRef("scala.annotation.migration")
+ def MigrationAnnot(implicit ctx: Context) = MigrationAnnotType.symbol.asClass
+ lazy val NativeAnnotType = ctx.requiredClassRef("scala.native")
+ def NativeAnnot(implicit ctx: Context) = NativeAnnotType.symbol.asClass
+ lazy val RemoteAnnotType = ctx.requiredClassRef("scala.remote")
+ def RemoteAnnot(implicit ctx: Context) = RemoteAnnotType.symbol.asClass
+ lazy val RepeatedAnnotType = ctx.requiredClassRef("scala.annotation.internal.Repeated")
+ def RepeatedAnnot(implicit ctx: Context) = RepeatedAnnotType.symbol.asClass
+ lazy val SourceFileAnnotType = ctx.requiredClassRef("scala.annotation.internal.SourceFile")
+ def SourceFileAnnot(implicit ctx: Context) = SourceFileAnnotType.symbol.asClass
+ lazy val ScalaSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaSignature")
+ def ScalaSignatureAnnot(implicit ctx: Context) = ScalaSignatureAnnotType.symbol.asClass
+ lazy val ScalaLongSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaLongSignature")
+ def ScalaLongSignatureAnnot(implicit ctx: Context) = ScalaLongSignatureAnnotType.symbol.asClass
+ lazy val ScalaStrictFPAnnotType = ctx.requiredClassRef("scala.annotation.strictfp")
+ def ScalaStrictFPAnnot(implicit ctx: Context) = ScalaStrictFPAnnotType.symbol.asClass
+ lazy val ScalaStaticAnnotType = ctx.requiredClassRef("scala.annotation.static")
+ def ScalaStaticAnnot(implicit ctx: Context) = ScalaStaticAnnotType.symbol.asClass
+ lazy val SerialVersionUIDAnnotType = ctx.requiredClassRef("scala.SerialVersionUID")
+ def SerialVersionUIDAnnot(implicit ctx: Context) = SerialVersionUIDAnnotType.symbol.asClass
+ lazy val TASTYSignatureAnnotType = ctx.requiredClassRef("scala.annotation.internal.TASTYSignature")
+ def TASTYSignatureAnnot(implicit ctx: Context) = TASTYSignatureAnnotType.symbol.asClass
+ lazy val TASTYLongSignatureAnnotType = ctx.requiredClassRef("scala.annotation.internal.TASTYLongSignature")
+ def TASTYLongSignatureAnnot(implicit ctx: Context) = TASTYLongSignatureAnnotType.symbol.asClass
+ lazy val TailrecAnnotType = ctx.requiredClassRef("scala.annotation.tailrec")
+ def TailrecAnnot(implicit ctx: Context) = TailrecAnnotType.symbol.asClass
+ lazy val SwitchAnnotType = ctx.requiredClassRef("scala.annotation.switch")
+ def SwitchAnnot(implicit ctx: Context) = SwitchAnnotType.symbol.asClass
+ lazy val ThrowsAnnotType = ctx.requiredClassRef("scala.throws")
+ def ThrowsAnnot(implicit ctx: Context) = ThrowsAnnotType.symbol.asClass
+ lazy val TransientAnnotType = ctx.requiredClassRef("scala.transient")
+ def TransientAnnot(implicit ctx: Context) = TransientAnnotType.symbol.asClass
+ lazy val UncheckedAnnotType = ctx.requiredClassRef("scala.unchecked")
+ def UncheckedAnnot(implicit ctx: Context) = UncheckedAnnotType.symbol.asClass
+ lazy val UncheckedStableAnnotType = ctx.requiredClassRef("scala.annotation.unchecked.uncheckedStable")
+ def UncheckedStableAnnot(implicit ctx: Context) = UncheckedStableAnnotType.symbol.asClass
+ lazy val UncheckedVarianceAnnotType = ctx.requiredClassRef("scala.annotation.unchecked.uncheckedVariance")
+ def UncheckedVarianceAnnot(implicit ctx: Context) = UncheckedVarianceAnnotType.symbol.asClass
+ lazy val UnsafeNonvariantAnnotType = ctx.requiredClassRef("scala.annotation.internal.UnsafeNonvariant")
+ def UnsafeNonvariantAnnot(implicit ctx: Context) = UnsafeNonvariantAnnotType.symbol.asClass
+ lazy val VolatileAnnotType = ctx.requiredClassRef("scala.volatile")
+ def VolatileAnnot(implicit ctx: Context) = VolatileAnnotType.symbol.asClass
+ lazy val FieldMetaAnnotType = ctx.requiredClassRef("scala.annotation.meta.field")
+ def FieldMetaAnnot(implicit ctx: Context) = FieldMetaAnnotType.symbol.asClass
+ lazy val GetterMetaAnnotType = ctx.requiredClassRef("scala.annotation.meta.getter")
+ def GetterMetaAnnot(implicit ctx: Context) = GetterMetaAnnotType.symbol.asClass
+ lazy val SetterMetaAnnotType = ctx.requiredClassRef("scala.annotation.meta.setter")
+ def SetterMetaAnnot(implicit ctx: Context) = SetterMetaAnnotType.symbol.asClass
+
+ // convenient one-parameter method types
+ def methOfAny(tp: Type) = MethodType(List(AnyType), tp)
+ def methOfAnyVal(tp: Type) = MethodType(List(AnyValType), tp)
+ def methOfAnyRef(tp: Type) = MethodType(List(ObjectType), tp)
+
+ // Derived types
+
+ def RepeatedParamType = RepeatedParamClass.typeRef
+ def ThrowableType = ThrowableClass.typeRef
+
+ def ClassType(arg: Type)(implicit ctx: Context) = {
+ val ctype = ClassClass.typeRef
+ if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg)
+ }
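+  // For example (illustrative), `ClassType(IntType)` is `Class[Int]` before erasure
+  // and the unparameterized `Class` type afterwards.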
+
+  /** The enumeration type, given a value of the enumeration */
+ def EnumType(sym: Symbol)(implicit ctx: Context) =
+ // given (in java): "class A { enum E { VAL1 } }"
+ // - sym: the symbol of the actual enumeration value (VAL1)
+ // - .owner: the ModuleClassSymbol of the enumeration (object E)
+ // - .linkedClass: the ClassSymbol of the enumeration (class E)
+ sym.owner.linkedClass.typeRef
+
+ object FunctionOf {
+ def apply(args: List[Type], resultType: Type)(implicit ctx: Context) =
+ FunctionType(args.length).appliedTo(args ::: resultType :: Nil)
+ def unapply(ft: Type)(implicit ctx: Context)/*: Option[(List[Type], Type)]*/ = {
+ // -language:keepUnions difference: unapply needs result type because inferred type
+ // is Some[(List[Type], Type)] | None, which is not a legal unapply type.
+ val tsym = ft.typeSymbol
+ lazy val targs = ft.argInfos
+ val numArgs = targs.length - 1
+ if (numArgs >= 0 && numArgs <= MaxFunctionArity &&
+ (FunctionType(numArgs).symbol == tsym)) Some(targs.init, targs.last)
+ else None
+ }
+ }
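+  // Illustrative sketch of how `FunctionOf` composes and decomposes function types
+  // (hypothetical arguments, using the value types defined above):
+  //
+  //   FunctionOf(List(IntType, BooleanType), UnitType)
+  //     == FunctionType(2).appliedTo(IntType :: BooleanType :: UnitType :: Nil)
+  //     // i.e. scala.Function2[Int, Boolean, Unit]
+  //
+  //   tp match { case FunctionOf(args, res) => ... }  // matches any FunctionN application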
+
+ object ArrayOf {
+ def apply(elem: Type)(implicit ctx: Context) =
+ if (ctx.erasedTypes) JavaArrayType(elem)
+ else ArrayType.appliedTo(elem :: Nil)
+ def unapply(tp: Type)(implicit ctx: Context): Option[Type] = tp.dealias match {
+ case at: RefinedType if (at isRef ArrayType.symbol) && at.argInfos.length == 1 => Some(at.argInfos.head)
+ case _ => None
+ }
+ }
+
+ /** An extractor for multi-dimensional arrays.
+ * Note that this will also extract the high bound if an
+ * element type is a wildcard. E.g.
+ *
+ * Array[_ <: Array[_ <: Number]]
+ *
+ * would match
+ *
+ * MultiArrayOf(<Number>, 2)
+ */
+ object MultiArrayOf {
+ def apply(elem: Type, ndims: Int)(implicit ctx: Context): Type =
+ if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1))
+ def unapply(tp: Type)(implicit ctx: Context): Option[(Type, Int)] = tp match {
+ case ArrayOf(elemtp) =>
+ def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match {
+ case TypeBounds(lo, hi) => recur(hi)
+ case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1)
+ case _ => Some(elemtp, 1)
+ }
+ recur(elemtp)
+ case _ =>
+ None
+ }
+ }
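+  // Illustrative sketch (hypothetical element type):
+  //
+  //   MultiArrayOf(IntType, 2)              // builds Array[Array[Int]]
+  //   ArrayOf(ArrayOf(IntType)) match {
+  //     case MultiArrayOf(elem, n) => ...   // binds elem = IntType, n = 2
+  //   }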
+
+ // ----- Symbol sets ---------------------------------------------------
+
+ lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxAbstractFunctionArity, 0)
+ val AbstractFunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => AbstractFunctionType.map(_.symbol.asClass))
+ def AbstractFunctionClass(n: Int)(implicit ctx: Context) = AbstractFunctionClassPerRun()(ctx)(n)
+ lazy val FunctionType = mkArityArray("scala.Function", MaxFunctionArity, 0)
+ def FunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => FunctionType.map(_.symbol.asClass))
+ def FunctionClass(n: Int)(implicit ctx: Context) = FunctionClassPerRun()(ctx)(n)
+ lazy val Function0_applyR = FunctionType(0).symbol.requiredMethodRef(nme.apply)
+ def Function0_apply(implicit ctx: Context) = Function0_applyR.symbol
+
+ lazy val TupleType = mkArityArray("scala.Tuple", MaxTupleArity, 2)
+ lazy val ProductNType = mkArityArray("scala.Product", MaxTupleArity, 0)
+
+ private lazy val FunctionTypes: Set[TypeRef] = FunctionType.toSet
+ private lazy val TupleTypes: Set[TypeRef] = TupleType.toSet
+ private lazy val ProductTypes: Set[TypeRef] = ProductNType.toSet
+
+ /** If `cls` is a class in the scala package, its name, otherwise EmptyTypeName */
+ def scalaClassName(cls: Symbol)(implicit ctx: Context): TypeName =
+ if (cls.isClass && cls.owner == ScalaPackageClass) cls.asClass.name else EmptyTypeName
+
+ /** If type `ref` refers to a class in the scala package, its name, otherwise EmptyTypeName */
+ def scalaClassName(ref: Type)(implicit ctx: Context): TypeName = scalaClassName(ref.classSymbol)
+
+ private def isVarArityClass(cls: Symbol, prefix: Name) = {
+ val name = scalaClassName(cls)
+ name.startsWith(prefix) && name.drop(prefix.length).forall(_.isDigit)
+ }
+
+ def isBottomClass(cls: Symbol) =
+ cls == NothingClass || cls == NullClass
+ def isBottomType(tp: Type) =
+ tp.derivesFrom(NothingClass) || tp.derivesFrom(NullClass)
+
+ def isFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.Function)
+ def isAbstractFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.AbstractFunction)
+ def isTupleClass(cls: Symbol) = isVarArityClass(cls, tpnme.Tuple)
+ def isProductClass(cls: Symbol) = isVarArityClass(cls, tpnme.Product)
+
+ val StaticRootImportFns = List[() => TermRef](
+ () => JavaLangPackageVal.termRef,
+ () => ScalaPackageVal.termRef
+ )
+
+ val PredefImportFns = List[() => TermRef](
+ () => ScalaPredefModuleRef,
+ () => DottyPredefModuleRef
+ )
+
+ lazy val RootImportFns =
+ if (ctx.settings.YnoImports.value) List.empty[() => TermRef]
+ else if (ctx.settings.YnoPredef.value) StaticRootImportFns
+ else StaticRootImportFns ++ PredefImportFns
+
+ lazy val RootImportTypes = RootImportFns.map(_())
+
+ /** Modules whose members are in the default namespace and their module classes */
+ lazy val UnqualifiedOwnerTypes: Set[NamedType] =
+ RootImportTypes.toSet[NamedType] ++ RootImportTypes.map(_.symbol.moduleClass.typeRef)
+
+ lazy val PhantomClasses = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
+
+ def isPolymorphicAfterErasure(sym: Symbol) =
+ (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf)
+
+ def isTupleType(tp: Type)(implicit ctx: Context) = {
+ val arity = tp.dealias.argInfos.length
+ arity <= MaxTupleArity && TupleType(arity) != null && (tp isRef TupleType(arity).symbol)
+ }
+
+ def tupleType(elems: List[Type]) = {
+ TupleType(elems.size).appliedTo(elems)
+ }
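+  // For example (illustrative), `tupleType(List(IntType, BooleanType))` yields
+  // `Tuple2[Int, Boolean]`, and `isTupleType` recognizes exactly such applied TupleN types.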
+
+ def isProductSubType(tp: Type)(implicit ctx: Context) =
+ (tp derivesFrom ProductType.symbol) && tp.baseClasses.exists(isProductClass)
+
+ def isFunctionType(tp: Type)(implicit ctx: Context) = {
+ val arity = functionArity(tp)
+ 0 <= arity && arity <= MaxFunctionArity && (tp isRef FunctionType(arity).symbol)
+ }
+
+ def functionArity(tp: Type)(implicit ctx: Context) = tp.dealias.argInfos.length - 1
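+  // For example (illustrative), `Int => Boolean` dealiases to `Function1[Int, Boolean]`;
+  // its `argInfos` has length 2, so `functionArity` is 1 and `isFunctionType` holds.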
+
+ // ----- primitive value class machinery ------------------------------------------
+
+ /** This class would also be obviated by the implicit function type design */
+ class PerRun[T](generate: Context => T) {
+ private var current: RunId = NoRunId
+ private var cached: T = _
+ def apply()(implicit ctx: Context): T = {
+ if (current != ctx.runId) {
+ cached = generate(ctx)
+ current = ctx.runId
+ }
+ cached
+ }
+ }
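+  // Note: a `PerRun` value is applied like a function, e.g. `ScalaValueClasses()` below;
+  // the generator is re-evaluated only when `ctx.runId` differs from the cached run id.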
+
+ lazy val ScalaNumericValueTypeList = List(
+ ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType)
+
+ private lazy val ScalaNumericValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypeList.toSet
+ private lazy val ScalaValueTypes: collection.Set[TypeRef] = ScalaNumericValueTypes + UnitType + BooleanType
+ private lazy val ScalaBoxedTypes = ScalaValueTypes map (t => boxedTypes(t.name))
+
+ val ScalaNumericValueClasses = new PerRun[collection.Set[Symbol]](implicit ctx => ScalaNumericValueTypes.map(_.symbol))
+ val ScalaValueClasses = new PerRun[collection.Set[Symbol]](implicit ctx => ScalaValueTypes.map(_.symbol))
+ val ScalaBoxedClasses = new PerRun[collection.Set[Symbol]](implicit ctx => ScalaBoxedTypes.map(_.symbol))
+
+ private val boxedTypes = mutable.Map[TypeName, TypeRef]()
+ private val valueTypeEnc = mutable.Map[TypeName, PrimitiveClassEnc]()
+
+// private val unboxedTypeRef = mutable.Map[TypeName, TypeRef]()
+// private val javaTypeToValueTypeRef = mutable.Map[Class[_], TypeRef]()
+// private val valueTypeNameToJavaType = mutable.Map[TypeName, Class[_]]()
+
+ private def valueTypeRef(name: String, boxed: TypeRef, jtype: Class[_], enc: Int): TypeRef = {
+ val vcls = ctx.requiredClassRef(name)
+ boxedTypes(vcls.name) = boxed
+ valueTypeEnc(vcls.name) = enc
+// unboxedTypeRef(boxed.name) = vcls
+// javaTypeToValueTypeRef(jtype) = vcls
+// valueTypeNameToJavaType(vcls.name) = jtype
+ vcls
+ }
+
+ /** The type of the boxed class corresponding to primitive value type `tp`. */
+ def boxedType(tp: Type)(implicit ctx: Context): TypeRef = boxedTypes(scalaClassName(tp))
+
+ def wrapArrayMethodName(elemtp: Type): TermName = {
+ val cls = elemtp.classSymbol
+ if (cls.isPrimitiveValueClass) nme.wrapXArray(cls.name)
+ else if (cls.derivesFrom(ObjectClass) && !cls.isPhantomClass) nme.wrapRefArray
+ else nme.genericWrapArray
+ }
+
+ type PrimitiveClassEnc = Int
+
+ val ByteEnc = 2
+ val ShortEnc = ByteEnc * 3
+ val CharEnc = 5
+ val IntEnc = ShortEnc * CharEnc
+ val LongEnc = IntEnc * 7
+ val FloatEnc = LongEnc * 11
+ val DoubleEnc = FloatEnc * 13
+ val BooleanEnc = 17
+ val UnitEnc = 19
+
+ def isValueSubType(tref1: TypeRef, tref2: TypeRef)(implicit ctx: Context) =
+ valueTypeEnc(tref2.name) % valueTypeEnc(tref1.name) == 0
+ def isValueSubClass(sym1: Symbol, sym2: Symbol) =
+ valueTypeEnc(sym2.asClass.name) % valueTypeEnc(sym1.asClass.name) == 0
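+  // The encodings above form a divisibility order: `isValueSubType(t1, t2)` holds exactly
+  // when the code of `t2` is divisible by the code of `t1`. Spelled out (values follow
+  // from the definitions above):
+  //
+  //   ByteEnc = 2, ShortEnc = 6, CharEnc = 5, IntEnc = 30, LongEnc = 210,
+  //   FloatEnc = 2310, DoubleEnc = 30030, BooleanEnc = 17, UnitEnc = 19
+  //
+  //   isValueSubType(ByteType, IntType)    // true:  30 % 2 == 0
+  //   isValueSubType(CharType, ShortType)  // false: 6 % 5 != 0
+  //   isValueSubType(BooleanType, IntType) // false: 30 % 17 != 0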
+
+ // ----- Initialization ---------------------------------------------------
+
+ /** Lists core classes that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticScalaClasses = List(
+ AnyClass,
+ AnyRefAlias,
+ RepeatedParamClass,
+ ByNameParamClass2x,
+ AnyValClass,
+ NullClass,
+ NothingClass,
+ SingletonClass,
+ EqualsPatternClass)
+
+ lazy val syntheticCoreClasses = syntheticScalaClasses ++ List(
+ EmptyPackageVal,
+ OpsPackageClass)
+
+ /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */
+ lazy val syntheticCoreMethods = AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod)
+
+ lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet
+
+ private[this] var _isInitialized = false
+ private def isInitialized = _isInitialized
+
+ def init()(implicit ctx: Context) = {
+ this.ctx = ctx
+ if (!_isInitialized) {
+ // force initialization of every symbol that is synthesized or hijacked by the compiler
+ val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses()
+
+ // Enter all symbols from the scalaShadowing package in the scala package
+ for (m <- ScalaShadowingPackageClass.info.decls)
+ ScalaPackageClass.enter(m)
+
+ _isInitialized = true
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
new file mode 100644
index 000000000..02d27ea33
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala
@@ -0,0 +1,78 @@
+package dotty.tools.dotc
+package core
+
+import Periods._
+import SymDenotations._
+import Contexts._
+import Types._
+import Symbols._
+import Denotations._
+import Phases._
+import java.lang.AssertionError
+import dotty.tools.dotc.util.DotClass
+
+object DenotTransformers {
+
+ /** A transformer group contains a sequence of transformers,
+ * ordered by the phase where they apply. Transformers are added
+ * to a group via `install`.
+ */
+
+ /** A transformer transforms denotations at a given phase */
+ trait DenotTransformer extends Phase {
+
+ /** The last phase during which the transformed denotations are valid */
+ def lastPhaseId(implicit ctx: Context) = ctx.nextDenotTransformerId(id + 1)
+
+ /** The validity period of the transformer in the given context */
+ def validFor(implicit ctx: Context): Period =
+ Period(ctx.runId, id, lastPhaseId)
+
+ /** The transformation method */
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation
+ }
+
+ /** A transformer that only transforms the info field of denotations */
+ trait InfoTransformer extends DenotTransformer {
+
+ def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type
+
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = {
+ val sym = ref.symbol
+ if (sym.exists && !mayChange(sym)) ref
+ else {
+ val info1 = transformInfo(ref.info, ref.symbol)
+ if (info1 eq ref.info) ref
+ else ref match {
+ case ref: SymDenotation => ref.copySymDenotation(info = info1)
+ case _ => ref.derivedSingleDenotation(ref.symbol, info1)
+ }
+ }
+ }
+
+ /** Denotations with a symbol where `mayChange` is false are guaranteed to be
+ * unaffected by this transform, so `transformInfo` need not be run. This
+ * can save time, and more importantly, can help avoid forcing symbol completers.
+ */
+ protected def mayChange(sym: Symbol)(implicit ctx: Context): Boolean = true
+ }
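+  // A minimal sketch of a concrete `InfoTransformer` (hypothetical phase, for illustration
+  // only; an actual phase would typically also mix in the tree transformation machinery):
+  //
+  //   class MyInfoPhase extends Phase with InfoTransformer {
+  //     def phaseName = "myInfoPhase"
+  //     def run(implicit ctx: Context): Unit = ()
+  //     def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = tp
+  //     override protected def mayChange(sym: Symbol)(implicit ctx: Context) = sym.isTerm
+  //   }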
+
+ /** A transformer that only transforms SymDenotations */
+ trait SymTransformer extends DenotTransformer {
+
+ def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation
+
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref match {
+ case ref: SymDenotation => transformSym(ref)
+ case _ => ref
+ }
+ }
+
+ /** A `DenotTransformer` trait that has the identity as its `transform` method.
+ * You might want to inherit from this trait so that new denotations can be
+ * installed using `installAfter` and `enteredAfter` at the end of the phase.
+ */
+ trait IdentityDenotTransformer extends DenotTransformer {
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala
new file mode 100644
index 000000000..6a39c5787
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala
@@ -0,0 +1,1217 @@
+package dotty.tools
+package dotc
+package core
+
+import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation }
+import Contexts.{Context, ContextBase}
+import Names.{Name, PreName}
+import Names.TypeName
+import StdNames._
+import Symbols.NoSymbol
+import Symbols._
+import Types._
+import Periods._
+import Flags._
+import DenotTransformers._
+import Decorators._
+import dotc.transform.Erasure
+import printing.Texts._
+import printing.Printer
+import io.AbstractFile
+import config.Config
+import util.common._
+import collection.mutable.ListBuffer
+import Decorators.SymbolIteratorDecorator
+
+/** Denotations represent the meaning of symbols and named types.
+ * The following diagram shows how the principal types of denotations
+ * and their denoting entities relate to each other. Lines ending in
+ * a down-arrow `v` are member methods. The two methods shown in the diagram are
+ * "symbol" and "deref". Both methods are parameterized by the current context,
+ * and are effectively indexed by current period.
+ *
+ *  Lines ending in a horizontal line mean subtyping (right is a subtype of left).
+ *
+ * NamedType------TermRefWithSignature
+ * | | Symbol---------ClassSymbol
+ * | | | |
+ * | denot | denot | denot | denot
+ * v v v v
+ * Denotation-+-----SingleDenotation-+------SymDenotation-+----ClassDenotation
+ * | |
+ * +-----MultiDenotation |
+ * |
+ * +--UniqueRefDenotation
+ * +--JointRefDenotation
+ *
+ * Here's a short summary of the classes in this diagram.
+ *
+ * NamedType A type consisting of a prefix type and a name, with fields
+ * prefix: Type
+ * name: Name
+ * It has two subtypes: TermRef and TypeRef
+ * TermRefWithSignature A TermRef that has in addition a signature to select an overloaded variant, with new field
+ * sig: Signature
+ * Symbol A label for a definition or declaration in one compiler run
+ * ClassSymbol A symbol representing a class
+ * Denotation The meaning of a named type or symbol during a period
+ * MultiDenotation A denotation representing several overloaded members
+ * SingleDenotation A denotation representing a non-overloaded member or definition, with main fields
+ * symbol: Symbol
+ * info: Type
+ * UniqueRefDenotation A denotation referring to a single definition with some member type
+ * JointRefDenotation A denotation referring to a member that could resolve to several definitions
+ * SymDenotation A denotation representing a single definition with its original type, with main fields
+ * name: Name
+ * owner: Symbol
+ * flags: Flags
+ * privateWithin: Symbol
+ * annotations: List[Annotation]
+ * ClassDenotation A denotation representing a single class definition.
+ */
+object Denotations {
+
+ implicit def eqDenotation: Eq[Denotation, Denotation] = Eq
+
+ /** A denotation is the result of resolving
+ * a name (either simple identifier or select) during a given period.
+ *
+ * Denotations can be combined with `&` and `|`.
+ * & is conjunction, | is disjunction.
+ *
+ * `&` will create an overloaded denotation from two
+ * non-overloaded denotations if their signatures differ.
+ * Analogously `|` of two denotations with different signatures will give
+ * an empty denotation `NoDenotation`.
+ *
+ * A denotation might refer to `NoSymbol`. This is the case if the denotation
+ * was produced from a disjunction of two denotations with different symbols
+ * and there was no common symbol in a superclass that could substitute for
+ * both symbols. Here is an example:
+ *
+ * Say, we have:
+ *
+ * class A { def f: A }
+ * class B { def f: B }
+ * val x: A | B = if (test) new A else new B
+ * val y = x.f
+ *
+ * Then the denotation of `y` is `SingleDenotation(NoSymbol, A | B)`.
+ *
+   *  @param symbol  The referencing symbol, or NoSymbol if none exists
+ */
+ abstract class Denotation(val symbol: Symbol) extends util.DotClass with printing.Showable {
+
+ /** The type info of the denotation, exists only for non-overloaded denotations */
+ def info(implicit ctx: Context): Type
+
+ /** The type info, or, if this is a SymDenotation where the symbol
+ * is not yet completed, the completer
+ */
+ def infoOrCompleter: Type
+
+ /** The period during which this denotation is valid. */
+ def validFor: Period
+
+ /** Is this a reference to a type symbol? */
+ def isType: Boolean
+
+ /** Is this a reference to a term symbol? */
+ def isTerm: Boolean = !isType
+
+ /** Is this denotation overloaded? */
+ final def isOverloaded = isInstanceOf[MultiDenotation]
+
+ /** The signature of the denotation. */
+ def signature(implicit ctx: Context): Signature
+
+    /** Resolve an overloaded denotation to keep only the alternatives with the given signature
+ * when seen from prefix `site`.
+ * @param relaxed When true, consider only parameter signatures for a match.
+ */
+ def atSignature(sig: Signature, site: Type = NoPrefix, relaxed: Boolean = false)(implicit ctx: Context): Denotation
+
+ /** The variant of this denotation that's current in the given context.
+ * If no such denotation exists, returns the denotation with each alternative
+ * at its first point of definition.
+ */
+ def current(implicit ctx: Context): Denotation
+
+ /** Is this denotation different from NoDenotation or an ErrorDenotation? */
+ def exists: Boolean = true
+
+ /** A denotation with the info of this denotation transformed using `f` */
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation
+
+ /** If this denotation does not exist, fallback to alternative */
+ final def orElse(that: => Denotation) = if (this.exists) this else that
+
+ /** The set of alternative single-denotations making up this denotation */
+ final def alternatives: List[SingleDenotation] = altsWith(alwaysTrue)
+
+ /** The alternatives of this denotation that satisfy the predicate `p`. */
+ def altsWith(p: Symbol => Boolean): List[SingleDenotation]
+
+ /** The unique alternative of this denotation that satisfies the predicate `p`,
+ * or NoDenotation if no satisfying alternative exists.
+     *  @throws TypeError if there is more than one alternative that satisfies `p`.
+ */
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation
+
+ /** If this is a SingleDenotation, return it, otherwise throw a TypeError */
+ def checkUnique(implicit ctx: Context): SingleDenotation = suchThat(alwaysTrue)
+
+ /** Does this denotation have an alternative that satisfies the predicate `p`? */
+ def hasAltWith(p: SingleDenotation => Boolean): Boolean
+
+ /** The denotation made up from the alternatives of this denotation that
+ * are accessible from prefix `pre`, or NoDenotation if no accessible alternative exists.
+ */
+ def accessibleFrom(pre: Type, superAccess: Boolean = false)(implicit ctx: Context): Denotation
+
+ /** Find member of this denotation with given name and
+ * produce a denotation that contains the type of the member
+ * as seen from given prefix `pre`. Exclude all members that have
+ * flags in `excluded` from consideration.
+ */
+ def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation =
+ info.findMember(name, pre, excluded)
+
+ /** If this denotation is overloaded, filter with given predicate.
+ * If result is still overloaded throw a TypeError.
+ * Note: disambiguate is slightly different from suchThat in that
+ * single-denotations that do not satisfy the predicate are left alone
+ * (whereas suchThat would map them to NoDenotation).
+ */
+ def disambiguate(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation = this match {
+ case sdenot: SingleDenotation => sdenot
+ case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives)
+ }
+
+    /** Return the symbol in this denotation that satisfies the given predicate.
+     *  If `generateStubs` is specified, return a stub symbol if the denotation is a missing ref.
+     *  Throw a `TypeError` if the predicate fails to disambiguate the symbol or no alternative matches.
+ */
+ def requiredSymbol(p: Symbol => Boolean, source: AbstractFile = null, generateStubs: Boolean = true)(implicit ctx: Context): Symbol =
+ disambiguate(p) match {
+ case m @ MissingRef(ownerd, name) =>
+ if (generateStubs) {
+ m.ex.printStackTrace()
+ ctx.newStubSymbol(ownerd.symbol, name, source)
+ }
+ else NoSymbol
+ case NoDenotation | _: NoQualifyingRef =>
+ throw new TypeError(s"None of the alternatives of $this satisfies required predicate")
+ case denot =>
+ denot.symbol
+ }
+
+ def requiredMethod(name: PreName)(implicit ctx: Context): TermSymbol =
+ info.member(name.toTermName).requiredSymbol(_ is Method).asTerm
+ def requiredMethodRef(name: PreName)(implicit ctx: Context): TermRef =
+ requiredMethod(name).termRef
+
+ def requiredMethod(name: PreName, argTypes: List[Type])(implicit ctx: Context): TermSymbol =
+      info.member(name.toTermName).requiredSymbol(x =>
+ (x is Method) && x.info.paramTypess == List(argTypes)
+ ).asTerm
+ def requiredMethodRef(name: PreName, argTypes: List[Type])(implicit ctx: Context): TermRef =
+ requiredMethod(name, argTypes).termRef
+
+ def requiredValue(name: PreName)(implicit ctx: Context): TermSymbol =
+ info.member(name.toTermName).requiredSymbol(_.info.isParameterless).asTerm
+ def requiredValueRef(name: PreName)(implicit ctx: Context): TermRef =
+ requiredValue(name).termRef
+
+ def requiredClass(name: PreName)(implicit ctx: Context): ClassSymbol =
+ info.member(name.toTypeName).requiredSymbol(_.isClass).asClass
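+    // These helpers back the `required...` lookups used throughout `Definitions` above,
+    // e.g. (as seen earlier in this patch):
+    //
+    //   IntClass.requiredMethodRef(nme.MINUS, List(IntType))   // selects Int.-(Int)
+    //   DottyPredefModule.requiredMethod(nme.eqAny)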
+
+ /** The alternative of this denotation that has a type matching `targetType` when seen
+ * as a member of type `site`, `NoDenotation` if none exists.
+ */
+ def matchingDenotation(site: Type, targetType: Type)(implicit ctx: Context): SingleDenotation = {
+ def qualifies(sym: Symbol) = site.memberInfo(sym).matchesLoosely(targetType)
+ if (isOverloaded) {
+ atSignature(targetType.signature, site, relaxed = true) match {
+ case sd: SingleDenotation => sd.matchingDenotation(site, targetType)
+ case md => md.suchThat(qualifies(_))
+ }
+ }
+ else if (exists && !qualifies(symbol)) NoDenotation
+ else asSingleDenotation
+ }
+
+ /** Handle merge conflict by throwing a `MergeError` exception */
+ private def mergeConflict(tp1: Type, tp2: Type)(implicit ctx: Context): Type = {
+ def showType(tp: Type) = tp match {
+ case ClassInfo(_, cls, _, _, _) => cls.showLocated
+ case bounds: TypeBounds => i"type bounds $bounds"
+ case _ => tp.show
+ }
+ if (true) throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2)
+ else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") // flip condition for debugging
+ }
+
+ /** Merge two lists of names. If names in corresponding positions match, keep them,
+ * otherwise generate new synthetic names.
+ */
+ def mergeNames[N <: Name](names1: List[N], names2: List[N], syntheticName: Int => N): List[N] = {
+ for ((name1, name2, idx) <- (names1, names2, 0 until names1.length).zipped)
+ yield if (name1 == name2) name1 else syntheticName(idx)
+ }.toList
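+    // For illustration (hypothetical parameter names): merging (x, y) with (x, z) keeps
+    // the matching first name and replaces the mismatched second one:
+    //
+    //   mergeNames(List(x, y), List(x, z), nme.syntheticParamName)
+    //     == List(x, nme.syntheticParamName(1))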
+
+ /** Form a denotation by conjoining with denotation `that`.
+ *
+ * NoDenotations are dropped. MultiDenotations are handled by merging
+ * parts with same signatures. SingleDenotations with equal signatures
+ * are joined as follows:
+ *
+ * In a first step, consider only those denotations which have symbols
+ * that are accessible from prefix `pre`.
+ *
+ * If there are several such denotations, try to pick one by applying the following
+ * three precedence rules in decreasing order of priority:
+ *
+ * 1. Prefer denotations with more specific infos.
+ * 2. If infos are equally specific, prefer denotations with concrete symbols over denotations
+ * with abstract symbols.
+ * 3. If infos are equally specific and symbols are equally concrete,
+ * prefer denotations with symbols defined in subclasses
+ * over denotations with symbols defined in proper superclasses.
+ *
+ * If there is exactly one (preferred) accessible denotation, return it.
+ *
+ * If there is no preferred accessible denotation, return a JointRefDenotation
+ * with one of the operand symbols (unspecified which one), and an info which
+ * is the intersection (using `&` or `safe_&` if `safeIntersection` is true)
+ * of the infos of the operand denotations.
+ *
+ * If SingleDenotations with different signatures are joined, return NoDenotation.
+ */
+ def & (that: Denotation, pre: Type, safeIntersection: Boolean = false)(implicit ctx: Context): Denotation = {
+
+ /** Normally, `tp1 & tp2`. Special cases for matching methods and classes, with
+ * the possibility of raising a merge error.
+ */
+ def infoMeet(tp1: Type, tp2: Type): Type = {
+ if (tp1 eq tp2) tp1
+ else tp1 match {
+ case tp1: TypeBounds =>
+ tp2 match {
+ case tp2: TypeBounds => if (safeIntersection) tp1 safe_& tp2 else tp1 & tp2
+ case tp2: ClassInfo if tp1 contains tp2 => tp2
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix)
+ case tp2: TypeBounds if tp2 contains tp1 => tp1
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1 @ MethodType(names1, formals1) if isTerm =>
+ tp2 match {
+ case tp2 @ MethodType(names2, formals2) if ctx.typeComparer.matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit =>
+ tp1.derivedMethodType(
+ mergeNames(names1, names2, nme.syntheticParamName),
+ formals1,
+ infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1)))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case tp1: PolyType if isTerm =>
+ tp2 match {
+ case tp2: PolyType if ctx.typeComparer.matchingTypeParams(tp1, tp2) =>
+ tp1.derivedPolyType(
+ mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
+ tp1.paramBounds,
+ infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1)))
+ case _: MethodicType =>
+ mergeConflict(tp1, tp2)
+ }
+ case _ =>
+ tp1 & tp2
+ }
+ }
+
+ /** Try to merge denot1 and denot2 without adding a new signature. */
+ def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match {
+ case denot1 @ MultiDenotation(denot11, denot12) =>
+ val d1 = mergeDenot(denot11, denot2)
+ if (d1.exists) denot1.derivedMultiDenotation(d1, denot12)
+ else {
+ val d2 = mergeDenot(denot12, denot2)
+ if (d2.exists) denot1.derivedMultiDenotation(denot11, d2)
+ else NoDenotation
+ }
+ case denot1: SingleDenotation =>
+ if (denot1 eq denot2) denot1
+ else if (denot1.matches(denot2)) mergeSingleDenot(denot1, denot2)
+ else NoDenotation
+ }
+
+ /** Try to merge single-denotations. */
+ def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): SingleDenotation = {
+ val info1 = denot1.info
+ val info2 = denot2.info
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+
+ val sym2Accessible = sym2.isAccessibleFrom(pre)
+
+ /** Does `sym1` come before `sym2` in the linearization of `pre`? */
+ def precedes(sym1: Symbol, sym2: Symbol) = {
+ def precedesIn(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 => (sym1 eq bc) || !(sym2 eq bc) && precedesIn(bcs1)
+ case Nil => true
+ }
+ (sym1 ne sym2) &&
+ (sym1.derivesFrom(sym2) ||
+ !sym2.derivesFrom(sym1) && precedesIn(pre.baseClasses))
+ }
+
+ /** Similar to SymDenotation#accessBoundary, but without the special cases. */
+ def accessBoundary(sym: Symbol) =
+ if (sym.is(Private)) sym.owner
+ else sym.privateWithin.orElse(
+ if (sym.is(Protected)) sym.owner.enclosingPackageClass
+ else defn.RootClass)
+
+      /** Establish a partial "preference" order between symbols.
+ * Give preference to `sym1` over `sym2` if one of the following
+ * conditions holds, in decreasing order of weight:
+ * 1. sym1 is concrete and sym2 is abstract
+ * 2. The owner of sym1 comes before the owner of sym2 in the linearization
+ * of the type of the prefix `pre`.
+ * 3. The access boundary of sym2 is properly contained in the access
+ * boundary of sym1. For protected access, we count the enclosing
+ * package as access boundary.
+       *   4. sym1 is a method but sym2 is not.
+       *  The aim of these criteria is to give some disambiguation which
+ * - does not depend on textual order or other arbitrary choices
+ * - minimizes raising of doubleDef errors
+ */
+ def preferSym(sym1: Symbol, sym2: Symbol) =
+ sym1.eq(sym2) ||
+ sym1.isAsConcrete(sym2) &&
+ (!sym2.isAsConcrete(sym1) ||
+ precedes(sym1.owner, sym2.owner) ||
+ accessBoundary(sym2).isProperlyContainedIn(accessBoundary(sym1)) ||
+ sym1.is(Method) && !sym2.is(Method)) ||
+ sym1.info.isErroneous
+
+      /** Symbol preference as in `preferSym`, provided `info1` also overrides `info2` */
+ def prefer(sym1: Symbol, sym2: Symbol, info1: Type, info2: Type) =
+ preferSym(sym1, sym2) && info1.overrides(info2)
+
+ def handleDoubleDef =
+ if (preferSym(sym1, sym2)) denot1
+ else if (preferSym(sym2, sym1)) denot2
+ else doubleDefError(denot1, denot2, pre)
+
+ if (sym2Accessible && prefer(sym2, sym1, info2, info1)) denot2
+ else {
+ val sym1Accessible = sym1.isAccessibleFrom(pre)
+ if (sym1Accessible && prefer(sym1, sym2, info1, info2)) denot1
+ else if (sym1Accessible && sym2.exists && !sym2Accessible) denot1
+ else if (sym2Accessible && sym1.exists && !sym1Accessible) denot2
+ else if (isDoubleDef(sym1, sym2)) handleDoubleDef
+ else {
+ val sym =
+ if (!sym1.exists) sym2
+ else if (!sym2.exists) sym1
+ else if (preferSym(sym2, sym1)) sym2
+ else sym1
+ val jointInfo =
+ try infoMeet(info1, info2)
+ catch {
+ case ex: MergeError =>
+ if (pre.widen.classSymbol.is(Scala2x) || ctx.scala2Mode)
+ info1 // follow Scala2 linearization -
+              // compare with the way the merge is performed in SymDenotation#computeMembersNamed
+ else
+ throw new MergeError(s"${ex.getMessage} as members of type ${pre.show}", ex.tp1, ex.tp2)
+ }
+ new JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor)
+ }
+ }
+ }
+
+ if (this eq that) this
+ else if (!this.exists) that
+ else if (!that.exists) this
+ else that match {
+ case that: SingleDenotation =>
+ val r = mergeDenot(this, that)
+ if (r.exists) r else MultiDenotation(this, that)
+ case that @ MultiDenotation(denot1, denot2) =>
+ this & (denot1, pre) & (denot2, pre)
+ }
+ }
+
+ /** Form a choice between this denotation and that one.
+ * @param pre The prefix type of the members of the denotation, used
+ * to determine an accessible symbol if it exists.
+ */
+ def | (that: Denotation, pre: Type)(implicit ctx: Context): Denotation = {
+
+ /** Normally, `tp1 | tp2`. Special cases for matching methods and classes, with
+ * the possibility of raising a merge error.
+ */
+ def infoJoin(tp1: Type, tp2: Type): Type = tp1 match {
+ case tp1: TypeBounds =>
+ tp2 match {
+ case tp2: TypeBounds => tp1 | tp2
+ case tp2: ClassInfo if tp1 contains tp2 => tp1
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix | tp2.prefix)
+ case tp2: TypeBounds if tp2 contains tp1 => tp2
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1 @ MethodType(names1, formals1) =>
+ tp2 match {
+ case tp2 @ MethodType(names2, formals2)
+ if ctx.typeComparer.matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit =>
+ tp1.derivedMethodType(
+ mergeNames(names1, names2, nme.syntheticParamName),
+ formals1, tp1.resultType | tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case tp1: PolyType =>
+ tp2 match {
+ case tp2: PolyType if ctx.typeComparer.matchingTypeParams(tp1, tp2) =>
+ tp1.derivedPolyType(
+ mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
+ tp1.paramBounds, tp1.resultType | tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case _ =>
+ tp1 | tp2
+ }
+
+ def unionDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation =
+ if (denot1.matches(denot2)) {
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+ val info1 = denot1.info
+ val info2 = denot2.info
+ val sameSym = sym1 eq sym2
+ if (sameSym && (info1 frozen_<:< info2)) denot2
+ else if (sameSym && (info2 frozen_<:< info1)) denot1
+ else {
+ val jointSym =
+ if (sameSym) sym1
+ else {
+ val owner2 = if (sym2 ne NoSymbol) sym2.owner else NoSymbol
+ /** Determine a symbol which is overridden by both sym1 and sym2.
+ * Preference is given to accessible symbols.
+ */
+ def lubSym(overrides: Iterator[Symbol], previous: Symbol): Symbol =
+ if (!overrides.hasNext) previous
+ else {
+ val candidate = overrides.next
+ if (owner2 derivesFrom candidate.owner)
+ if (candidate isAccessibleFrom pre) candidate
+ else lubSym(overrides, previous orElse candidate)
+ else
+ lubSym(overrides, previous)
+ }
+ lubSym(sym1.allOverriddenSymbols, NoSymbol)
+ }
+ new JointRefDenotation(
+ jointSym, infoJoin(info1, info2), denot1.validFor & denot2.validFor)
+ }
+ }
+ else NoDenotation
+
+ if (this eq that) this
+ else if (!this.exists) this
+ else if (!that.exists) that
+ else this match {
+ case denot1 @ MultiDenotation(denot11, denot12) =>
+ denot1.derivedMultiDenotation(denot11 | (that, pre), denot12 | (that, pre))
+ case denot1: SingleDenotation =>
+ that match {
+ case denot2 @ MultiDenotation(denot21, denot22) =>
+ denot2.derivedMultiDenotation(this | (denot21, pre), this | (denot22, pre))
+ case denot2: SingleDenotation =>
+ unionDenot(denot1, denot2)
+ }
+ }
+ }
+
+ final def asSingleDenotation = asInstanceOf[SingleDenotation]
+ final def asSymDenotation = asInstanceOf[SymDenotation]
+
+ def toText(printer: Printer): Text = printer.toText(this)
+ }
+
+ /** An overloaded denotation consisting of the alternatives of both given denotations.
+ */
+ case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol) {
+ final def infoOrCompleter = multiHasNot("info")
+ final def info(implicit ctx: Context) = infoOrCompleter
+ final def validFor = denot1.validFor & denot2.validFor
+ final def isType = false
+ final def signature(implicit ctx: Context) = Signature.OverloadedSignature
+ def atSignature(sig: Signature, site: Type, relaxed: Boolean)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.atSignature(sig, site, relaxed), denot2.atSignature(sig, site, relaxed))
+ def current(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.current, denot2.current)
+ def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
+ denot1.altsWith(p) ++ denot2.altsWith(p)
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation = {
+ val sd1 = denot1.suchThat(p)
+ val sd2 = denot2.suchThat(p)
+ if (sd1.exists)
+ if (sd2.exists)
+ if (isDoubleDef(denot1.symbol, denot2.symbol)) doubleDefError(denot1, denot2)
+ else throw new TypeError(s"failure to disambiguate overloaded reference $this")
+ else sd1
+ else sd2
+ }
+ def hasAltWith(p: SingleDenotation => Boolean): Boolean =
+ denot1.hasAltWith(p) || denot2.hasAltWith(p)
+ def accessibleFrom(pre: Type, superAccess: Boolean)(implicit ctx: Context): Denotation = {
+ val d1 = denot1 accessibleFrom (pre, superAccess)
+ val d2 = denot2 accessibleFrom (pre, superAccess)
+ if (!d1.exists) d2
+ else if (!d2.exists) d1
+ else derivedMultiDenotation(d1, d2)
+ }
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.mapInfo(f), denot2.mapInfo(f))
+ def derivedMultiDenotation(d1: Denotation, d2: Denotation) =
+ if ((d1 eq denot1) && (d2 eq denot2)) this else MultiDenotation(d1, d2)
+ override def toString = alternatives.mkString(" <and> ")
+
+ private def multiHasNot(op: String): Nothing =
+ throw new UnsupportedOperationException(
+ s"multi-denotation with alternatives $alternatives does not implement operation $op")
+ }
+
+ /** A non-overloaded denotation */
+ abstract class SingleDenotation(symbol: Symbol) extends Denotation(symbol) with PreDenotation {
+ def hasUniqueSym: Boolean
+ protected def newLikeThis(symbol: Symbol, info: Type): SingleDenotation
+
+ final def signature(implicit ctx: Context): Signature = {
+ if (isType) Signature.NotAMethod // don't force info if this is a type SymDenotation
+ else info match {
+ case info: MethodicType =>
+ try info.signature
+ catch { // !!! DEBUG
+ case scala.util.control.NonFatal(ex) =>
+ ctx.echo(s"cannot take signature of ${info.show}")
+ throw ex
+ }
+ case _ => Signature.NotAMethod
+ }
+ }
+
+ def derivedSingleDenotation(symbol: Symbol, info: Type)(implicit ctx: Context): SingleDenotation =
+ if ((symbol eq this.symbol) && (info eq this.info)) this
+ else newLikeThis(symbol, info)
+
+ def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation =
+ derivedSingleDenotation(symbol, f(info))
+
+ def orElse(that: => SingleDenotation) = if (this.exists) this else that
+
+ def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
+ if (exists && p(symbol)) this :: Nil else Nil
+
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation =
+ if (exists && p(symbol)) this else NoDenotation
+
+ def hasAltWith(p: SingleDenotation => Boolean): Boolean =
+ exists && p(this)
+
+ def accessibleFrom(pre: Type, superAccess: Boolean)(implicit ctx: Context): Denotation =
+ if (!symbol.exists || symbol.isAccessibleFrom(pre, superAccess)) this else NoDenotation
+
+ def atSignature(sig: Signature, site: Type, relaxed: Boolean)(implicit ctx: Context): SingleDenotation = {
+ val situated = if (site == NoPrefix) this else asSeenFrom(site)
+ val matches = sig.matchDegree(situated.signature) >=
+ (if (relaxed) Signature.ParamMatch else Signature.FullMatch)
+ if (matches) this else NoDenotation
+ }
+
+ // ------ Forming types -------------------------------------------
+
+ /** The TypeRef representing this type denotation at its original location. */
+ def typeRef(implicit ctx: Context): TypeRef =
+ TypeRef(symbol.owner.thisType, symbol.name.asTypeName, this)
+
+ /** The TermRef representing this term denotation at its original location. */
+ def termRef(implicit ctx: Context): TermRef =
+ TermRef(symbol.owner.thisType, symbol.name.asTermName, this)
+
+ /** The TermRef representing this term denotation at its original location
+ * and at signature `NotAMethod`.
+ */
+ def valRef(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(symbol.owner.thisType, symbol.name.asTermName, Signature.NotAMethod, this)
+
+ /** The TermRef representing this term denotation at its original location
+ * at the denotation's signature.
+ * @note Unlike `valRef` and `termRef`, this will force the completion of the
+ * denotation via a call to `info`.
+ */
+ def termRefWithSig(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(symbol.owner.thisType, symbol.name.asTermName, signature, this)
+
+ /** The NamedType representing this denotation at its original location.
+ * Same as either `typeRef` or `termRefWithSig`, depending on whether this denotes a type or not.
+ */
+ def namedType(implicit ctx: Context): NamedType =
+ if (isType) typeRef else termRefWithSig
+
+ // ------ Transformations -----------------------------------------
+
+ private[this] var myValidFor: Period = Nowhere
+
+ def validFor = myValidFor
+ def validFor_=(p: Period) =
+ myValidFor = p
+
+ /** The next SingleDenotation in this run, with wrap-around from last to first.
+ *
+ * There may be several `SingleDenotation`s with different validity
+ * representing the same underlying definition at different phases.
+ * These are called a "flock". Flock members are generated by
+ * the `current` method (see below) and are connected in a ring
+ * through their `nextInRun` fields.
+ *
+ * The following invariants hold for flock members:
+ *
+ * 1) validity periods are non-overlapping
+ * 2) the union of all validity periods is a contiguous
+ * interval.
+ */
+ protected var nextInRun: SingleDenotation = this
+
+ /** The version of this SingleDenotation that was valid in the first phase
+ * of this run.
+ */
+ def initial: SingleDenotation =
+ if (validFor == Nowhere) this
+ else {
+ var current = nextInRun
+ while (current.validFor.code > this.myValidFor.code) current = current.nextInRun
+ current
+ }
+
+ def history: List[SingleDenotation] = {
+ val b = new ListBuffer[SingleDenotation]
+ var current = initial
+ do {
+ b += (current)
+ current = current.nextInRun
+ }
+ while (current ne initial)
+ b.toList
+ }
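+
+ // A minimal self-contained sketch of the ring ("flock") structure documented above;
+ // `RingNode`, `insertNodeAfter` and `ringToList` are illustrative names only and are
+ // not part of the compiler API. They mirror what `insertAfter` and `history` do for
+ // `SingleDenotation`s linked through `nextInRun`.
+ private object FlockRingSketch {
+ final class RingNode(val label: String) { var nextInRun: RingNode = this }
+
+ /** Insert `node` so that it follows `prev`, keeping the ring closed. */
+ def insertNodeAfter(node: RingNode, prev: RingNode): Unit = {
+ node.nextInRun = prev.nextInRun
+ prev.nextInRun = node
+ }
+
+ /** Collect all ring members starting at `start`, analogous to `history`. */
+ def ringToList(start: RingNode): List[RingNode] = {
+ val b = new ListBuffer[RingNode]
+ var cur = start
+ do { b += cur; cur = cur.nextInRun } while (cur ne start)
+ b.toList
+ }
+ }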
+
+ /** Invalidate all caches and fields that depend on base classes and their contents */
+ def invalidateInheritedInfo(): Unit = ()
+
+ /** Move validity period of this denotation to a new run. Throw a StaleSymbol error
+ * if denotation is no longer valid.
+ */
+ private def bringForward()(implicit ctx: Context): SingleDenotation = this match {
+ case denot: SymDenotation if ctx.stillValid(denot) =>
+ assert(ctx.runId > validFor.runId || ctx.settings.YtestPickler.value, // mixing test pickler with debug printing can travel back in time
+ s"denotation $denot invalid in run ${ctx.runId}. ValidFor: $validFor")
+ var d: SingleDenotation = denot
+ do {
+ d.validFor = Period(ctx.period.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId)
+ d.invalidateInheritedInfo()
+ d = d.nextInRun
+ } while (d ne denot)
+ this
+ case _ =>
+ if (coveredInterval.containsPhaseId(ctx.phaseId)) {
+ if (ctx.debug) ctx.traceInvalid(this)
+ staleSymbolError
+ }
+ else NoDenotation
+ }
+
+ /** Produce a denotation that is valid for the given context.
+ * Usually called when !(validFor contains ctx.period)
+ * (even though this is not a precondition).
+ * If the runId of the context is the same as runId of this denotation,
+ * the right flock member is located, or, if it does not exist yet,
+ * created by invoking a transformer (@See Transformers).
+ * If the runIds differ, but this denotation is a SymDenotation
+ * and its top-level owner class or module
+ * is still a member of its enclosing package, then the whole flock
+ * is brought forward to be valid in the new runId. Otherwise
+ * the symbol is stale, which constitutes an internal error.
+ */
+ def current(implicit ctx: Context): SingleDenotation = {
+ val currentPeriod = ctx.period
+ val valid = myValidFor
+ if (valid.code <= 0) {
+ // can happen if we sit on a stale denotation which has been replaced
+ // wholesale by an installAfter; in this case, proceed to the next
+ // denotation and try again.
+ if (validFor == Nowhere && nextInRun.validFor != Nowhere) return nextInRun.current
+ assert(false)
+ }
+
+ if (valid.runId != currentPeriod.runId)
+ if (exists) initial.bringForward.current
+ else this
+ else {
+ var cur = this
+ if (currentPeriod.code > valid.code) {
+ // search for containing period as long as nextInRun increases.
+ var next = nextInRun
+ while (next.validFor.code > valid.code && !(next.validFor contains currentPeriod)) {
+ cur = next
+ next = next.nextInRun
+ }
+ if (next.validFor.code > valid.code) {
+ // in this case, next.validFor contains currentPeriod
+ cur = next
+ cur
+ } else {
+ //println(s"might need new denot for $cur, valid for ${cur.validFor} at $currentPeriod")
+ // not found, cur points to highest existing variant
+ val nextTransformerId = ctx.nextDenotTransformerId(cur.validFor.lastPhaseId)
+ if (currentPeriod.lastPhaseId <= nextTransformerId)
+ cur.validFor = Period(currentPeriod.runId, cur.validFor.firstPhaseId, nextTransformerId)
+ else {
+ var startPid = nextTransformerId + 1
+ val transformer = ctx.denotTransformers(nextTransformerId)
+ //println(s"transforming $this with $transformer")
+ try {
+ next = transformer.transform(cur)(ctx.withPhase(transformer)).syncWithParents
+ } catch {
+ case ex: CyclicReference =>
+ println(s"error while transforming $this") // DEBUG
+ throw ex
+ }
+ if (next eq cur)
+ startPid = cur.validFor.firstPhaseId
+ else {
+ next match {
+ case next: ClassDenotation =>
+ assert(!next.is(Package), s"illegal transformation of package denotation by transformer ${ctx.withPhase(transformer).phase}")
+ next.resetFlag(Frozen)
+ case _ =>
+ }
+ next.insertAfter(cur)
+ cur = next
+ }
+ cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId)
+ //printPeriods(cur)
+ //println(s"new denot: $cur, valid for ${cur.validFor}")
+ }
+ cur.current // multiple transformations could be required
+ }
+ } else {
+ // currentPeriod < end of valid; in this case a version must exist
+ // but to be defensive we check for infinite loop anyway
+ var cnt = 0
+ while (!(cur.validFor contains currentPeriod)) {
+ //println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}")
+ cur = cur.nextInRun
+ // Note: One might be tempted to add a `prev` field to get to the new denotation
+ // more directly here. I tried that, but it degrades rather than improves
+ // performance: Test setup: Compile everything in dotc and immediate subdirectories
+ // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without.
+ cnt += 1
+ if (cnt > MaxPossiblePhaseId)
+ return current(ctx.withPhase(coveredInterval.firstPhaseId))
+ }
+ cur
+ }
+ }
+ }
+
+ private def demandOutsideDefinedMsg(implicit ctx: Context): String =
+ s"demanding denotation of $this at phase ${ctx.phase}(${ctx.phaseId}) outside defined interval: defined periods are${definedPeriodsString}"
+
+ /** Install this denotation to be the result of the given denotation transformer.
+ * This is the implementation of the same-named method in SymDenotations.
+ * It's placed here because it needs access to private fields of SingleDenotation.
+ * @pre Can only be called in `phase.next`.
+ */
+ protected def installAfter(phase: DenotTransformer)(implicit ctx: Context): Unit = {
+ val targetId = phase.next.id
+ if (ctx.phaseId != targetId) installAfter(phase)(ctx.withPhase(phase.next))
+ else {
+ val current = symbol.current
+ // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}")
+ // printPeriods(current)
+ this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId)
+ if (current.validFor.firstPhaseId >= targetId)
+ insertInsteadOf(current)
+ else {
+ current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1)
+ insertAfter(current)
+ }
+ // printPeriods(this)
+ }
+ }
+
+ /** Apply a transformation `f` to all denotations in this group that start at or after
+ * given phase. Denotations are replaced while keeping the same validity periods.
+ */
+ protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(implicit ctx: Context): Unit = {
+ var current = symbol.current
+ while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code))
+ current = current.nextInRun
+ var hasNext = true
+ while ((current.validFor.firstPhaseId >= phase.id) && hasNext) {
+ val current1: SingleDenotation = f(current.asSymDenotation)
+ if (current1 ne current) {
+ current1.validFor = current.validFor
+ current1.insertInsteadOf(current)
+ }
+ hasNext = current1.nextInRun.validFor.code > current1.validFor.code
+ current = current1.nextInRun
+ }
+ }
+
+ /** Insert this denotation so that it follows `prev`. */
+ private def insertAfter(prev: SingleDenotation) = {
+ this.nextInRun = prev.nextInRun
+ prev.nextInRun = this
+ }
+
+ /** Insert this denotation instead of `old`.
+ * Also ensure that `old` refers with `nextInRun` to this denotation
+ * and set its `validFor` field to `Nowhere`. This is necessary so that
+ * references to the old denotation can be brought forward via `current`
+ * to a valid denotation.
+ *
+ * The code to achieve this is subtle in that it works correctly
+ * whether the replaced denotation is the only one in its cycle or not.
+ */
+ private def insertInsteadOf(old: SingleDenotation): Unit = {
+ var prev = old
+ while (prev.nextInRun ne old) prev = prev.nextInRun
+ // order of next two assignments is important!
+ prev.nextInRun = this
+ this.nextInRun = old.nextInRun
+ old.validFor = Nowhere
+ }
+
+ def staleSymbolError(implicit ctx: Context) = {
+ def ownerMsg = this match {
+ case denot: SymDenotation => s"in ${denot.owner}"
+ case _ => ""
+ }
+ def msg = s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${myValidFor}, is referred to in run ${ctx.period}"
+ throw new StaleSymbol(msg)
+ }
+
+ /** The period (interval of phases) for which there exists
+ * a valid denotation in this flock.
+ */
+ def coveredInterval(implicit ctx: Context): Period = {
+ var cur = this
+ var cnt = 0
+ var interval = validFor
+ do {
+ cur = cur.nextInRun
+ cnt += 1
+ assert(cnt <= MaxPossiblePhaseId, demandOutsideDefinedMsg)
+ interval |= cur.validFor
+ } while (cur ne this)
+ interval
+ }
+
+ /** For ClassDenotations only:
+ * If caches influenced by parent classes are still valid, the denotation
+ * itself, otherwise a freshly initialized copy.
+ */
+ def syncWithParents(implicit ctx: Context): SingleDenotation = this
+
+ /** Show declaration string; useful for showing declarations
+ * as seen from subclasses.
+ */
+ def showDcl(implicit ctx: Context): String = ctx.dclText(this).show
+
+ override def toString =
+ if (symbol == NoSymbol) symbol.toString
+ else s"<SingleDenotation of type $infoOrCompleter>"
+
+ def definedPeriodsString: String = {
+ var sb = new StringBuilder()
+ var cur = this
+ var cnt = 0
+ do {
+ sb.append(" " + cur.validFor)
+ cur = cur.nextInRun
+ cnt += 1
+ if (cnt > MaxPossiblePhaseId) { sb.append(" ..."); cur = this }
+ } while (cur ne this)
+ sb.toString
+ }
+
+ // ------ PreDenotation ops ----------------------------------------------
+
+ final def first = this
+ final def last = this
+ final def toDenot(pre: Type)(implicit ctx: Context): Denotation = this
+ final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym)
+ final def matches(other: SingleDenotation)(implicit ctx: Context): Boolean = {
+ val d = signature.matchDegree(other.signature)
+ d == Signature.FullMatch ||
+ d >= Signature.ParamMatch && info.matches(other.info)
+ }
+ final def filterWithPredicate(p: SingleDenotation => Boolean): SingleDenotation =
+ if (p(this)) this else NoDenotation
+ final def filterDisjoint(denots: PreDenotation)(implicit ctx: Context): SingleDenotation =
+ if (denots.exists && denots.matches(this)) NoDenotation else this
+ def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(implicit ctx: Context): SingleDenotation =
+ if (hasUniqueSym && prevDenots.containsSym(symbol)) NoDenotation
+ else if (isType) filterDisjoint(ownDenots).asSeenFrom(pre)
+ else asSeenFrom(pre).filterDisjoint(ownDenots)
+ final def filterExcluded(excluded: FlagSet)(implicit ctx: Context): SingleDenotation =
+ if (excluded.isEmpty || !(this overlaps excluded)) this else NoDenotation
+
+ type AsSeenFromResult = SingleDenotation
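+ // Keep the symbol, but map its info through `asSeenFrom(pre, owner)`, unless the
+ // owner's members do not need the as-seen-from translation for this prefix.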
+ protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = {
+ val symbol = this.symbol
+ val owner = this match {
+ case thisd: SymDenotation => thisd.owner
+ case _ => if (symbol.exists) symbol.owner else NoSymbol
+ }
+ if (!owner.membersNeedAsSeenFrom(pre)) this
+ else derivedSingleDenotation(symbol, info.asSeenFrom(pre, owner))
+ }
+
+ private def overlaps(fs: FlagSet)(implicit ctx: Context): Boolean = this match {
+ case sd: SymDenotation => sd is fs
+ case _ => symbol is fs
+ }
+ }
+
+ abstract class NonSymSingleDenotation(symbol: Symbol) extends SingleDenotation(symbol) {
+ def infoOrCompleter: Type
+ def info(implicit ctx: Context) = infoOrCompleter
+ def isType = infoOrCompleter.isInstanceOf[TypeType]
+ }
+
+ class UniqueRefDenotation(
+ symbol: Symbol,
+ val infoOrCompleter: Type,
+ initValidFor: Period) extends NonSymSingleDenotation(symbol) {
+ validFor = initValidFor
+ override def hasUniqueSym: Boolean = true
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new UniqueRefDenotation(s, i, validFor)
+ }
+
+ class JointRefDenotation(
+ symbol: Symbol,
+ val infoOrCompleter: Type,
+ initValidFor: Period) extends NonSymSingleDenotation(symbol) {
+ validFor = initValidFor
+ override def hasUniqueSym = false
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new JointRefDenotation(s, i, validFor)
+ }
+
+ class ErrorDenotation(implicit ctx: Context) extends NonSymSingleDenotation(NoSymbol) {
+ override def exists = false
+ override def hasUniqueSym = false
+ def infoOrCompleter = NoType
+ validFor = Period.allInRun(ctx.runId)
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = this
+ }
+
+ /** An error denotation that provides more info about the missing reference.
+ * Produced by staticRef, consumed by requiredSymbol.
+ */
+ case class MissingRef(val owner: SingleDenotation, name: Name)(implicit ctx: Context) extends ErrorDenotation {
+ val ex: Exception = new Exception
+ }
+
+ /** An error denotation that provides more info about alternatives
+ * that were found but that do not qualify.
+ * Produced by staticRef, consumed by requiredSymbol.
+ */
+ case class NoQualifyingRef(alts: List[SingleDenotation])(implicit ctx: Context) extends ErrorDenotation
+
+ /** Do `sym1` and `sym2` constitute a double definition? */
+ def isDoubleDef(sym1: Symbol, sym2: Symbol)(implicit ctx: Context): Boolean =
+ (sym1.exists && sym2.exists &&
+ (sym1 ne sym2) && (sym1.owner eq sym2.owner) &&
+ !sym1.is(Bridge) && !sym2.is(Bridge))
+
+ def doubleDefError(denot1: Denotation, denot2: Denotation, pre: Type = NoPrefix)(implicit ctx: Context): Nothing = {
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+ def fromWhere = if (pre == NoPrefix) "" else i"\nwhen seen as members of $pre"
+ throw new MergeError(
+ i"""cannot merge
+ | $sym1: ${sym1.info} and
+ | $sym2: ${sym2.info};
+ |they are both defined in ${sym1.owner} but have matching signatures
+ | ${denot1.info} and
+ | ${denot2.info}$fromWhere""",
+ denot2.info, denot2.info)
+ }
+
+ // --------------- PreDenotations -------------------------------------------------
+
+ /** A PreDenotation represents a group of single denotations.
+ * It is used as an optimization to avoid forming MultiDenotations too eagerly.
+ */
+ trait PreDenotation {
+
+ /** A denotation in the group exists */
+ def exists: Boolean
+
+ /** First/last denotation in the group */
+ def first: Denotation
+ def last: Denotation
+
+ /** Convert to full denotation by &-ing all elements */
+ def toDenot(pre: Type)(implicit ctx: Context): Denotation
+
+ /** Group contains a denotation that refers to given symbol */
+ def containsSym(sym: Symbol): Boolean
+
+ /** Group contains a denotation with given signature */
+ def matches(other: SingleDenotation)(implicit ctx: Context): Boolean
+
+ /** Keep only those denotations in this group which satisfy predicate `p`. */
+ def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation
+
+ /** Keep only those denotations in this group which have a signature
+ * that's not already defined by `denots`.
+ */
+ def filterDisjoint(denots: PreDenotation)(implicit ctx: Context): PreDenotation
+
+ /** Keep only those inherited members M of this predenotation for which the following is true
+ * - M is not marked Private
+ * - If M has a unique symbol, it does not appear in `prevDenots`.
+ * - M's signature as seen from prefix `pre` does not appear in `ownDenots`
+ * Return the denotation as seen from `pre`.
+ * Called from SymDenotations.computeMember. There, `ownDenots` are the denotations found in
+ * the base class, which shadow any inherited denotations with the same signature.
+ * `prevDenots` are the denotations that are defined in the class or inherited from
+ * a base type which comes earlier in the linearization.
+ */
+ def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(implicit ctx: Context): PreDenotation
+
+ /** Keep only those denotations in this group whose flags do not intersect
+ * with `excluded`.
+ */
+ def filterExcluded(excluded: FlagSet)(implicit ctx: Context): PreDenotation
+
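+ // Single-entry cache for `asSeenFrom`, keyed on the prefix and the current period
+ // (only used when `Config.cacheAsSeenFrom` is set).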
+ private var cachedPrefix: Type = _
+ private var cachedAsSeenFrom: AsSeenFromResult = _
+ private var validAsSeenFrom: Period = Nowhere
+ type AsSeenFromResult <: PreDenotation
+
+ /** The denotation with info(s) as seen from prefix type */
+ final def asSeenFrom(pre: Type)(implicit ctx: Context): AsSeenFromResult =
+ if (Config.cacheAsSeenFrom) {
+ if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) {
+ cachedAsSeenFrom = computeAsSeenFrom(pre)
+ cachedPrefix = pre
+ validAsSeenFrom = ctx.period
+ }
+ cachedAsSeenFrom
+ } else computeAsSeenFrom(pre)
+
+ protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): AsSeenFromResult
+
+ /** The union of two groups. */
+ def union(that: PreDenotation) =
+ if (!this.exists) that
+ else if (!that.exists) this
+ else DenotUnion(this, that)
+ }
+
+ final case class DenotUnion(denots1: PreDenotation, denots2: PreDenotation) extends PreDenotation {
+ assert(denots1.exists && denots2.exists, s"Union of non-existing denotations ($denots1) and ($denots2)")
+ def exists = true
+ def first = denots1.first
+ def last = denots2.last
+ def toDenot(pre: Type)(implicit ctx: Context) =
+ (denots1 toDenot pre) & (denots2 toDenot pre, pre)
+ def containsSym(sym: Symbol) =
+ (denots1 containsSym sym) || (denots2 containsSym sym)
+ def matches(other: SingleDenotation)(implicit ctx: Context): Boolean =
+ denots1.matches(other) || denots2.matches(other)
+ def filterWithPredicate(p: SingleDenotation => Boolean): PreDenotation =
+ derivedUnion(denots1 filterWithPredicate p, denots2 filterWithPredicate p)
+ def filterDisjoint(denots: PreDenotation)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1 filterDisjoint denots, denots2 filterDisjoint denots)
+ def mapInherited(ownDenots: PreDenotation, prevDenots: PreDenotation, pre: Type)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1.mapInherited(ownDenots, prevDenots, pre), denots2.mapInherited(ownDenots, prevDenots, pre))
+ def filterExcluded(excluded: FlagSet)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1.filterExcluded(excluded), denots2.filterExcluded(excluded))
+
+ type AsSeenFromResult = PreDenotation
+ protected def computeAsSeenFrom(pre: Type)(implicit ctx: Context): PreDenotation =
+ derivedUnion(denots1.asSeenFrom(pre), denots2.asSeenFrom(pre))
+ private def derivedUnion(denots1: PreDenotation, denots2: PreDenotation) =
+ if ((denots1 eq this.denots1) && (denots2 eq this.denots2)) this
+ else denots1 union denots2
+ }
+
+ // --------------- Context Base Trait -------------------------------
+
+ trait DenotationsBase { this: ContextBase =>
+
+ /** The current denotation of the static reference given by path,
+ * or a MissingRef or NoQualifyingRef instance, if it does not exist.
+ * If `generateStubs` is set, stubs are generated for missing top-level symbols.
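+ * For example, resolving the path `scala.collection.Seq` first resolves the term
+ * `scala`, then the term `scala.collection`, and finally looks up the member `Seq`
+ * in the latter's `info`.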
+ */
+ def staticRef(path: Name, generateStubs: Boolean = true)(implicit ctx: Context): Denotation = {
+ def recur(path: Name, len: Int): Denotation = {
+ val point = path.lastIndexOf('.', len - 1)
+ val owner =
+ if (point > 0) recur(path.toTermName, point).disambiguate(_.info.isParameterless)
+ else if (path.isTermName) defn.RootClass.denot
+ else defn.EmptyPackageClass.denot
+ if (owner.exists) {
+ val name = path slice (point + 1, len)
+ val result = owner.info.member(name)
+ if (result ne NoDenotation) result
+ else {
+ val alt =
+ if (generateStubs) missingHook(owner.symbol.moduleClass, name)
+ else NoSymbol
+ if (alt.exists) alt.denot
+ else MissingRef(owner, name)
+ }
+ }
+ else owner
+ }
+ recur(path, path.length)
+ }
+
+ /** If we are looking for a non-existing term name in a package,
+ * assume it is a package for which we do not have a directory and
+ * enter it.
+ */
+ def missingHook(owner: Symbol, name: Name)(implicit ctx: Context): Symbol =
+ if ((owner is Package) && name.isTermName)
+ ctx.newCompletePackageSymbol(owner, name.asTermName).entered
+ else
+ NoSymbol
+ }
+
+ /** An exception for accessing symbols that are no longer valid in current run */
+ class StaleSymbol(msg: => String) extends Exception {
+ util.Stats.record("stale symbol")
+ override def getMessage() = msg
+ }
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala
new file mode 100644
index 000000000..63fbc98dc
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Flags.scala
@@ -0,0 +1,640 @@
+package dotty.tools.dotc.core
+
+import language.implicitConversions
+
+object Flags {
+
+ /** A FlagSet represents a set of flags. Flags are encoded as follows:
+ * The first two bits indicate whether a flagset applies to terms,
+ * to types, or to both. Bits 2..63 are available for properties
+ * and can be doubly used for terms and types.
+ * Combining two FlagSets with `|` will give a FlagSet
+ * that has the intersection of the applicability to terms/types
+ * of the two flag sets. It is checked that the intersection is not empty.
+ */
+ case class FlagSet(val bits: Long) extends AnyVal {
+
+ /** The union of this flag set and the given flag set
+ */
+ def | (that: FlagSet): FlagSet =
+ if (bits == 0) that
+ else if (that.bits == 0) this
+ else {
+ val tbits = bits & that.bits & KINDFLAGS
+ assert(tbits != 0, s"illegal flagset combination: $this and $that")
+ FlagSet(tbits | ((this.bits | that.bits) & ~KINDFLAGS))
+ }
+
+ /** The intersection of this flag set and the given flag set */
+ def & (that: FlagSet) = FlagSet(bits & that.bits)
+
+ /** The intersection of this flag set with the complement of the given flag set */
+ def &~ (that: FlagSet) = {
+ val tbits = bits & KINDFLAGS
+ if ((tbits & that.bits) == 0) this
+ else FlagSet(tbits | ((this.bits & ~that.bits) & ~KINDFLAGS))
+ }
+
+ /** Does this flag set have a non-empty intersection with the given flag set?
+ * This means that both the kind flags and the carrier bits have non-empty intersection.
+ */
+ def is(flags: FlagSet): Boolean = {
+ val fs = bits & flags.bits
+ (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0
+ }
+
+ /** Does this flag set have a non-empty intersection with the given flag set,
+ * and at the same time contain none of the flags in the `butNot` set?
+ */
+ def is(flags: FlagSet, butNot: FlagSet): Boolean = is(flags) && !is(butNot)
+
+ /** Does this flag set have all of the flags in given flag conjunction?
+ * Pre: The intersection of the typeflags of both sets must be non-empty.
+ */
+ def is(flags: FlagConjunction): Boolean = {
+ val fs = bits & flags.bits
+ (fs & KINDFLAGS) != 0 &&
+ (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT)
+ }
+
+ /** Does this flag set have all of the flags in given flag conjunction,
+ * and at the same time contain none of the flags in the `butNot` set?
+ * Pre: The intersection of the typeflags of both sets must be non-empty.
+ */
+ def is(flags: FlagConjunction, butNot: FlagSet): Boolean = is(flags) && !is(butNot)
+
+ def isEmpty = (bits & ~KINDFLAGS) == 0
+
+ /** Is this flag set a subset of that one? */
+ def <= (that: FlagSet) = (bits & that.bits) == bits
+
+ /** Does this flag set apply to terms? */
+ def isTermFlags = (bits & TERMS) != 0
+
+ /** Does this flag set apply to types? */
+ def isTypeFlags = (bits & TYPES) != 0
+
+ /** This flag set with all flags transposed to be type flags */
+ def toTypeFlags = if (bits == 0) this else FlagSet(bits & ~KINDFLAGS | TYPES)
+
+ /** This flag set with all flags transposed to be term flags */
+ def toTermFlags = if (bits == 0) this else FlagSet(bits & ~KINDFLAGS | TERMS)
+
+ /** This flag set with all flags transposed to be common flags */
+ def toCommonFlags = if (bits == 0) this else FlagSet(bits | KINDFLAGS)
+
+ /** The number of non-kind flags in this set */
+ def numFlags: Int = java.lang.Long.bitCount(bits & ~KINDFLAGS)
+
+ /** The lowest non-kind bit set in this flagset */
+ def firstBit: Int = java.lang.Long.numberOfTrailingZeros(bits & ~KINDFLAGS)
+
+ /** The list of non-empty names of flags with given index idx that are set in this FlagSet */
+ private def flagString(idx: Int): List[String] =
+ if ((bits & (1L << idx)) == 0) Nil
+ else {
+ def halfString(kind: Int) =
+ if ((bits & (1L << kind)) != 0) flagName(idx)(kind) else ""
+ val termFS = halfString(TERMindex)
+ val typeFS = halfString(TYPEindex)
+ val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil)
+ strs filter (_.nonEmpty)
+ }
+
+ /** The list of non-empty names of flags that are set in this FlagSet */
+ def flagStrings: Seq[String] = {
+ val rawStrings = (2 to MaxFlag).flatMap(flagString)
+ if (this is Local)
+ rawStrings.filter(_ != "<local>").map {
+ case "private" => "private[this]"
+ case "protected" => "protected[this]"
+ case str => str
+ }
+ else rawStrings
+ }
+
+ /** The string representation of this flag set */
+ override def toString = flagStrings.mkString(" ")
+ }
+
+ /** A class representing flag sets that should be tested
+ * conjunctively. I.e. for a flag conjunction `fc`,
+ * `x is fc` tests whether `x` contains all flags in `fc`.
+ */
+ case class FlagConjunction(bits: Long) {
+ override def toString = FlagSet(bits).toString
+ }
+
+ private final val TYPESHIFT = 2
+ private final val TERMindex = 0
+ private final val TYPEindex = 1
+ private final val TERMS = 1 << TERMindex
+ private final val TYPES = 1 << TYPEindex
+ private final val KINDFLAGS = TERMS | TYPES
+
+ private final val FirstFlag = 2
+ private final val FirstNotPickledFlag = 48
+ private final val MaxFlag = 63
+
+ private val flagName = Array.fill(64, 2)("")
+
+ private def isDefinedAsFlag(idx: Int) = flagName(idx) exists (_.nonEmpty)
+
+ /** The flag set containing all defined flags of either kind whose bits
+ * lie in the given range
+ */
+ private def flagRange(start: Int, end: Int) =
+ FlagSet((KINDFLAGS.toLong /: (start until end)) ((bits, idx) =>
+ if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits))
+
+ /** The flag with given index between 2 and 63 which applies to terms.
+ * Installs given name as the name of the flag. */
+ private def termFlag(index: Int, name: String): FlagSet = {
+ flagName(index)(TERMindex) = name
+ FlagSet(TERMS | (1L << index))
+ }
+
+ /** The flag with given index between 2 and 63 which applies to types.
+ * Installs given name as the name of the flag. */
+ private def typeFlag(index: Int, name: String): FlagSet = {
+ flagName(index)(TYPEindex) = name
+ FlagSet(TYPES | (1L << index))
+ }
+
+ /** The flag with given index between 2 and 63 which applies to both terms and types
+ * Installs given name as the name of the flag. */
+ private def commonFlag(index: Int, name: String): FlagSet = {
+ flagName(index)(TERMindex) = name
+ flagName(index)(TYPEindex) = name
+ FlagSet(TERMS | TYPES | (1L << index))
+ }
+
+ /** The union of all flags in given flag set */
+ def union(flagss: FlagSet*) = (EmptyFlags /: flagss)(_ | _)
+
+ /** The conjunction of all flags in given flag set */
+ def allOf(flagss: FlagSet*) = {
+ assert(flagss forall (_.numFlags == 1), "Flags.allOf doesn't support flag " + flagss.find(_.numFlags != 1))
+ FlagConjunction(union(flagss: _*).bits)
+ }
+
+ def commonFlags(flagss: FlagSet*) = union(flagss.map(_.toCommonFlags): _*)
+
+ /** The empty flag set */
+ final val EmptyFlags = FlagSet(0)
+
+ /** The undefined flag set */
+ final val UndefinedFlags = FlagSet(~KINDFLAGS)
+
+ // Available flags:
+
+ /** Labeled with `private` modifier */
+ final val Private = commonFlag(2, "private")
+ final val PrivateTerm = Private.toTermFlags
+ final val PrivateType = Private.toTypeFlags
+
+ /** Labeled with `protected` modifier */
+ final val Protected = commonFlag(3, "protected")
+
+ /** Labeled with `override` modifier */
+ final val Override = commonFlag(4, "override")
+
+ /** A declared, but not defined member */
+ final val Deferred = commonFlag(5, "<deferred>")
+ final val DeferredTerm = Deferred.toTermFlags
+ final val DeferredType = Deferred.toTypeFlags
+
+ /** Labeled with `final` modifier */
+ final val Final = commonFlag(6, "final")
+
+ /** A method symbol. */
+ final val MethodOrHKCommon = commonFlag(7, "<method>")
+ final val Method = MethodOrHKCommon.toTermFlags
+ final val HigherKinded = MethodOrHKCommon.toTypeFlags
+
+ /** A (term or type) parameter to a class or method */
+ final val Param = commonFlag(8, "<param>")
+ final val TermParam = Param.toTermFlags
+ final val TypeParam = Param.toTypeFlags
+
+ /** Labeled with `implicit` modifier (implicit value) */
+ final val ImplicitCommon = commonFlag(9, "implicit")
+ final val Implicit = ImplicitCommon.toTermFlags
+
+ /** Labeled with `lazy` (a lazy val). */
+ final val Lazy = termFlag(10, "lazy")
+
+ /** A trait */
+ final val Trait = typeFlag(10, "<trait>")
+
+ final val LazyOrTrait = Lazy.toCommonFlags
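+
+ // The sharing of carrier bit 10 between `Lazy` (a term flag) and `Trait` (a type
+ // flag) illustrates the encoding documented on `FlagSet`: the two differ only in
+ // their kind bits, so neither `is` the other, while both satisfy the common form.
+ assert(Lazy is LazyOrTrait)
+ assert(Trait is LazyOrTrait)
+ assert(!(Lazy is Trait))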
+
+ /** A value or variable accessor (getter or setter) */
+ final val Accessor = termFlag(11, "<accessor>")
+
+ /** Labeled with `sealed` modifier (sealed class) */
+ final val Sealed = typeFlag(11, "sealed")
+
+ final val AccessorOrSealed = Accessor.toCommonFlags
+
+ /** A mutable var */
+ final val Mutable = termFlag(12, "mutable")
+
+ /** Symbol is local to current class (i.e. private[this] or protected[this]).
+ * Pre: Private or Protected is also set.
+ */
+ final val Local = commonFlag(13, "<local>")
+
+ /** A field generated for a primary constructor parameter (no matter if it's a 'val' or not),
+ * or an accessor of such a field.
+ */
+ final val ParamAccessor = commonFlag(14, "<paramaccessor>")
+ final val TermParamAccessor = ParamAccessor.toTermFlags
+ final val TypeParamAccessor = ParamAccessor.toTypeFlags
+
+ /** A value or class implementing a module */
+ final val Module = commonFlag(15, "module")
+ final val ModuleVal = Module.toTermFlags
+ final val ModuleClass = Module.toTypeFlags
+
+ /** A value or class representing a package */
+ final val Package = commonFlag(16, "<package>")
+ final val PackageVal = Package.toTermFlags
+ final val PackageClass = Package.toTypeFlags
+
+ /** A case class or its companion object */
+ final val Case = commonFlag(17, "case")
+ final val CaseClass = Case.toTypeFlags
+ final val CaseVal = Case.toTermFlags
+
+ /** A compiler-generated symbol, which is visible for type-checking
+ * (compare with artifact)
+ */
+ final val Synthetic = commonFlag(18, "<synthetic>")
+
+ /** Symbol's name is expanded */
+ final val ExpandedName = commonFlag(19, "<expandedname>")
+
+ /** A covariant type variable / an outer accessor */
+ final val CovariantOrOuter = commonFlag(20, "")
+ final val Covariant = typeFlag(20, "<covariant>")
+ final val OuterAccessor = termFlag(20, "<outer accessor>")
+
+ /** A contravariant type variable / a label method */
+ final val ContravariantOrLabel = commonFlag(21, "")
+ final val Contravariant = typeFlag(21, "<contravariant>")
+ final val Label = termFlag(21, "<label>")
+
+
+ /** A trait that has only abstract methods as members
+ * (and therefore can be represented by a Java interface)
+ */
+ final val PureInterface = typeFlag(22, "interface") // TODO when unpickling, reconstitute from context
+
+ /** Labeled with both `abstract` and `override` modifiers */
+ final val AbsOverride = termFlag(22, "abstract override")
+
+ /** Labeled with `abstract` modifier (an abstract class)
+ * Note: You should never see Abstract on any symbol except a class.
+ * Note: the flag counts as common, because it can be combined with OVERRIDE in a term.
+ */
+ final val Abstract = commonFlag(23, "abstract")
+
+ /** Lazy val or method is known or assumed to be stable and realizable */
+ final val Stable = termFlag(24, "<stable>")
+
+ /** A case parameter accessor */
+ final val CaseAccessor = termFlag(25, "<caseaccessor>")
+
+ /** A binding for a type parameter of a base class or trait.
+ * TODO: Replace with combination of isType, ExpandedName, and Override?
+ */
+ final val BaseTypeArg = typeFlag(25, "<basetypearg>")
+
+ final val CaseAccessorOrBaseTypeArg = CaseAccessor.toCommonFlags
+
+ /** A super accessor */
+ final val SuperAccessor = termFlag(26, "<superaccessor>")
+
+ /** An unpickled Scala 2.x class */
+ final val Scala2x = typeFlag(26, "<scala-2.x>")
+
+ final val SuperAccessorOrScala2x = SuperAccessor.toCommonFlags
+
+ /** A method that has default params */
+ final val DefaultParameterized = termFlag(27, "<defaultparam>")
+
+ /** A type that is defined by a type bind */
+ final val BindDefinedType = typeFlag(27, "<bind-defined>")
+
+ /** Symbol is inlined */
+ final val Inline = commonFlag(29, "inline")
+
+ /** Symbol is defined by a Java class */
+ final val JavaDefined = commonFlag(30, "<java>")
+
+ /** Symbol is implemented as a Java static */
+ final val JavaStatic = commonFlag(31, "<static>")
+ final val JavaStaticTerm = JavaStatic.toTermFlags
+ final val JavaStaticType = JavaStatic.toTypeFlags
+
+ /** Trait does not have fields or initialization code */
+ final val NoInits = typeFlag(32, "<noInits>")
+
+ /** Variable is accessed from nested function. */
+ final val Captured = termFlag(32, "<captured>")
+
+ /** Symbol should be ignored when typechecking; will be marked ACC_SYNTHETIC in bytecode */
+ final val Artifact = commonFlag(33, "<artifact>")
+
+ /** A bridge method. Set by Erasure */
+ final val Bridge = termFlag(34, "<bridge>")
+
+ /** All class attributes are fully defined */
+ final val FullyCompleted = typeFlag(34, "<fully-completed>")
+
+ /** Symbol is a Java varargs bridge */ // (needed?)
+ final val VBridge = termFlag(35, "<vbridge>") // TODO remove
+
+ /** Symbol is a method which should be marked ACC_SYNCHRONIZED */
+ final val Synchronized = termFlag(36, "<synchronized>")
+
+ /** Symbol is a Java-style varargs method */
+ final val JavaVarargs = termFlag(37, "<varargs>")
+
+ /** Symbol is a Java default method */
+ final val DefaultMethod = termFlag(38, "<defaultmethod>")
+
+ /** Symbol is a Java enum */
+ final val Enum = commonFlag(40, "<enum>")
+
+ // Flags following this one are not pickled
+
+ /** Symbol always defines a fresh named type */
+ final val Fresh = commonFlag(45, "<fresh>")
+
+ /** Symbol is defined in a super call */
+ final val InSuperCall = commonFlag(46, "<in supercall>")
+
+ /** Denotation is in train of being loaded and completed, used to catch cyclic dependencies */
+ final val Touched = commonFlag(48, "<touched>")
+
+ /** Class is not allowed to accept new members because fingerprint of subclass has been taken */
+ final val Frozen = commonFlag(49, "<frozen>")
+
+ /** An error symbol */
+ final val Erroneous = commonFlag(50, "<is-error>")
+
+ /** Class has been lifted out to package level, local value has been lifted out to class level */
+ final val Lifted = commonFlag(51, "<lifted>")
+
+ /** Term member has been mixed in */
+ final val MixedIn = commonFlag(52, "<mixedin>")
+
+ /** Symbol is a generated specialized member */
+ final val Specialized = commonFlag(53, "<specialized>")
+
+ /** Symbol is a self name */
+ final val SelfName = termFlag(54, "<selfname>")
+
+ /** Symbol is an implementation class of a Scala2 trait */
+ final val ImplClass = typeFlag(54, "<implclass>")
+
+ final val SelfNameOrImplClass = SelfName.toCommonFlags
+
+ /** An existentially bound symbol (Scala 2.x only) */
+ final val Scala2ExistentialCommon = commonFlag(55, "<existential>")
+ final val Scala2Existential = Scala2ExistentialCommon.toTypeFlags
+
+ /** An overloaded symbol (Scala 2.x only) */
+ final val Scala2Overloaded = termFlag(56, "<overloaded>")
+
+ /** A module variable (Scala 2.x only) */
+ final val Scala2ModuleVar = termFlag(57, "<modulevar>")
+
+ /** A definition that's initialized before the super call (Scala 2.x only) */
+ final val Scala2PreSuper = termFlag(58, "<presuper>")
+
+ /** A macro (Scala 2.x only) */
+ final val Macro = commonFlag(59, "<macro>")
+
+ /** A method that is known to have inherited default parameters */
+ final val InheritedDefaultParams = termFlag(60, "<inherited-default-param>")
+
+ /** A method that is known to have no default parameters */
+ final val NoDefaultParams = termFlag(61, "<no-default-param>")
+
+ /** A denotation that is valid in all run-ids */
+ final val Permanent = commonFlag(62, "<permanent>")
+
+// --------- Combined Flag Sets and Conjunctions ----------------------
+
+ /** Flags representing source modifiers */
+ final val SourceModifierFlags =
+ commonFlags(Private, Protected, Abstract, Final, Inline,
+ Sealed, Case, Implicit, Override, AbsOverride, Lazy, JavaStatic)
+
+ /** Flags representing modifiers that can appear in trees */
+ final val ModifierFlags =
+ SourceModifierFlags | Module | Param | Synthetic | Package | Local |
+ commonFlags(Mutable)
+ // | Trait is subsumed by commonFlags(Lazy) from SourceModifierFlags
+
+ assert(ModifierFlags.isTermFlags && ModifierFlags.isTypeFlags)
+
+ /** Flags representing access rights */
+ final val AccessFlags = Private | Protected | Local
+
+ /** Flags guaranteed to be set upon symbol creation */
+ final val FromStartFlags =
+ AccessFlags | Module | Package | Deferred | Final | MethodOrHKCommon | Param | ParamAccessor | Scala2ExistentialCommon |
+ Mutable.toCommonFlags | InSuperCall | Touched | JavaStatic | CovariantOrOuter | ContravariantOrLabel | ExpandedName | AccessorOrSealed |
+ CaseAccessorOrBaseTypeArg | Fresh | Frozen | Erroneous | ImplicitCommon | Permanent | Synthetic |
+ Inline | LazyOrTrait | SuperAccessorOrScala2x | SelfNameOrImplClass
+
+ assert(FromStartFlags.isTermFlags && FromStartFlags.isTypeFlags)
+ // TODO: Should check that FromStartFlags do not change in completion
+
+ /** A value that's unstable unless complemented with a Stable flag */
+ final val UnstableValue = Mutable | Method
+
+ /** Flags that express the variance of a type parameter. */
+ final val VarianceFlags = Covariant | Contravariant
+
+ /** Flags that are passed from a type parameter of a class to a refinement symbol
+ * that sets the type parameter */
+ final val RetainedTypeArgFlags = VarianceFlags | ExpandedName | Protected | Local
+
+ /** Modules always have these flags set */
+ final val ModuleCreationFlags = ModuleVal | Lazy | Final | Stable
+
+ /** Module classes always have these flags set */
+ final val ModuleClassCreationFlags = ModuleClass | Final
+
+ /** Accessors always have these flags set */
+ final val AccessorCreationFlags = Method | Accessor
+
+ /** Pure interfaces always have these flags */
+ final val PureInterfaceCreationFlags = Trait | NoInits | PureInterface
+
+ final val NoInitsInterface = NoInits | PureInterface
+
+ /** The flags of the self symbol */
+ final val SelfSymFlags = Private | Local | Deferred
+
+ /** The flags of a class type parameter */
+ final def ClassTypeParamCreationFlags = TypeParam | Deferred | Protected | Local
+
+ /** Flags that can apply to both a module val and a module class, except those that
+ * are added at creation anyway
+ */
+ final val RetainedModuleValAndClassFlags: FlagSet =
+ AccessFlags | Package | Case |
+ Synthetic | ExpandedName | JavaDefined | JavaStatic | Artifact |
+ Erroneous | Lifted | MixedIn | Specialized
+
+ /** Flags that can apply to a module val */
+ final val RetainedModuleValFlags: FlagSet = RetainedModuleValAndClassFlags |
+ Override | Final | Method | Implicit | Lazy |
+ Accessor | AbsOverride | Stable | Captured | Synchronized
+
+ /** Flags that can apply to a module class */
+ final val RetainedModuleClassFlags: FlagSet = RetainedModuleValAndClassFlags |
+ InSuperCall | ImplClass
+
+ /** Packages and package classes always have these flags set */
+ final val PackageCreationFlags =
+ Module | Package | Final | JavaDefined
+
+ /** These flags are pickled */
+ final val PickledFlags = flagRange(FirstFlag, FirstNotPickledFlag)
+
+ final val AnyFlags = flagRange(FirstFlag, MaxFlag)
+
+ /** An abstract class or a trait */
+ final val AbstractOrTrait = Abstract | Trait
+
+ /** Labeled `private`, or local (i.e. `private[this]` or `protected[this]`) */
+ final val PrivateOrLocal = Private | Local
+
+ /** Either a module or a final class */
+ final val ModuleOrFinal = ModuleClass | Final
+
+ /** Either mutable or lazy */
+ final val MutableOrLazy = Mutable | Lazy
+
+ /** Either method or lazy */
+ final val MethodOrLazy = Method | Lazy
+
+ /** Either method or lazy or deferred */
+ final val MethodOrLazyOrDeferred = Method | Lazy | Deferred
+
+ /** Labeled `private`, `final`, or `inline` */
+ final val PrivateOrFinalOrInline = Private | Final | Inline
+
+ /** A private method */
+ final val PrivateMethod = allOf(Private, Method)
+
+ /** A private accessor */
+ final val PrivateAccessor = allOf(Private, Accessor)
+
+ /** A type parameter with synthesized name */
+ final val ExpandedTypeParam = allOf(ExpandedName, TypeParam)
+
+ /** An inline method */
+ final val InlineMethod = allOf(Inline, Method)
+
+ /** An inline parameter */
+ final val InlineParam = allOf(Inline, Param)
+
+ /** A parameter or parameter accessor */
+ final val ParamOrAccessor = Param | ParamAccessor
+
+ /** A lazy or deferred value */
+ final val LazyOrDeferred = Lazy | Deferred
+
+ /** A synthetic or private definition */
+ final val SyntheticOrPrivate = Synthetic | Private
+
+ /** A type parameter or type parameter accessor */
+ final val TypeParamOrAccessor = TypeParam | TypeParamAccessor
+
+ /** A deferred member or a parameter accessor (these don't have right hand sides) */
+ final val DeferredOrParamAccessor = Deferred | ParamAccessor
+
+ /** A value that's final or inline */
+ final val FinalOrInline = Final | Inline
+
+ /** If symbol of a type alias has these flags, prefer the alias */
+ final val AliasPreferred = TypeParam | BaseTypeArg | ExpandedName
+
+ /** A covariant type parameter instance */
+ final val LocalCovariant = allOf(Local, Covariant)
+
+ /** A contravariant type parameter instance */
+ final val LocalContravariant = allOf(Local, Contravariant)
+
+ /** Has defined or inherited default parameters */
+ final val HasDefaultParams = DefaultParameterized | InheritedDefaultParams
+
+ /** Is valid forever */
+ final val ValidForever = Package | Permanent | Scala2ExistentialCommon
+
+ /** Is a default parameter in Scala 2 */
+ final val DefaultParameter = allOf(Param, DefaultParameterized)
+
+ /** A trait that does not need to be initialized */
+ final val NoInitsTrait = allOf(Trait, NoInits)
+
+ /** A Java interface, potentially with default methods */
+ final val JavaTrait = allOf(JavaDefined, Trait, NoInits)
+
+ /** A Java interface */ // TODO when unpickling, reconstitute from context
+ final val JavaInterface = allOf(JavaDefined, Trait)
+
+ /** A Java companion object */
+ final val JavaModule = allOf(JavaDefined, Module)
+
+ /** A Java-defined `protected` symbol */
+ final val JavaProtected = allOf(JavaDefined, Protected)
+
+ /** Labeled private[this] */
+ final val PrivateLocal = allOf(Private, Local)
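+
+ // Contrast with the union `PrivateOrLocal` above: the conjunction requires both
+ // flags to be present, while the union is satisfied by either one.
+ assert(Private | Local is PrivateLocal)
+ assert(!(Private is PrivateLocal))
+ assert(Private is PrivateOrLocal)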
+
+ /** A private[this] parameter accessor */
+ final val PrivateLocalParamAccessor = allOf(Private, Local, ParamAccessor)
+
+ /** A parameter forwarder */
+ final val ParamForwarder = allOf(Method, Stable, ParamAccessor)
+
+ /** A private[this] parameter */
+ final val PrivateLocalParam = allOf(Private, Local, Param)
+
+ /** A private parameter accessor */
+ final val PrivateParamAccessor = allOf(Private, ParamAccessor)
+
+ /** A type parameter introduced with [type ... ] */
+ final val NamedTypeParam = allOf(TypeParam, ParamAccessor)
+
+ /** A local parameter */
+ final val ParamAndLocal = allOf(Param, Local)
+
+ /** Labeled protected[this] */
+ final val ProtectedLocal = allOf(Protected, Local)
+
+ /** Java symbol which is `protected` and `static` */
+ final val StaticProtected = allOf(JavaDefined, Protected, JavaStatic)
+
+ final val AbstractFinal = allOf(Abstract, Final)
+ final val AbstractSealed = allOf(Abstract, Sealed)
+ final val SyntheticArtifact = allOf(Synthetic, Artifact)
+ final val SyntheticModule = allOf(Synthetic, Module)
+ final val SyntheticTermParam = allOf(Synthetic, TermParam)
+ final val SyntheticTypeParam = allOf(Synthetic, TypeParam)
+ final val SyntheticCase = allOf(Synthetic, Case)
+ final val AbstractAndOverride = allOf(Abstract, Override)
+ final val Scala2Trait = allOf(Scala2x, Trait)
+
+ implicit def conjToFlagSet(conj: FlagConjunction): FlagSet =
+ FlagSet(conj.bits)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala
new file mode 100644
index 000000000..e4510c53e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala
@@ -0,0 +1,103 @@
+package dotty.tools.dotc
+package core
+
+import Types._
+import scala.util.hashing.{ MurmurHash3 => hashing }
+
+object Hashable {
+
+ /** A hash value indicating that the underlying type is not
+ * cached in uniques.
+ */
+ final val NotCached = 0
+
+ /** An alternative value returned from `hash` if the
+ * computed hashCode would be `NotCached`.
+ */
+ private[core] final val NotCachedAlt = Int.MinValue
+
+ /** A value that indicates that the hash code is unknown
+ */
+ private[core] final val HashUnknown = 1234
+
+ /** An alternative value if computeHash would otherwise yield HashUnknown
+ */
+ private[core] final val HashUnknownAlt = 4321
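+
+ // These two `...Alt` constants exist so that a legitimately computed hash can never
+ // collide with the sentinel values above: `avoidSpecialHashes` in the trait below
+ // remaps NotCached to NotCachedAlt and HashUnknown to HashUnknownAlt.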
+}
+
+trait Hashable {
+ import Hashable._
+
+ protected def hashSeed: Int = getClass.hashCode
+
+ protected final def finishHash(hashCode: Int, arity: Int): Int =
+ avoidSpecialHashes(hashing.finalizeHash(hashCode, arity))
+
+ final def identityHash = avoidSpecialHashes(System.identityHashCode(this))
+
+ protected def finishHash(seed: Int, arity: Int, tp: Type): Int = {
+ val elemHash = tp.hash
+ if (elemHash == NotCached) return NotCached
+ finishHash(hashing.mix(seed, elemHash), arity + 1)
+ }
+
+ protected def finishHash(seed: Int, arity: Int, tp1: Type, tp2: Type): Int = {
+ val elemHash = tp1.hash
+ if (elemHash == NotCached) return NotCached
+ finishHash(hashing.mix(seed, elemHash), arity + 1, tp2)
+ }
+
+ protected def finishHash(seed: Int, arity: Int, tps: List[Type]): Int = {
+ var h = seed
+ var xs = tps
+ var len = arity
+ while (xs.nonEmpty) {
+ val elemHash = xs.head.hash
+ if (elemHash == NotCached) return NotCached
+ h = hashing.mix(h, elemHash)
+ xs = xs.tail
+ len += 1
+ }
+ finishHash(h, len)
+ }
+
+ protected def finishHash(seed: Int, arity: Int, tp: Type, tps: List[Type]): Int = {
+ val elemHash = tp.hash
+ if (elemHash == NotCached) return NotCached
+ finishHash(hashing.mix(seed, elemHash), arity + 1, tps)
+ }
+
+ protected final def doHash(x: Any): Int =
+ finishHash(hashing.mix(hashSeed, x.hashCode), 1)
+
+ protected final def doHash(tp: Type): Int =
+ finishHash(hashSeed, 0, tp)
+
+ protected final def doHash(x1: Any, tp2: Type): Int =
+ finishHash(hashing.mix(hashSeed, x1.hashCode), 1, tp2)
+
+ protected final def doHash(tp1: Type, tp2: Type): Int =
+ finishHash(hashSeed, 0, tp1, tp2)
+
+ protected final def doHash(x1: Any, tp2: Type, tp3: Type): Int =
+ finishHash(hashing.mix(hashSeed, x1.hashCode), 1, tp2, tp3)
+
+ protected final def doHash(tp1: Type, tps2: List[Type]): Int =
+ finishHash(hashSeed, 0, tp1, tps2)
+
+ protected final def doHash(x1: Any, tp2: Type, tps3: List[Type]): Int =
+ finishHash(hashing.mix(hashSeed, x1.hashCode), 1, tp2, tps3)
+
+
+ protected final def doHash(x1: Int, x2: Int): Int =
+ finishHash(hashing.mix(hashing.mix(hashSeed, x1), x2), 1)
+
+ protected final def addDelta(elemHash: Int, delta: Int) =
+ if (elemHash == NotCached) NotCached
+ else avoidSpecialHashes(elemHash + delta)
+
+ private def avoidSpecialHashes(h: Int) =
+ if (h == NotCached) NotCachedAlt
+ else if (h == HashUnknown) HashUnknownAlt
+ else h
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala
new file mode 100644
index 000000000..406a84af6
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Mode.scala
@@ -0,0 +1,89 @@
+package dotty.tools.dotc.core
+
+/** A collection of mode bits that are part of a context */
+case class Mode(val bits: Int) extends AnyVal {
+ import Mode._
+ def | (that: Mode) = Mode(bits | that.bits)
+ def & (that: Mode) = Mode(bits & that.bits)
+ def &~ (that: Mode) = Mode(bits & ~that.bits)
+ def is (that: Mode) = (bits & that.bits) == that.bits
+
+ def isExpr = (this & PatternOrType) == None
+
+ override def toString =
+ (0 until 31).filter(i => (bits & (1 << i)) != 0).map(modeName).mkString("Mode(", ",", ")")
+}
+
+object Mode {
+ val None = Mode(0)
+
+ private val modeName = new Array[String](32)
+
+ def newMode(bit: Int, name: String): Mode = {
+ modeName(bit) = name
+ Mode(1 << bit)
+ }
+
+ val Pattern = newMode(0, "Pattern")
+ val Type = newMode(1, "Type")
+
+ val ImplicitsEnabled = newMode(2, "ImplicitsEnabled")
+ val InferringReturnType = newMode(3, "InferringReturnType")
+
+ /** This mode bit is set if we collect information without reference to a valid
+ * context with typerstate and constraint. This is typically done when we
+ * cache the eligibility of implicits. Caching needs to be done across different constraints.
+ * Therefore, if TypevarsMissContext is set, subtyping becomes looser, and assumes
+ * that PolyParams can be sub- and supertypes of anything. See TypeComparer.
+ */
+ val TypevarsMissContext = newMode(4, "TypevarsMissContext")
+ val CheckCyclic = newMode(5, "CheckCyclic")
+
+ val InSuperCall = newMode(6, "InSuperCall")
+
+ /** Allow GADTFlexType labelled types to have their bounds adjusted */
+ val GADTflexible = newMode(8, "GADTflexible")
+
+ /** Allow dependent functions. This is currently necessary for unpickling, because
+ * some dependent functions are passed through from the front end(s?), even though they
+ * are technically speaking illegal.
+ */
+ val AllowDependentFunctions = newMode(9, "AllowDependentFunctions")
+
+ /** We are currently printing something: avoid producing more logs about
+ * the printing.
+ */
+ val Printing = newMode(10, "Printing")
+
+ /** We are currently typechecking an ident to determine whether some implicit
+ * is shadowed - don't do any other shadowing tests.
+ */
+ val ImplicitShadowing = newMode(11, "ImplicitShadowing")
+
+ /** We are currently in a `viewExists` check. In that case, ambiguous
+ * implicits checks are disabled and we succeed with the first implicit
+ * found.
+ */
+ val ImplicitExploration = newMode(12, "ImplicitExploration")
+
+ /** We are currently unpickling Scala2 info */
+ val Scala2Unpickling = newMode(13, "Scala2Unpickling")
+
+ /** Use Scala2 scheme for overloading and implicit resolution */
+ val OldOverloadingResolution = newMode(14, "OldOverloadingResolution")
+
+ /** Allow hk applications of type lambdas to wildcard arguments;
+ * used for checking that such applications do not normally arise
+ */
+ val AllowLambdaWildcardApply = newMode(15, "AllowHKApplyToWildcards")
+
+ /** Read original positions when unpickling from TASTY */
+ val ReadPositions = newMode(16, "ReadPositions")
+
+ val PatternOrType = Pattern | Type
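+
+ // Illustration of `isExpr` above: a mode is an expression mode exactly when neither
+ // the Pattern bit nor the Type bit is set.
+ assert(ImplicitsEnabled.isExpr)
+ assert(!(Pattern | ImplicitsEnabled).isExpr)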
+
+ /** We are elaborating the fully qualified name of a package clause.
+ * In this case, identifiers should never be imported.
+ */
+ val InPackageClauseName = newMode(17, "InPackageClauseName")
+}
diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala
new file mode 100644
index 000000000..4c7f5b0a9
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala
@@ -0,0 +1,432 @@
+package dotty.tools.dotc
+package core
+
+import java.security.MessageDigest
+import scala.annotation.switch
+import scala.io.Codec
+import Names._, StdNames._, Contexts._, Symbols._, Flags._
+import Decorators.StringDecorator
+import util.{Chars, NameTransformer}
+import Chars.isOperatorPart
+
+object NameOps {
+
+ final object compactify {
+ lazy val md5 = MessageDigest.getInstance("MD5")
+
+ /** COMPACTIFY
+ *
+ * The hashed name has the form (prefix + marker + md5 + marker + suffix), where
+ * - prefix/suffix.length = MaxNameLength / 4
+ * - md5.length = 32
+ *
+ * We obtain the formula:
+ *
+ * FileNameLength = 2 * (MaxNameLength / 4) + 2 * marker.length + 32 + 6
+ *
+ * (+6 for ".class"). MaxNameLength can therefore be computed as follows:
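+ *
+ * MaxNameLength = (limit - 6) min (2 * (limit - 6 - 2 * marker.length - 32))
+ *
+ * For instance, with limit = 255 and the 4-character marker used below, this gives
+ * min(249, 2 * 209) = 249.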
+ */
+ def apply(s: String)(implicit ctx: Context): String = {
+ val marker = "$$$$"
+ val limit: Int = ctx.settings.maxClassfileName.value
+ val MaxNameLength = (limit - 6) min 2 * (limit - 6 - 2 * marker.length - 32)
+
+ def toMD5(s: String, edge: Int): String = {
+ val prefix = s take edge
+ val suffix = s takeRight edge
+
+ val cs = s.toArray
+ val bytes = Codec toUTF8 cs
+ md5 update bytes
+ val md5chars = (md5.digest() map (b => (b & 0xFF).toHexString)).mkString
+
+ prefix + marker + md5chars + marker + suffix
+ }
+
+ if (s.length <= MaxNameLength) s else toMD5(s, MaxNameLength / 4)
+ }
+ }
+
+ class PrefixNameExtractor(pre: TermName) {
+ def apply(name: TermName): TermName = pre ++ name
+ def unapply(name: TermName): Option[TermName] =
+ if (name startsWith pre) Some(name.drop(pre.length).asTermName) else None
+ }
+
+ object SuperAccessorName extends PrefixNameExtractor(nme.SUPER_PREFIX)
+ object InitializerName extends PrefixNameExtractor(nme.INITIALIZER_PREFIX)
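+ // For instance, `SuperAccessorName(name)` prefixes `name` with `nme.SUPER_PREFIX`,
+ // and a pattern `case SuperAccessorName(original)` strips that prefix again.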
+
+ implicit class NameDecorator[N <: Name](val name: N) extends AnyVal {
+ import nme._
+
+ def likeTyped(n: PreName): N =
+ (if (name.isTermName) n.toTermName else n.toTypeName).asInstanceOf[N]
+
+ def isConstructorName = name == CONSTRUCTOR || name == TRAIT_CONSTRUCTOR
+ def isStaticConstructorName = name == STATIC_CONSTRUCTOR
+ def isExceptionResultName = name startsWith EXCEPTION_RESULT_PREFIX
+ def isImplClassName = name endsWith IMPL_CLASS_SUFFIX
+ def isLocalDummyName = name startsWith LOCALDUMMY_PREFIX
+ def isLoopHeaderLabel = (name startsWith WHILE_PREFIX) || (name startsWith DO_WHILE_PREFIX)
+ def isProtectedAccessorName = name startsWith PROTECTED_PREFIX
+ def isReplWrapperName = name containsSlice INTERPRETER_IMPORT_WRAPPER
+ def isTraitSetterName = name containsSlice TRAIT_SETTER_SEPARATOR
+ def isSetterName = name endsWith SETTER_SUFFIX
+ def isSingletonName = name endsWith SINGLETON_SUFFIX
+ def isModuleClassName = name endsWith MODULE_SUFFIX
+ def isAvoidClashName = name endsWith AVOID_CLASH_SUFFIX
+ def isImportName = name startsWith IMPORT
+ def isFieldName = name endsWith LOCAL_SUFFIX
+ def isShadowedName = name.length > 0 && name.head == '(' && name.startsWith(nme.SHADOWED)
+ def isDefaultGetterName = name.isTermName && name.asTermName.defaultGetterIndex >= 0
+ def isScala2LocalSuffix = name.endsWith(" ")
+ def isModuleVarName(name: Name): Boolean =
+ name.stripAnonNumberSuffix endsWith MODULE_VAR_SUFFIX
+ def isSelectorName = name.startsWith(" ") && name.tail.forall(_.isDigit)
+ def isLazyLocal = name.endsWith(nme.LAZY_LOCAL)
+ def isOuterSelect = name.endsWith(nme.OUTER_SELECT)
+ def isInlineAccessor = name.startsWith(nme.INLINE_ACCESSOR_PREFIX)
+
+ /** Is name a variable name? */
+ def isVariableName: Boolean = name.length > 0 && {
+ val first = name.head
+ (((first.isLower && first.isLetter) || first == '_')
+ && (name != false_)
+ && (name != true_)
+ && (name != null_))
+ }
+
+ def isOpAssignmentName: Boolean = name match {
+ case raw.NE | raw.LE | raw.GE | EMPTY =>
+ false
+ case _ =>
+ name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head)
+ }
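+
+ // For example, "+=" and "::=" are op-assignment names, whereas "<=", ">=", "!=" and "==" are not.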
+
+ /** If the name ends with $nn where nn are
+ * all digits, strip the $ and the digits.
+ * Otherwise return the argument.
+ */
+ def stripAnonNumberSuffix: Name = {
+ var pos = name.length
+ while (pos > 0 && name(pos - 1).isDigit)
+ pos -= 1
+
+ if (pos > 0 && pos < name.length && name(pos - 1) == '$')
+ name take (pos - 1)
+ else
+ name
+ }
+
+ /** Convert this module name to corresponding module class name */
+ def moduleClassName: TypeName = (name ++ tpnme.MODULE_SUFFIX).toTypeName
+
+ /** Convert this module class name to corresponding source module name */
+ def sourceModuleName: TermName = stripModuleClassSuffix.toTermName
+
+ /** If name ends in module class suffix, drop it */
+ def stripModuleClassSuffix: Name =
+ if (isModuleClassName) name dropRight MODULE_SUFFIX.length else name
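+
+ // For instance, assuming MODULE_SUFFIX is "$": "Foo".toTermName.moduleClassName is the type
+ // name "Foo$", and "Foo$".toTypeName.sourceModuleName recovers the term name "Foo".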
+
+ /** Append a suffix so that this name does not clash with another name in the same scope */
+ def avoidClashName: TermName = (name ++ AVOID_CLASH_SUFFIX).toTermName
+
+ /** If name ends in "avoid clash" suffix, drop it */
+ def stripAvoidClashSuffix: Name =
+ if (isAvoidClashName) name dropRight AVOID_CLASH_SUFFIX.length else name
+
+ /** If flags is a ModuleClass but not a Package, add module class suffix */
+ def adjustIfModuleClass(flags: Flags.FlagSet): N = {
+ if (flags is (ModuleClass, butNot = Package)) name.asTypeName.moduleClassName
+ else stripAvoidClashSuffix
+ }.asInstanceOf[N]
+
+ /** The superaccessor for method with given name */
+ def superName: TermName = (nme.SUPER_PREFIX ++ name).toTermName
+
+ /** The expanded name of `name` relative to given class `base`.
+ */
+ def expandedName(base: Symbol, separator: Name)(implicit ctx: Context): N =
+ expandedName(if (base is Flags.ExpandedName) base.name else base.fullNameSeparated("$"), separator)
+
+ def expandedName(base: Symbol)(implicit ctx: Context): N = expandedName(base, nme.EXPAND_SEPARATOR)
+
+ /** The expanded name of `name` relative to `basename` with given `separator`
+ */
+ def expandedName(prefix: Name, separator: Name = nme.EXPAND_SEPARATOR): N =
+ name.fromName(prefix ++ separator ++ name).asInstanceOf[N]
+
+ def expandedName(prefix: Name): N = expandedName(prefix, nme.EXPAND_SEPARATOR)
+
+ /** Revert the expanded name. Note: This currently gives incorrect results
+ * if the normal name contains `nme.EXPAND_SEPARATOR`, i.e. two consecutive '$'
+ * signs. This can happen for instance if a super accessor is paired with
+ * an encoded name, e.g. super$$plus$eq. See #765.
+ */
+ def unexpandedName: N = {
+ var idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
+
+ // Hack to make super accessors from traits work. They would otherwise fail because of #765
+ // TODO: drop this once we have more robust name handling
+ if (idx > FalseSuperLength && name.slice(idx - FalseSuperLength, idx) == FalseSuper)
+ idx -= FalseSuper.length
+
+ if (idx < 0) name else (name drop (idx + nme.EXPAND_SEPARATOR.length)).asInstanceOf[N]
+ }
+
+ def expandedPrefix: N = {
+ val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
+ assert(idx >= 0)
+ name.take(idx).asInstanceOf[N]
+ }
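+
+ // For instance, assuming nme.EXPAND_SEPARATOR is "$$": a name `x` expanded with prefix `Outer`
+ // becomes `Outer$$x`; `unexpandedName` recovers `x` and `expandedPrefix` recovers `Outer`.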
+
+ def shadowedName: N = likeTyped(nme.SHADOWED ++ name)
+
+ def revertShadowed: N = likeTyped(name.drop(nme.SHADOWED.length))
+
+ def implClassName: N = likeTyped(name ++ tpnme.IMPL_CLASS_SUFFIX)
+
+ def errorName: N = likeTyped(name ++ nme.ERROR)
+
+ def freshened(implicit ctx: Context): N =
+ likeTyped(
+ if (name.isModuleClassName) name.stripModuleClassSuffix.freshened.moduleClassName
+ else likeTyped(ctx.freshName(name ++ NameTransformer.NAME_JOIN_STRING)))
+
+ /** Translate a name into a list of simple TypeNames and TermNames.
+ * In all segments before the last, type/term is determined by whether
+ * the following separator char is '.' or '#'. The last segment
+ * is of the same type as the original name.
+ *
+ * Examples:
+ *
+ * package foo {
+ * object Lorax { object Wog ; class Wog }
+ * class Lorax { object Zax ; class Zax }
+ * }
+ *
+ * f("foo.Lorax".toTermName) == List("foo": Term, "Lorax": Term) // object Lorax
+ * f("foo.Lorax".toTypeName) == List("foo": Term, "Lorax": Type) // class Lorax
+ * f("Lorax.Wog".toTermName) == List("Lorax": Term, "Wog": Term) // object Wog
+ * f("Lorax.Wog".toTypeName) == List("Lorax": Term, "Wog": Type) // class Wog
+ * f("Lorax#Zax".toTermName) == List("Lorax": Type, "Zax": Term) // object Zax
+ * f("Lorax#Zax".toTypeName) == List("Lorax": Type, "Zax": Type) // class Zax
+ *
+ * Note that in actual scala syntax you cannot refer to object Zax without an
+ * instance of Lorax, so Lorax#Zax could only mean the type. One might think
+ * that Lorax#Zax.type would work, but this is not accepted by the parser.
+ * For the purposes of referencing that object, the syntax is allowed.
+ */
+ def segments: List[Name] = {
+ def mkName(name: Name, follow: Char): Name =
+ if (follow == '.') name.toTermName else name.toTypeName
+
+ name.indexWhere(ch => ch == '.' || ch == '#') match {
+ case -1 =>
+ if (name.isEmpty) scala.Nil else name :: scala.Nil
+ case idx =>
+ mkName(name take idx, name(idx)) :: (name drop (idx + 1)).segments
+ }
+ }
+
+ /** The name of the generic runtime operation corresponding to an array operation */
+ def genericArrayOp: TermName = name match {
+ case nme.apply => nme.array_apply
+ case nme.length => nme.array_length
+ case nme.update => nme.array_update
+ case nme.clone_ => nme.array_clone
+ }
+
+ /** The name of the primitive runtime operation corresponding to an array operation */
+ def primitiveArrayOp: TermName = name match {
+ case nme.apply => nme.primitive.arrayApply
+ case nme.length => nme.primitive.arrayLength
+ case nme.update => nme.primitive.arrayUpdate
+ case nme.clone_ => nme.clone_
+ }
+
+ def specializedFor(classTargs: List[Types.Type], classTargsNames: List[Name], methodTargs: List[Types.Type], methodTargsNames: List[Name])(implicit ctx: Context): name.ThisName = {
+
+ def typeToTag(tp: Types.Type): Name = {
+ tp.classSymbol match {
+ case t if t eq defn.IntClass => nme.specializedTypeNames.Int
+ case t if t eq defn.BooleanClass => nme.specializedTypeNames.Boolean
+ case t if t eq defn.ByteClass => nme.specializedTypeNames.Byte
+ case t if t eq defn.LongClass => nme.specializedTypeNames.Long
+ case t if t eq defn.ShortClass => nme.specializedTypeNames.Short
+ case t if t eq defn.FloatClass => nme.specializedTypeNames.Float
+ case t if t eq defn.UnitClass => nme.specializedTypeNames.Void
+ case t if t eq defn.DoubleClass => nme.specializedTypeNames.Double
+ case t if t eq defn.CharClass => nme.specializedTypeNames.Char
+ case _ => nme.specializedTypeNames.Object
+ }
+ }
+
+ val methodTags: Seq[Name] = (methodTargs zip methodTargsNames).sortBy(_._2).map(x => typeToTag(x._1))
+ val classTags: Seq[Name] = (classTargs zip classTargsNames).sortBy(_._2).map(x => typeToTag(x._1))
+
+ name.fromName(name ++ nme.specializedTypeNames.prefix ++
+ methodTags.fold(nme.EMPTY)(_ ++ _) ++ nme.specializedTypeNames.separator ++
+ classTags.fold(nme.EMPTY)(_ ++ _) ++ nme.specializedTypeNames.suffix)
+ }
+
+ /** If name length exceeds allowable limit, replace part of it by hash */
+ def compactified(implicit ctx: Context): TermName = termName(compactify(name.toString))
+ }
+
+ // needed???
+ private val Boxed = Map[TypeName, TypeName](
+ tpnme.Boolean -> jtpnme.BoxedBoolean,
+ tpnme.Byte -> jtpnme.BoxedByte,
+ tpnme.Char -> jtpnme.BoxedCharacter,
+ tpnme.Short -> jtpnme.BoxedShort,
+ tpnme.Int -> jtpnme.BoxedInteger,
+ tpnme.Long -> jtpnme.BoxedLong,
+ tpnme.Float -> jtpnme.BoxedFloat,
+ tpnme.Double -> jtpnme.BoxedDouble)
+
+ implicit class TermNameDecorator(val name: TermName) extends AnyVal {
+ import nme._
+
+ def setterName: TermName =
+ if (name.isFieldName) name.fieldToGetter.setterName
+ else name ++ SETTER_SUFFIX
+
+ def getterName: TermName =
+ if (name.isFieldName) fieldToGetter
+ else setterToGetter
+
+ def fieldName: TermName =
+ if (name.isSetterName) {
+ if (name.isTraitSetterName) {
+ // has form <$-separated-trait-name>$_setter_$ `name`_$eq
+ val start = name.indexOfSlice(TRAIT_SETTER_SEPARATOR) + TRAIT_SETTER_SEPARATOR.length
+ val end = name.indexOfSlice(SETTER_SUFFIX)
+ name.slice(start, end) ++ LOCAL_SUFFIX
+ } else getterName.fieldName
+ }
+ else name ++ LOCAL_SUFFIX
+
+ private def setterToGetter: TermName = {
+ assert(name.endsWith(SETTER_SUFFIX), name + " is referenced as a setter but has wrong name format")
+ name.take(name.length - SETTER_SUFFIX.length).asTermName
+ }
+
+ def fieldToGetter: TermName = {
+ assert(name.isFieldName)
+ name.take(name.length - LOCAL_SUFFIX.length).asTermName
+ }
+
+ /** Nominally, name$default$N, encoded for <init>
+ * @param pos the parameter's position.
+ * @note Default getter name suffixes start at 1, so `pos` has to be adjusted by +1
+ */
+ def defaultGetterName(pos: Int): TermName = {
+ val prefix = if (name.isConstructorName) DEFAULT_GETTER_INIT else name
+ prefix ++ DEFAULT_GETTER ++ (pos + 1).toString
+ }
+
+ /** Nominally, name from name$default$N, CONSTRUCTOR for <init> */
+ def defaultGetterToMethod: TermName = {
+ val p = name.indexOfSlice(DEFAULT_GETTER)
+ if (p >= 0) {
+ val q = name.take(p).asTermName
+ // i.e., if (q.decoded == CONSTRUCTOR.toString) CONSTRUCTOR else q
+ if (q == DEFAULT_GETTER_INIT) CONSTRUCTOR else q
+ } else name
+ }
+
+ /** If this is a default getter, its index (starting from 0), else -1 */
+ def defaultGetterIndex: Int = {
+ var i = name.length
+ while (i > 0 && name(i - 1).isDigit) i -= 1
+ if (i > 0 && i < name.length && name.take(i).endsWith(DEFAULT_GETTER))
+ name.drop(i).toString.toInt - 1
+ else
+ -1
+ }
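+
+ // Example, assuming DEFAULT_GETTER is the usual "$default$" suffix: for a method `f`,
+ // "f".toTermName.defaultGetterName(1) == "f$default$2", and "f$default$2".toTermName.defaultGetterIndex == 1.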
+
+ def stripScala2LocalSuffix: TermName =
+ if (name.isScala2LocalSuffix) name.init.asTermName else name
+
+ /** The name of an accessor for protected symbols. */
+ def protectedAccessorName: TermName =
+ PROTECTED_PREFIX ++ name.unexpandedName
+
+ /** The name of a setter for protected symbols. Used for inherited Java fields. */
+ def protectedSetterName: TermName =
+ PROTECTED_SET_PREFIX ++ name.unexpandedName
+
+ def moduleVarName: TermName =
+ name ++ MODULE_VAR_SUFFIX
+
+ /** The name unary_x for a prefix operator x */
+ def toUnaryName: TermName = name match {
+ case raw.MINUS => UNARY_-
+ case raw.PLUS => UNARY_+
+ case raw.TILDE => UNARY_~
+ case raw.BANG => UNARY_!
+ case _ => name
+ }
+
+ /** The name of a method which stands in for a primitive operation
+ * during structural type dispatch.
+ */
+ def primitiveInfixMethodName: TermName = name match {
+ case OR => takeOr
+ case XOR => takeXor
+ case AND => takeAnd
+ case EQ => testEqual
+ case NE => testNotEqual
+ case ADD => add
+ case SUB => subtract
+ case MUL => multiply
+ case DIV => divide
+ case MOD => takeModulo
+ case LSL => shiftSignedLeft
+ case LSR => shiftLogicalRight
+ case ASR => shiftSignedRight
+ case LT => testLessThan
+ case LE => testLessOrEqualThan
+ case GE => testGreaterOrEqualThan
+ case GT => testGreaterThan
+ case ZOR => takeConditionalOr
+ case ZAND => takeConditionalAnd
+ case _ => NO_NAME
+ }
+
+ /** Postfix/prefix, really.
+ */
+ def primitivePostfixMethodName: TermName = name match {
+ case UNARY_! => takeNot
+ case UNARY_+ => positive
+ case UNARY_- => negate
+ case UNARY_~ => complement
+ case `toByte` => toByte
+ case `toShort` => toShort
+ case `toChar` => toCharacter
+ case `toInt` => toInteger
+ case `toLong` => toLong
+ case `toFloat` => toFloat
+ case `toDouble` => toDouble
+ case _ => NO_NAME
+ }
+
+ def primitiveMethodName: TermName =
+ primitiveInfixMethodName match {
+ case NO_NAME => primitivePostfixMethodName
+ case name => name
+ }
+
+ def lazyLocalName = name ++ nme.LAZY_LOCAL
+ def nonLazyName = {
+ assert(name.isLazyLocal)
+ name.dropRight(nme.LAZY_LOCAL.length)
+ }
+
+ def inlineAccessorName = nme.INLINE_ACCESSOR_PREFIX ++ name ++ "$"
+ }
+
+ private final val FalseSuper = "$$super".toTermName
+ private val FalseSuperLength = FalseSuper.length
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala
new file mode 100644
index 000000000..11f0b55a8
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Names.scala
@@ -0,0 +1,372 @@
+package dotty.tools
+package dotc
+package core
+
+import scala.io.Codec
+import util.NameTransformer
+import printing.{Showable, Texts, Printer}
+import Texts.Text
+import Decorators._
+import Contexts.Context
+import collection.IndexedSeqOptimized
+import collection.generic.CanBuildFrom
+import collection.mutable.{ Builder, StringBuilder }
+import collection.immutable.WrappedString
+import util.DotClass
+//import annotation.volatile
+
+object Names {
+
+ /** A common class for things that can be turned into names.
+ * Instances are both names and strings, the latter via a decorator.
+ */
+ trait PreName extends Any with Showable {
+ def toTypeName: TypeName
+ def toTermName: TermName
+ }
+
+ implicit def eqName: Eq[Name, Name] = Eq
+
+ /** A name is essentially a string, with three differences
+ * 1. Names belong in one of two name spaces: they are type names or term names.
+ * Term names have a sub-category of "local" field names.
+ * The same string can correspond to a name in each of the three namespaces.
+ * 2. Names are hash-consed. Two names
+ * representing the same string in the same universe are always reference identical.
+ * 3. Names are intended to be encoded strings. @see dotc.util.NameTransformer.
+ * The encoding will be applied when converting a string to a name.
+ */
+ abstract class Name extends DotClass
+ with PreName
+ with collection.immutable.Seq[Char]
+ with IndexedSeqOptimized[Char, Name] {
+
+ /** A type for names of the same kind as this name */
+ type ThisName <: Name
+
+ /** The start index in the character array */
+ val start: Int
+
+ /** The length of the name */
+ override val length: Int
+
+ /** Is this name a type name? */
+ def isTypeName: Boolean
+
+ /** Is this name a term name? */
+ def isTermName: Boolean
+
+ /** This name converted to a type name */
+ def toTypeName: TypeName
+
+ /** This name converted to a term name */
+ def toTermName: TermName
+
+ /** This name downcasted to a type name */
+ def asTypeName: TypeName
+
+ /** This name downcasted to a term name */
+ def asTermName: TermName
+
+ /** Create a new name of the same kind as this one, from the given
+ * character array `cs`, taking `len` characters starting at `offset`.
+ */
+ def fromChars(cs: Array[Char], offset: Int, len: Int): ThisName
+
+ /** Create new name of same kind as this name and with same
+ * characters as given `name`.
+ */
+ def fromName(name: Name): ThisName = fromChars(chrs, name.start, name.length)
+
+ /** Create new name of same kind as this name with characters from
+ * the given string
+ */
+ def fromString(str: String): ThisName = {
+ val cs = str.toCharArray
+ fromChars(cs, 0, cs.length)
+ }
+
+ override def toString =
+ if (length == 0) "" else new String(chrs, start, length)
+
+ def toText(printer: Printer): Text = printer.toText(this)
+
+ /** Write the UTF8 representation of this name to the given byte array.
+ * Start copying at index `offset`. Return the index of the next free byte in the array.
+ * Array must have enough remaining space for all bytes
+ * (i.e. maximally 3*length bytes).
+ */
+ final def copyUTF8(bs: Array[Byte], offset: Int): Int = {
+ val bytes = Codec.toUTF8(chrs, start, length)
+ scala.compat.Platform.arraycopy(bytes, 0, bs, offset, bytes.length)
+ offset + bytes.length
+ }
+
+ /** Replace \$op_name's by corresponding operator symbols. */
+ def decode: Name =
+ if (contains('$')) fromString(NameTransformer.decode(toString))
+ else this
+
+ /** Replace operator symbols by corresponding \$op_name's. */
+ def encode: Name =
+ if (dontEncode(toTermName)) this else NameTransformer.encode(this)
+
+ /** A more efficient version of concatenation */
+ def ++ (other: Name): ThisName = ++ (other.toString)
+
+ def ++ (other: String): ThisName = {
+ val s = toString + other
+ fromChars(s.toCharArray, 0, s.length)
+ }
+
+ def replace(from: Char, to: Char): ThisName = {
+ val cs = new Array[Char](length)
+ Array.copy(chrs, start, cs, 0, length)
+ for (i <- 0 until length) {
+ if (cs(i) == from) cs(i) = to
+ }
+ fromChars(cs, 0, length)
+ }
+
+ def contains(ch: Char): Boolean = {
+ var i = 0
+ while (i < length && chrs(start + i) != ch) i += 1
+ i < length
+ }
+
+ def firstChar = chrs(start)
+
+ // ----- Collections integration -------------------------------------
+
+ override protected[this] def thisCollection: WrappedString = new WrappedString(repr.toString)
+ override protected[this] def toCollection(repr: Name): WrappedString = new WrappedString(repr.toString)
+
+ override protected[this] def newBuilder: Builder[Char, Name] = unsupported("newBuilder")
+
+ override def apply(index: Int): Char = chrs(start + index)
+
+ override def slice(from: Int, until: Int): ThisName =
+ fromChars(chrs, start + from, until - from)
+
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ override def seq = toCollection(this)
+ }
+
+ class TermName(val start: Int, val length: Int, @sharable private[Names] var next: TermName) extends Name {
+ // `next` is @sharable because it is only modified in the synchronized block of termName.
+ type ThisName = TermName
+ def isTypeName = false
+ def isTermName = true
+
+ @sharable // because it is only modified in the synchronized block of toTypeName.
+ @volatile private[this] var _typeName: TypeName = null
+
+ def toTypeName: TypeName = {
+ if (_typeName == null)
+ synchronized {
+ if (_typeName == null)
+ _typeName = new TypeName(start, length, this)
+ }
+ _typeName
+ }
+ def toTermName = this
+ def asTypeName = throw new ClassCastException(this + " is not a type name")
+ def asTermName = this
+
+ override def hashCode: Int = start
+
+ override protected[this] def newBuilder: Builder[Char, Name] = termNameBuilder
+
+ def fromChars(cs: Array[Char], offset: Int, len: Int): TermName = termName(cs, offset, len)
+ }
+
+ class TypeName(val start: Int, val length: Int, val toTermName: TermName) extends Name {
+ type ThisName = TypeName
+ def isTypeName = true
+ def isTermName = false
+ def toTypeName = this
+ def asTypeName = this
+ def asTermName = throw new ClassCastException(this + " is not a term name")
+
+ override def hashCode: Int = -start
+
+ override protected[this] def newBuilder: Builder[Char, Name] =
+ termNameBuilder.mapResult(_.toTypeName)
+
+ def fromChars(cs: Array[Char], offset: Int, len: Int): TypeName = typeName(cs, offset, len)
+ }
+
+ // Nametable
+
+ private final val InitialHashSize = 0x8000
+ private final val InitialNameSize = 0x20000
+ private final val fillFactor = 0.7
+
+ /** Memory to store all names sequentially. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private[dotty] var chrs: Array[Char] = new Array[Char](InitialNameSize)
+
+ /** The number of characters filled. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private var nc = 0
+
+ /** Hashtable for finding term names quickly. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private var table = new Array[TermName](InitialHashSize)
+
+ /** The number of defined names. */
+ @sharable // because it's only mutated in synchronized block of termName
+ private var size = 1
+
+ /** The hash of a name made from the characters cs[offset..offset+len-1]. */
+ private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
+ if (len > 0)
+ (len * (41 * 41 * 41) +
+ cs(offset) * (41 * 41) +
+ cs(offset + len - 1) * 41 +
+ cs(offset + (len >> 1)))
+ else 0
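+
+ // For instance, hashValue("foo".toCharArray, 0, 3)
+ // = 3*41*41*41 + 'f'*41*41 + 'o'*41 + 'o' = 206763 + 171462 + 4551 + 111 = 382887;
+ // termName then masks this with (table.size - 1), which works because table sizes are powers of two.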
+
+ /** Is (the ASCII representation of) name at given index equal to
+ * cs[offset..offset+len-1]?
+ */
+ private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
+ var i = 0
+ while ((i < len) && (chrs(index + i) == cs(offset + i)))
+ i += 1
+ i == len
+ }
+
+ /** Create a term name from the characters in cs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def termName(cs: Array[Char], offset: Int, len: Int): TermName = synchronized {
+ util.Stats.record("termName")
+ val h = hashValue(cs, offset, len) & (table.size - 1)
+
+ /** Make sure the capacity of the character array is at least `n` */
+ def ensureCapacity(n: Int) =
+ if (n > chrs.length) {
+ val newchrs = new Array[Char](chrs.length * 2)
+ chrs.copyToArray(newchrs)
+ chrs = newchrs
+ }
+
+ /** Enter characters into chrs array. */
+ def enterChars(): Unit = {
+ ensureCapacity(nc + len)
+ var i = 0
+ while (i < len) {
+ chrs(nc + i) = cs(offset + i)
+ i += 1
+ }
+ nc += len
+ }
+
+ /** Rehash chain of names */
+ def rehash(name: TermName): Unit =
+ if (name != null) {
+ val oldNext = name.next
+ val h = hashValue(chrs, name.start, name.length) & (table.size - 1)
+ name.next = table(h)
+ table(h) = name
+ rehash(oldNext)
+ }
+
+ /** Increment the number of defined names and grow the hash table if it exceeds the fill factor */
+ def incTableSize() = {
+ size += 1
+ if (size.toDouble / table.size > fillFactor) {
+ val oldTable = table
+ table = new Array[TermName](table.size * 2)
+ for (i <- 0 until oldTable.size) rehash(oldTable(i))
+ }
+ }
+
+ val next = table(h)
+ var name = next
+ while (name ne null) {
+ if (name.length == len && equals(name.start, cs, offset, len))
+ return name
+ name = name.next
+ }
+ name = new TermName(nc, len, next)
+ enterChars()
+ table(h) = name
+ incTableSize()
+ name
+ }
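+
+ // Since names are hash-consed here, termName("foo") eq termName("foo") always holds.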
+
+ /** Create a type name from the characters in cs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def typeName(cs: Array[Char], offset: Int, len: Int): TypeName =
+ termName(cs, offset, len).toTypeName
+
+ /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def termName(bs: Array[Byte], offset: Int, len: Int): TermName = {
+ val chars = Codec.fromUTF8(bs, offset, len)
+ termName(chars, 0, chars.length)
+ }
+
+ /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1].
+ * Assume they are already encoded.
+ */
+ def typeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
+ termName(bs, offset, len).toTypeName
+
+ /** Create a term name from a string, without encoding operators */
+ def termName(s: String): TermName = termName(s.toCharArray, 0, s.length)
+
+ /** Create a type name from a string, without encoding operators */
+ def typeName(s: String): TypeName = typeName(s.toCharArray, 0, s.length)
+
+ /** The term name represented by the empty string */
+ val EmptyTermName = new TermName(-1, 0, null)
+
+ table(0) = EmptyTermName
+
+ /** The type name represented by the empty string */
+ val EmptyTypeName = EmptyTermName.toTypeName
+
+ // can't move CONSTRUCTOR/EMPTY_PACKAGE to `nme` because of bootstrap failures in `encode`.
+ val CONSTRUCTOR = termName("<init>")
+ val STATIC_CONSTRUCTOR = termName("<clinit>")
+ val EMPTY_PACKAGE = termName("<empty>")
+
+ val dontEncode = Set(CONSTRUCTOR, EMPTY_PACKAGE)
+
+ def termNameBuilder: Builder[Char, TermName] =
+ StringBuilder.newBuilder.mapResult(termName)
+
+ implicit val nameCanBuildFrom: CanBuildFrom[Name, Char, Name] = new CanBuildFrom[Name, Char, Name] {
+ def apply(from: Name): Builder[Char, Name] =
+ StringBuilder.newBuilder.mapResult(s => from.fromChars(s.toCharArray, 0, s.length))
+ def apply(): Builder[Char, Name] = termNameBuilder
+ }
+
+ implicit val NameOrdering: Ordering[Name] = new Ordering[Name] {
+ def compare(x: Name, y: Name): Int = {
+ if (x.isTermName && y.isTypeName) 1
+ else if (x.isTypeName && y.isTermName) -1
+ else if (x eq y) 0
+ else {
+ val until = x.length min y.length
+ var i = 0
+
+ while (i < until && x(i) == y(i)) i = i + 1
+
+ if (i < until) {
+ if (x(i) < y(i)) -1
+ else /*(x(i) > y(i))*/ 1
+ } else {
+ x.length - y.length
+ }
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
new file mode 100644
index 000000000..72c7a8e51
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -0,0 +1,636 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._, Decorators._
+import util.SimpleMap
+import collection.mutable
+import printing.{Printer, Showable}
+import printing.Texts._
+import config.Config
+import collection.immutable.BitSet
+import reflect.ClassTag
+import annotation.tailrec
+
+object OrderingConstraint {
+
+ type ArrayValuedMap[T] = SimpleMap[PolyType, Array[T]]
+
+ /** The type of `OrderingConstraint#boundsMap` */
+ type ParamBounds = ArrayValuedMap[Type]
+
+ /** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
+ type ParamOrdering = ArrayValuedMap[List[PolyParam]]
+
+ /** A new constraint with given maps */
+ private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
+ val result = new OrderingConstraint(boundsMap, lowerMap, upperMap)
+ if (Config.checkConstraintsNonCyclic) result.checkNonCyclic()
+ ctx.runInfo.recordConstraintSize(result, result.boundsMap.size)
+ result
+ }
+
+ /** A lens for updating a single entry array in one of the three constraint maps */
+ abstract class ConstraintLens[T <: AnyRef: ClassTag] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[T]
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[T])(implicit ctx: Context): OrderingConstraint
+ def initial: T
+
+ def apply(c: OrderingConstraint, poly: PolyType, idx: Int) = {
+ val es = entries(c, poly)
+ if (es == null) initial else es(idx)
+ }
+
+ /** The `current` constraint but with the entry for `param` updated to `entry`.
+ * `current` is used linearly. If it is different from `prev` it is
+ * known to be dead after the call. Hence it is OK to update destructively
+ * parts of `current` which are not shared by `prev`.
+ */
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
+ poly: PolyType, idx: Int, entry: T)(implicit ctx: Context): OrderingConstraint = {
+ var es = entries(current, poly)
+ if (es != null && (es(idx) eq entry)) current
+ else {
+ val result =
+ if (es == null) {
+ es = Array.fill(poly.paramNames.length)(initial)
+ updateEntries(current, poly, es)
+ }
+ else if (es ne entries(prev, poly))
+ current // can re-use existing entries array.
+ else {
+ es = es.clone
+ updateEntries(current, poly, es)
+ }
+ es(idx) = entry
+ result
+ }
+ }
+
+ def update(prev: OrderingConstraint, current: OrderingConstraint,
+ param: PolyParam, entry: T)(implicit ctx: Context): OrderingConstraint =
+ update(prev, current, param.binder, param.paramNum, entry)
+
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ poly: PolyType, idx: Int, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ update(prev, current, poly, idx, f(apply(current, poly, idx)))
+
+ def map(prev: OrderingConstraint, current: OrderingConstraint,
+ param: PolyParam, f: T => T)(implicit ctx: Context): OrderingConstraint =
+ map(prev, current, param.binder, param.paramNum, f)
+ }
+
+ val boundsLens = new ConstraintLens[Type] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[Type] =
+ c.boundsMap(poly)
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[Type])(implicit ctx: Context): OrderingConstraint =
+ newConstraint(c.boundsMap.updated(poly, entries), c.lowerMap, c.upperMap)
+ def initial = NoType
+ }
+
+ val lowerLens = new ConstraintLens[List[PolyParam]] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ c.lowerMap(poly)
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ newConstraint(c.boundsMap, c.lowerMap.updated(poly, entries), c.upperMap)
+ def initial = Nil
+ }
+
+ val upperLens = new ConstraintLens[List[PolyParam]] {
+ def entries(c: OrderingConstraint, poly: PolyType): Array[List[PolyParam]] =
+ c.upperMap(poly)
+ def updateEntries(c: OrderingConstraint, poly: PolyType, entries: Array[List[PolyParam]])(implicit ctx: Context): OrderingConstraint =
+ newConstraint(c.boundsMap, c.lowerMap, c.upperMap.updated(poly, entries))
+ def initial = Nil
+ }
+}
+
+import OrderingConstraint._
+
+/** Constraint over undetermined type parameters that keeps separate maps to
+ * reflect parameter orderings.
+ * @param boundsMap a map from PolyType to arrays.
+ * Each array contains twice as many entries as there are type parameters
+ * in the PolyType. The first half of the array contains the type bounds that constrain the
+ * polytype's type parameters. The second half might contain type variables that
+ * track the corresponding parameters, or is left empty (filled with nulls).
+ * An instantiated type parameter is represented by having its instance type in
+ * the corresponding array entry. The dual use of arrays for poly params
+ * and typevars is to save space and hopefully gain some speed.
+ *
+ * @param lowerMap a map from PolyTypes to arrays. Each array entry corresponds
+ * to a parameter P of the polytype; it contains all constrained parameters
+ * Q that are known to be smaller than P, i.e. Q <: P.
+ * @param upperMap a map from PolyTypes to arrays. Each array entry corresponds
+ * to a parameter P of the polytype; it contains all constrained parameters
+ * Q that are known to be greater than P, i.e. P <: Q.
+ */
+class OrderingConstraint(private val boundsMap: ParamBounds,
+ private val lowerMap : ParamOrdering,
+ private val upperMap : ParamOrdering) extends Constraint {
+
+ type This = OrderingConstraint
+
+// ----------- Basic indices --------------------------------------------------
+
+ /** The number of type parameters in the given entry array */
+ private def paramCount(entries: Array[Type]) = entries.length >> 1
+
+ /** The type variable corresponding to parameter numbered `n`, null if none was created */
+ private def typeVar(entries: Array[Type], n: Int): Type =
+ entries(paramCount(entries) + n)
+
+ /** The `boundsMap` entry corresponding to `param` */
+ def entry(param: PolyParam): Type = {
+ val entries = boundsMap(param.binder)
+ if (entries == null) NoType
+ else entries(param.paramNum)
+ }
+
+// ----------- Contains tests --------------------------------------------------
+
+ def contains(pt: PolyType): Boolean = boundsMap(pt) != null
+
+ def contains(param: PolyParam): Boolean = {
+ val entries = boundsMap(param.binder)
+ entries != null && isBounds(entries(param.paramNum))
+ }
+
+ def contains(tvar: TypeVar): Boolean = {
+ val origin = tvar.origin
+ val entries = boundsMap(origin.binder)
+ val pnum = origin.paramNum
+ entries != null && isBounds(entries(pnum)) && (typeVar(entries, pnum) eq tvar)
+ }
+
+ private def isBounds(tp: Type) = tp.isInstanceOf[TypeBounds]
+
+// ---------- Dependency handling ----------------------------------------------
+
+ def lower(param: PolyParam): List[PolyParam] = lowerLens(this, param.binder, param.paramNum)
+ def upper(param: PolyParam): List[PolyParam] = upperLens(this, param.binder, param.paramNum)
+
+ def minLower(param: PolyParam): List[PolyParam] = {
+ val all = lower(param)
+ all.filterNot(p => all.exists(isLess(p, _)))
+ }
+
+ def minUpper(param: PolyParam): List[PolyParam] = {
+ val all = upper(param)
+ all.filterNot(p => all.exists(isLess(_, p)))
+ }
+
+ def exclusiveLower(param: PolyParam, butNot: PolyParam): List[PolyParam] =
+ lower(param).filterNot(isLess(_, butNot))
+
+ def exclusiveUpper(param: PolyParam, butNot: PolyParam): List[PolyParam] =
+ upper(param).filterNot(isLess(butNot, _))
+
+// ---------- Info related to PolyParams -------------------------------------------
+
+ def isLess(param1: PolyParam, param2: PolyParam): Boolean =
+ upper(param1).contains(param2)
+
+ def nonParamBounds(param: PolyParam): TypeBounds =
+ entry(param).asInstanceOf[TypeBounds]
+
+ def fullLowerBound(param: PolyParam)(implicit ctx: Context): Type =
+ (nonParamBounds(param).lo /: minLower(param))(_ | _)
+
+ def fullUpperBound(param: PolyParam)(implicit ctx: Context): Type =
+ (nonParamBounds(param).hi /: minUpper(param))(_ & _)
+
+ def fullBounds(param: PolyParam)(implicit ctx: Context): TypeBounds =
+ nonParamBounds(param).derivedTypeBounds(fullLowerBound(param), fullUpperBound(param))
+
+ def typeVarOfParam(param: PolyParam): Type = {
+ val entries = boundsMap(param.binder)
+ if (entries == null) NoType
+ else {
+ val tvar = typeVar(entries, param.paramNum)
+ if (tvar != null) tvar else NoType
+ }
+ }
+
+// ---------- Adding PolyTypes --------------------------------------------------
+
+ /** The list of parameters P such that, for a fresh type parameter Q:
+ *
+ * Q <: tp implies Q <: P and isUpper = true, or
+ * tp <: Q implies P <: Q and isUpper = false
+ */
+ def dependentParams(tp: Type, isUpper: Boolean): List[PolyParam] = tp match {
+ case param: PolyParam if contains(param) =>
+ param :: (if (isUpper) upper(param) else lower(param))
+ case tp: AndOrType =>
+ val ps1 = dependentParams(tp.tp1, isUpper)
+ val ps2 = dependentParams(tp.tp2, isUpper)
+ if (isUpper == tp.isAnd) ps1.union(ps2) else ps1.intersect(ps2)
+ case _ =>
+ Nil
+ }
+
+ /** The bound type `tp` without constrained parameters which are clearly
+ * dependent. A parameter in an upper bound is clearly dependent if it appears
+ * in a hole of a context H given by:
+ *
+ * H = []
+ * H & T
+ * T & H
+ *
+ * (the idea is that a parameter P in a H context is guaranteed to be a supertype of the
+ * bounded parameter.)
+ * Analogously, a parameter in a lower bound is clearly dependent if it appears
+ * in a hole of a context H given by:
+ *
+ * L = []
+ * L | T
+ * T | L
+ *
+ * "Clearly dependent" is not synonymous with "dependent" in the sense
+ * it is defined in `dependentParams`. Dependent parameters are handled
+ * in `updateEntry`. The idea of stripping off clearly dependent parameters
+ * and handling them separately is for efficiency, so that type expressions
+ * used as bounds become smaller.
+ *
+ * @param isUpper If true, `tp` is an upper bound, else a lower bound.
+ */
+ private def stripParams(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ isUpper: Boolean)(implicit ctx: Context): Type = tp match {
+ case param: PolyParam if contains(param) =>
+ if (!paramBuf.contains(param)) paramBuf += param
+ NoType
+ case tp: AndOrType if isUpper == tp.isAnd =>
+ val tp1 = stripParams(tp.tp1, paramBuf, isUpper)
+ val tp2 = stripParams(tp.tp2, paramBuf, isUpper)
+ if (tp1.exists)
+ if (tp2.exists) tp.derivedAndOrType(tp1, tp2)
+ else tp1
+ else tp2
+ case _ =>
+ tp
+ }
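+
+ // For example, if P is a constrained parameter, stripping the upper bound `P & String`
+ // records P in `paramBuf` and leaves `String`; stripping the lower bound `P | Nothing`
+ // likewise records P and leaves `Nothing`.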
+
+ /** The bound type `tp` without clearly dependent parameters.
+ * A top or bottom type if the type consists only of dependent parameters.
+ * @param isUpper If true, `tp` is an upper bound, else a lower bound.
+ */
+ private def normalizedType(tp: Type, paramBuf: mutable.ListBuffer[PolyParam],
+ isUpper: Boolean)(implicit ctx: Context): Type =
+ stripParams(tp, paramBuf, isUpper)
+ .orElse(if (isUpper) defn.AnyType else defn.NothingType)
+
+ def add(poly: PolyType, tvars: List[TypeVar])(implicit ctx: Context): This = {
+ assert(!contains(poly))
+ val nparams = poly.paramNames.length
+ val entries1 = new Array[Type](nparams * 2)
+ poly.paramBounds.copyToArray(entries1, 0)
+ tvars.copyToArray(entries1, nparams)
+ newConstraint(boundsMap.updated(poly, entries1), lowerMap, upperMap).init(poly)
+ }
+
+ /** Split dependent parameters off the bounds for parameters in `poly`.
+ * Update all bounds to be normalized and update ordering to account for
+ * dependent parameters.
+ */
+ private def init(poly: PolyType)(implicit ctx: Context): This = {
+ var current = this
+ val loBuf, hiBuf = new mutable.ListBuffer[PolyParam]
+ var i = 0
+ while (i < poly.paramNames.length) {
+ val param = PolyParam(poly, i)
+ val bounds = nonParamBounds(param)
+ val lo = normalizedType(bounds.lo, loBuf, isUpper = false)
+ val hi = normalizedType(bounds.hi, hiBuf, isUpper = true)
+ current = updateEntry(current, param, bounds.derivedTypeBounds(lo, hi))
+ current = (current /: loBuf)(order(_, _, param))
+ current = (current /: hiBuf)(order(_, param, _))
+ loBuf.clear()
+ hiBuf.clear()
+ i += 1
+ }
+ if (Config.checkConstraintsNonCyclic) checkNonCyclic()
+ current
+ }
+
+// ---------- Updates ------------------------------------------------------------
+
+ /** Add the fact `param1 <: param2` to the constraint `current` and propagate
+ * `<:<` relationships between parameters ("edges") but not bounds.
+ */
+ private def order(current: This, param1: PolyParam, param2: PolyParam)(implicit ctx: Context): This =
+ if (param1 == param2 || current.isLess(param1, param2)) this
+ else {
+ assert(contains(param1))
+ assert(contains(param2))
+ val newUpper = param2 :: exclusiveUpper(param2, param1)
+ val newLower = param1 :: exclusiveLower(param1, param2)
+ val current1 = (current /: newLower)(upperLens.map(this, _, _, newUpper ::: _))
+ val current2 = (current1 /: newUpper)(lowerLens.map(this, _, _, newLower ::: _))
+ current2
+ }
+
+ def addLess(param1: PolyParam, param2: PolyParam)(implicit ctx: Context): This =
+ order(this, param1, param2)
+
+ def updateEntry(current: This, param: PolyParam, tp: Type)(implicit ctx: Context): This = {
+ var current1 = boundsLens.update(this, current, param, tp)
+ tp match {
+ case TypeBounds(lo, hi) =>
+ for (p <- dependentParams(lo, isUpper = false))
+ current1 = order(current1, p, param)
+ for (p <- dependentParams(hi, isUpper = true))
+ current1 = order(current1, param, p)
+ case _ =>
+ }
+ current1
+ }
+
+ def updateEntry(param: PolyParam, tp: Type)(implicit ctx: Context): This =
+ updateEntry(this, param, tp)
+
+ def unify(p1: PolyParam, p2: PolyParam)(implicit ctx: Context): This = {
+ val p1Bounds = (nonParamBounds(p1) & nonParamBounds(p2)).substParam(p2, p1)
+ updateEntry(p1, p1Bounds).replace(p2, p1)
+ }
+
+ def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This = {
+ val oldBounds @ TypeBounds(lo, hi) = nonParamBounds(param)
+ val newBounds =
+ if (isUpper) oldBounds.derivedTypeBounds(lo, hi & bound)
+ else oldBounds.derivedTypeBounds(lo | bound, hi)
+ updateEntry(param, newBounds)
+ }
+
+// ---------- Removals ------------------------------------------------------------
+
+ /** A new constraint which is derived from this constraint by removing
+ * the type parameter `param` from the domain and replacing all top-level occurrences
+ * of the parameter elsewhere in the constraint by type `tp`, or a conservative
+ * approximation of it if that is needed to avoid cycles.
+ * Occurrences nested inside a refinement or prefix are not affected.
+ *
+ * The reason we need to substitute top-level occurrences of the parameter
+ * is to deal with situations like the following. Say we have in the constraint
+ *
+ * P <: Q & String
+ * Q
+ *
+ * and we replace Q with P. Then substitution gives
+ *
+ * P <: P & String
+ *
+ * This would be a cyclic constraint; it is therefore changed by `normalize` and
+ * `recombine` below to
+ *
+ * P <: String
+ *
+ * approximating the RHS occurrence of P with Any. Without the substitution we
+ * would not find out where we need to approximate. Occurrences of parameters
+ * that are not top-level are not affected.
+ */
+ def replace(param: PolyParam, tp: Type)(implicit ctx: Context): OrderingConstraint = {
+ val replacement = tp.dealias.stripTypeVar
+ if (param == replacement) this
+ else {
+ assert(replacement.isValueTypeOrLambda)
+ val poly = param.binder
+ val idx = param.paramNum
+
+ def removeParam(ps: List[PolyParam]) =
+ ps.filterNot(p => p.binder.eq(poly) && p.paramNum == idx)
+
+ def replaceParam(tp: Type, atPoly: PolyType, atIdx: Int): Type = tp match {
+ case bounds @ TypeBounds(lo, hi) =>
+
+ def recombine(andor: AndOrType, op: (Type, Boolean) => Type, isUpper: Boolean): Type = {
+ val tp1 = op(andor.tp1, isUpper)
+ val tp2 = op(andor.tp2, isUpper)
+ if ((tp1 eq andor.tp1) && (tp2 eq andor.tp2)) andor
+ else if (andor.isAnd) tp1 & tp2
+ else tp1 | tp2
+ }
+
+ def normalize(tp: Type, isUpper: Boolean): Type = tp match {
+ case p: PolyParam if p.binder == atPoly && p.paramNum == atIdx =>
+ if (isUpper) defn.AnyType else defn.NothingType
+ case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, normalize, isUpper)
+ case _ => tp
+ }
+
+ def replaceIn(tp: Type, isUpper: Boolean): Type = tp match {
+ case `param` => normalize(replacement, isUpper)
+ case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper)
+ case _ => tp.substParam(param, replacement)
+ }
+
+ bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true))
+ case _ =>
+ tp.substParam(param, replacement)
+ }
+
+ var current =
+ if (isRemovable(poly)) remove(poly) else updateEntry(param, replacement)
+ current.foreachParam {(p, i) =>
+ current = boundsLens.map(this, current, p, i, replaceParam(_, p, i))
+ current = lowerLens.map(this, current, p, i, removeParam)
+ current = upperLens.map(this, current, p, i, removeParam)
+ }
+ current
+ }
+ }
+
+ def remove(pt: PolyType)(implicit ctx: Context): This = {
+ def removeFromOrdering(po: ParamOrdering) = {
+ def removeFromBoundss(key: PolyType, bndss: Array[List[PolyParam]]): Array[List[PolyParam]] = {
+ val bndss1 = bndss.map(_.filterConserve(_.binder ne pt))
+ if (bndss.corresponds(bndss1)(_ eq _)) bndss else bndss1
+ }
+ po.remove(pt).mapValuesNow(removeFromBoundss)
+ }
+ newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap))
+ }
+
+ def isRemovable(pt: PolyType): Boolean = {
+ val entries = boundsMap(pt)
+ @tailrec def allRemovable(last: Int): Boolean =
+ if (last < 0) true
+ else typeVar(entries, last) match {
+ case tv: TypeVar => tv.inst.exists && allRemovable(last - 1)
+ case _ => false
+ }
+ allRemovable(paramCount(entries) - 1)
+ }
+
+// ---------- Exploration --------------------------------------------------------
+
+ def domainPolys: List[PolyType] = boundsMap.keys
+
+ def domainParams: List[PolyParam] =
+ for {
+ (poly, entries) <- boundsMap.toList
+ n <- 0 until paramCount(entries)
+ if entries(n).exists
+ } yield PolyParam(poly, n)
+
+ def forallParams(p: PolyParam => Boolean): Boolean = {
+ boundsMap.foreachBinding { (poly, entries) =>
+ for (i <- 0 until paramCount(entries))
+ if (isBounds(entries(i)) && !p(PolyParam(poly, i))) return false
+ }
+ true
+ }
+
+ def foreachParam(p: (PolyType, Int) => Unit): Unit =
+ boundsMap.foreachBinding { (poly, entries) =>
+ 0.until(poly.paramNames.length).foreach(p(poly, _))
+ }
+
+ def foreachTypeVar(op: TypeVar => Unit): Unit =
+ boundsMap.foreachBinding { (poly, entries) =>
+ for (i <- 0 until paramCount(entries)) {
+ typeVar(entries, i) match {
+ case tv: TypeVar if !tv.inst.exists => op(tv)
+ case _ =>
+ }
+ }
+ }
+
+ def & (other: Constraint)(implicit ctx: Context) = {
+ def merge[T](m1: ArrayValuedMap[T], m2: ArrayValuedMap[T], join: (T, T) => T): ArrayValuedMap[T] = {
+ var merged = m1
+ def mergeArrays(xs1: Array[T], xs2: Array[T]) = {
+ val xs = xs1.clone
+ for (i <- xs.indices) xs(i) = join(xs1(i), xs2(i))
+ xs
+ }
+ m2.foreachBinding { (poly, xs2) =>
+ merged = merged.updated(poly,
+ if (m1.contains(poly)) mergeArrays(m1(poly), xs2) else xs2)
+ }
+ merged
+ }
+
+ def mergeParams(ps1: List[PolyParam], ps2: List[PolyParam]) =
+ (ps1 /: ps2)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1)
+
+ def mergeEntries(e1: Type, e2: Type): Type = e1 match {
+ case e1: TypeBounds =>
+ e2 match {
+ case e2: TypeBounds => e1 & e2
+ case _ if e1 contains e2 => e2
+ case _ => mergeError
+ }
+ case tv1: TypeVar =>
+ e2 match {
+ case tv2: TypeVar if tv1.instanceOpt eq tv2.instanceOpt => e1
+ case _ => mergeError
+ }
+ case _ if e1 eq e2 => e1
+ case _ => mergeError
+ }
+
+ def mergeError = throw new AssertionError(i"cannot merge $this with $other")
+
+ val that = other.asInstanceOf[OrderingConstraint]
+ new OrderingConstraint(
+ merge(this.boundsMap, that.boundsMap, mergeEntries),
+ merge(this.lowerMap, that.lowerMap, mergeParams),
+ merge(this.upperMap, that.upperMap, mergeParams))
+ }
+
+ override def checkClosed()(implicit ctx: Context): Unit = {
+ def isFreePolyParam(tp: Type) = tp match {
+ case PolyParam(binder: PolyType, _) => !contains(binder)
+ case _ => false
+ }
+ def checkClosedType(tp: Type, where: String) =
+ if (tp != null)
+ assert(!tp.existsPart(isFreePolyParam), i"unclosed constraint: $this refers to $tp in $where")
+ boundsMap.foreachBinding((_, tps) => tps.foreach(checkClosedType(_, "bounds")))
+ lowerMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "lower"))))
+ upperMap.foreachBinding((_, paramss) => paramss.foreach(_.foreach(checkClosedType(_, "upper"))))
+ }
+
+ private var myUninstVars: mutable.ArrayBuffer[TypeVar] = _
+
+ /** The uninstantiated typevars of this constraint */
+ def uninstVars: collection.Seq[TypeVar] = {
+ if (myUninstVars == null) {
+ myUninstVars = new mutable.ArrayBuffer[TypeVar]
+ boundsMap.foreachBinding { (poly, entries) =>
+ for (i <- 0 until paramCount(entries)) {
+ typeVar(entries, i) match {
+ case tv: TypeVar if !tv.inst.exists && isBounds(entries(i)) => myUninstVars += tv
+ case _ =>
+ }
+ }
+ }
+ }
+ myUninstVars
+ }
+
+// ---------- Cyclic checking -------------------------------------------
+
+ def checkNonCyclic()(implicit ctx: Context): Unit =
+ domainParams.foreach(checkNonCyclic)
+
+ private def checkNonCyclic(param: PolyParam)(implicit ctx: Context): Unit =
+ assert(!isLess(param, param), i"cyclic constraint involving $param in $this")
+
+// ---------- toText -----------------------------------------------------
+
+ override def toText(printer: Printer): Text = {
+ def entryText(tp: Type) = tp match {
+ case tp: TypeBounds =>
+ tp.toText(printer)
+ case _ =>
+ " := " ~ tp.toText(printer)
+ }
+ val indent = 3
+ val header: Text = "Constraint("
+ val uninstVarsText = " uninstVars = " ~
+ Text(uninstVars map (_.toText(printer)), ", ") ~ ";"
+ val constrainedText =
+ " constrained types = " ~ Text(domainPolys map (_.toText(printer)), ", ")
+ val boundsText =
+ " bounds = " ~ {
+ val assocs =
+ for (param <- domainParams)
+ yield (" " * indent) ~ param.toText(printer) ~ entryText(entry(param))
+ Text(assocs, "\n")
+ }
+ val orderingText =
+ " ordering = " ~ {
+ val deps =
+ for {
+ param <- domainParams
+ ups = minUpper(param)
+ if ups.nonEmpty
+ }
+ yield
+ (" " * indent) ~ param.toText(printer) ~ " <: " ~
+ Text(ups.map(_.toText(printer)), ", ")
+ Text(deps, "\n")
+ }
+ Text.lines(List(header, uninstVarsText, constrainedText, boundsText, orderingText, ")"))
+ }
+
+ override def toString: String = {
+ def entryText(tp: Type): String = tp match {
+ case tp: TypeBounds => tp.toString
+ case _ =>" := " + tp
+ }
+ val constrainedText =
+ " constrained types = " + domainPolys.mkString("\n")
+ val boundsText =
+ " bounds = " + {
+ val assocs =
+ for (param <- domainParams)
+ yield
+ param.binder.paramNames(param.paramNum) + ": " + entryText(entry(param))
+ assocs.mkString("\n")
+ }
+ constrainedText + "\n" + boundsText
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala
new file mode 100644
index 000000000..6efadab7f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Periods.scala
@@ -0,0 +1,159 @@
+package dotty.tools.dotc.core
+
+import Contexts._
+import dotty.tools.dotc.util.DotClass
+
+/** Periods are the central "clock" of the compiler.
+ * A period consists of a run id and a phase id.
+ * run ids represent compiler runs
+ * phase ids represent compiler phases
+ */
+abstract class Periods extends DotClass { self: Context =>
+ import Periods._
+
+ /** The current phase identifier */
+ def phaseId: Int = period.phaseId
+
+ /** The current run identifier */
+ def runId: Int = period.runId
+
+ /** Execute `op` at given period */
+ def atPeriod[T](pd: Period)(op: Context => T): T =
+ op(ctx.fresh.setPeriod(pd))
+
+ /** Execute `op` at given phase id */
+ def atPhase[T](pid: PhaseId)(op: Context => T): T =
+ op(ctx.withPhase(pid))
+
+ /** The period containing the current period where denotations do not change.
+ * We compute this by taking as first phase the first phase less than or equal to
+ * the current phase that has the same "nextTransformerId". As last phase
+ * we take the next transformer id following the current phase.
+ */
+ def stablePeriod = {
+ var first = phaseId
+ val nxTrans = ctx.base.nextDenotTransformerId(first)
+ while (first - 1 > NoPhaseId && (ctx.base.nextDenotTransformerId(first - 1) == nxTrans)) {
+ first -= 1
+ }
+ Period(runId, first, nxTrans)
+ }
+}
+
+object Periods {
+
+ /** A period is a contiguous sequence of phase ids in some run.
+ * It is coded as follows:
+ *
+ * sign, always 0: 1 bit
+ * runid: 19 bits
+ * last phase id: 6 bits
+ * #phases before last: 6 bits
+ *
+ * // Dmitry: sign == 0 isn't actually always true; in some cases phaseId == -1 is used for shifts, which easily creates code < 0
+ */
+ class Period(val code: Int) extends AnyVal {
+
+ /** The run identifier of this period. */
+ def runId: RunId = code >>> (PhaseWidth * 2)
+
+ /** The phase identifier of this single-phase period. */
+ def phaseId: PhaseId = (code >>> PhaseWidth) & PhaseMask
+
+ /** The last phase of this period */
+ def lastPhaseId: PhaseId =
+ (code >>> PhaseWidth) & PhaseMask
+
+ /** The first phase of this period */
+ def firstPhaseId = lastPhaseId - (code & PhaseMask)
+
+ def containsPhaseId(id: PhaseId) = firstPhaseId <= id && id <= lastPhaseId
+
+ /** Does this period contain given period? */
+ def contains(that: Period): Boolean = {
+ // Let this = (r1, l1, d1), that = (r2, l2, d2)
+ // where r = runid, l = last phase, d = duration - 1
+ // Then seen as intervals:
+ //
+ // this = r1 / (l1 - d1) .. l1
+ // that = r2 / (l2 - d2) .. l2
+ //
+ // Let's compute:
+ //
+ // lastDiff = X * 2^5 + (l1 - l2) mod 2^5
+ // where X >= 0, X == 0 iff r1 == r2 & l1 - l2 >= 0
+ // result = lastDiff + d2 <= d1
+ // We have:
+ // lastDiff + d2 <= d1
+ // iff X == 0 && l1 - l2 >= 0 && l1 - l2 + d2 <= d1
+ // iff r1 == r2 & l1 >= l2 && l1 - d1 <= l2 - d2
+ // q.e.d
+ val lastDiff = (code - that.code) >>> PhaseWidth
+ lastDiff + (that.code & PhaseMask ) <= (this.code & PhaseMask)
+ }
+
+ /** Does this period overlap with given period? */
+ def overlaps(that: Period): Boolean =
+ this.runId == that.runId &&
+ this.firstPhaseId <= that.lastPhaseId &&
+ that.firstPhaseId <= this.lastPhaseId
+
+ /** The intersection of two periods */
+ def & (that: Period): Period =
+ if (this overlaps that)
+ Period(
+ this.runId,
+ this.firstPhaseId max that.firstPhaseId,
+ this.lastPhaseId min that.lastPhaseId)
+ else
+ Nowhere
+
+ def | (that: Period): Period =
+ Period(this.runId,
+ this.firstPhaseId min that.firstPhaseId,
+ this.lastPhaseId max that.lastPhaseId)
+
+ override def toString = s"Period($firstPhaseId..$lastPhaseId, run = $runId)"
+ }
+
+ object Period {
+
+ /** The single-phase period consisting of given run id and phase id */
+ def apply(rid: RunId, pid: PhaseId): Period = {
+ new Period(((rid << PhaseWidth) | pid) << PhaseWidth)
+ }
+
+ /** The period consisting of given run id, and lo/hi phase ids */
+ def apply(rid: RunId, loPid: PhaseId, hiPid: PhaseId): Period = {
+ new Period(((rid << PhaseWidth) | hiPid) << PhaseWidth | (hiPid - loPid))
+ }
+
+ /** The interval consisting of all periods of given run id */
+ def allInRun(rid: RunId) = {
+ apply(rid, 0, PhaseMask)
+ }
+ }
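+
+ // A worked example of the encoding: Period(rid = 2, loPid = 3, hiPid = 5) has
+ // code = ((2 << 6) | 5) << 6 | (5 - 3) = 8514, so runId = 8514 >>> 12 = 2,
+ // lastPhaseId = (8514 >>> 6) & 63 = 5, and firstPhaseId = 5 - (8514 & 63) = 3.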
+
+ final val Nowhere = new Period(0)
+
+ final val InitialPeriod = Period(InitialRunId, FirstPhaseId)
+
+ final val InvalidPeriod = Period(NoRunId, NoPhaseId)
+
+ /** An ordinal number for compiler runs. First run has number 1. */
+ type RunId = Int
+ final val NoRunId = 0
+ final val InitialRunId = 1
+ final val RunWidth = java.lang.Integer.SIZE - PhaseWidth * 2 - 1/* sign */
+ final val MaxPossibleRunId = (1 << RunWidth) - 1
+
+ /** An ordinal number for phases. First phase has number 1. */
+ type PhaseId = Int
+ final val NoPhaseId = 0
+ final val FirstPhaseId = 1
+
+ /** The number of bits needed to encode a phase identifier. */
+ final val PhaseWidth = 6
+ final val PhaseMask = (1 << PhaseWidth) - 1
+ final val MaxPossiblePhaseId = PhaseMask
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala
new file mode 100644
index 000000000..222e2235d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Phases.scala
@@ -0,0 +1,377 @@
+package dotty.tools.dotc
+package core
+
+import Periods._
+import Contexts._
+import dotty.tools.backend.jvm.{LabelDefs, GenBCode}
+import dotty.tools.dotc.core.Symbols.ClassSymbol
+import util.DotClass
+import DenotTransformers._
+import Denotations._
+import Decorators._
+import config.Printers.config
+import scala.collection.mutable.{ListBuffer, ArrayBuffer}
+import dotty.tools.dotc.transform.TreeTransforms.{TreeTransformer, MiniPhase, TreeTransform}
+import dotty.tools.dotc.transform._
+import Periods._
+import typer.{FrontEnd, RefChecks}
+import ast.tpd
+
+trait Phases {
+ self: Context =>
+
+ import Phases._
+
+ def phase: Phase = base.phases(period.firstPhaseId)
+
+ def phasesStack: List[Phase] =
+ if ((this eq NoContext) || !phase.exists) Nil
+ else phase :: outersIterator.dropWhile(_.phase == phase).next.phasesStack
+
+ /** Execute `op` at given phase */
+ def atPhase[T](phase: Phase)(op: Context => T): T =
+ atPhase(phase.id)(op)
+
+ def atNextPhase[T](op: Context => T): T = atPhase(phase.next)(op)
+
+ def atPhaseNotLaterThan[T](limit: Phase)(op: Context => T): T =
+ if (!limit.exists || phase <= limit) op(this) else atPhase(limit)(op)
+
+ def atPhaseNotLaterThanTyper[T](op: Context => T): T =
+ atPhaseNotLaterThan(base.typerPhase)(op)
+
+ def isAfterTyper: Boolean = base.isAfterTyper(phase)
+}
+
+object Phases {
+
+ trait PhasesBase {
+ this: ContextBase =>
+
+ // drop NoPhase at beginning
+ def allPhases = (if (squashedPhases.nonEmpty) squashedPhases else phases).tail
+
+ object NoPhase extends Phase {
+ override def exists = false
+ def phaseName = "<no phase>"
+ def run(implicit ctx: Context): Unit = unsupported("run")
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = unsupported("transform")
+ }
+
+ object SomePhase extends Phase {
+ def phaseName = "<some phase>"
+ def run(implicit ctx: Context): Unit = unsupported("run")
+ }
+
+ /** A sentinel transformer object */
+ class TerminalPhase extends DenotTransformer {
+ def phaseName = "terminal"
+ def run(implicit ctx: Context): Unit = unsupported("run")
+ def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation =
+ unsupported("transform")
+ override def lastPhaseId(implicit ctx: Context) = id
+ }
+
+ def phasePlan = this.phasesPlan
+ def setPhasePlan(phasess: List[List[Phase]]) = this.phasesPlan = phasess
+
+ /** Squash TreeTransforms belonging to the same sublist into a single TreeTransformer.
+ * Each TreeTransform gets its own period,
+ * whereas a combined TreeTransformer gets a period equal to the union of the periods of its TreeTransforms.
+ */
+ def squashPhases(phasess: List[List[Phase]],
+ phasesToSkip: List[String], stopBeforePhases: List[String], stopAfterPhases: List[String], YCheckAfter: List[String]): List[Phase] = {
+ val squashedPhases = ListBuffer[Phase]()
+ var prevPhases: Set[Class[_ <: Phase]] = Set.empty
+ val YCheckAll = YCheckAfter.contains("all")
+
+ var stop = false
+ val filteredPhases = phasess.map(_.filter { p =>
+ val pstop = stop
+ stop = stop | stopBeforePhases.contains(p.phaseName) | stopAfterPhases.contains(p.phaseName)
+ !(pstop || stopBeforePhases.contains(p.phaseName) || phasesToSkip.contains(p.phaseName))
+ })
+
+ var i = 0
+
+ while (i < filteredPhases.length) {
+ if (filteredPhases(i).nonEmpty) { //could be empty due to filtering
+ val filteredPhaseBlock = filteredPhases(i)
+ val phaseToAdd =
+ if (filteredPhaseBlock.length > 1) {
+ val phasesInBlock: Set[String] = filteredPhaseBlock.map(_.phaseName).toSet
+ for (phase <- filteredPhaseBlock) {
+ phase match {
+ case p: MiniPhase =>
+ val unmetRequirements = p.runsAfterGroupsOf &~ prevPhases
+ assert(unmetRequirements.isEmpty,
+ s"${phase.phaseName} requires ${unmetRequirements.mkString(", ")} to be in a different TreeTransformer")
+
+ case _ =>
+ assert(false, s"Only tree transforms can be squashed, ${phase.phaseName} cannot be squashed")
+ }
+ }
+ val block = new TreeTransformer {
+ override def phaseName: String = miniPhases.map(_.phaseName).mkString("TreeTransform:{", ", ", "}")
+ override def miniPhases: Array[MiniPhase] = filteredPhaseBlock.asInstanceOf[List[MiniPhase]].toArray
+ }
+ prevPhases ++= filteredPhaseBlock.map(_.getClazz)
+ block
+ } else { // block of a single phase, no squashing
+ val phase = filteredPhaseBlock.head
+ prevPhases += phase.getClazz
+ phase
+ }
+ squashedPhases += phaseToAdd
+ val shouldAddYCheck = YCheckAfter.containsPhase(phaseToAdd) || YCheckAll
+ if (shouldAddYCheck) {
+ val checker = new TreeChecker
+ squashedPhases += checker
+ }
+ }
+
+ i += 1
+ }
+ squashedPhases.toList
+ }
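+
+ // Hedged sketch of what squashing does (phase names below are illustrative, not taken
+ // from this file): given a plan such as
+ //   List(List(frontEnd), List(miniA, miniB), List(backend))
+ // where miniA and miniB are MiniPhases, the middle sublist is collapsed into a single
+ // TreeTransformer whose phaseName is "TreeTransform:{miniA, miniB}", while single-phase
+ // sublists are kept as they are, so the result is roughly
+ //   List(frontEnd, TreeTransform:{miniA, miniB}, backend)
+ // with a TreeChecker inserted after any phase listed in YCheckAfter.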
+
+ /** Use the following phases in the order they are given.
+ * The list should never contain NoPhase.
+ * If squashing is enabled, phases in the same subgroup will be squashed into a single phase.
+ */
+ def usePhases(phasess: List[Phase], squash: Boolean = true) = {
+
+ val flatPhases = collection.mutable.ListBuffer[Phase]()
+
+ phasess.foreach(p => p match {
+ case t: TreeTransformer => flatPhases ++= t.miniPhases
+ case _ => flatPhases += p
+ })
+
+ phases = (NoPhase :: flatPhases.toList ::: new TerminalPhase :: Nil).toArray
+ var phasesAfter:Set[Class[_ <: Phase]] = Set.empty
+ nextDenotTransformerId = new Array[Int](phases.length)
+ denotTransformers = new Array[DenotTransformer](phases.length)
+
+ var phaseId = 0
+ def nextPhaseId = {
+ phaseId += 1
+ phaseId // starting from 1 as NoPhase is 0
+ }
+
+ def checkRequirements(p: Phase) = {
+ val unmetPrecedeRequirements = p.runsAfter -- phasesAfter
+ assert(unmetPrecedeRequirements.isEmpty,
+ s"phase ${p} has unmet requirement: ${unmetPrecedeRequirements.mkString(", ")} should precede this phase")
+ phasesAfter += p.getClazz
+
+ }
+ var i = 0
+
+ while (i < phasess.length) {
+ val phase = phasess(i)
+ phase match {
+ case t: TreeTransformer =>
+ val miniPhases = t.miniPhases
+ miniPhases.foreach{ phase =>
+ checkRequirements(phase)
+ phase.init(this, nextPhaseId)}
+ t.init(this, miniPhases.head.id, miniPhases.last.id)
+ case _ =>
+ phase.init(this, nextPhaseId)
+ checkRequirements(phase)
+ }
+
+ i += 1
+ }
+
+ phases.last.init(this, nextPhaseId) // init terminal phase
+
+ i = phases.length
+ var lastTransformerId = i
+ while (i > 0) {
+ i -= 1
+ val phase = phases(i)
+ phase match {
+ case transformer: DenotTransformer =>
+ lastTransformerId = i
+ denotTransformers(i) = transformer
+ case _ =>
+ }
+ nextDenotTransformerId(i) = lastTransformerId
+ }
+
+ if (squash) {
+ this.squashedPhases = (NoPhase :: phasess).toArray
+ } else {
+ this.squashedPhases = this.phases
+ }
+
+ config.println(s"Phases = ${phases.deep}")
+ config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.deep}")
+ }
+
+ def phaseOfClass(pclass: Class[_]) = phases.find(pclass.isInstance).getOrElse(NoPhase)
+
+ private val cachedPhases = collection.mutable.Set[PhaseCache]()
+ private def cleanPhaseCache = cachedPhases.foreach(_.myPhase = NoPhase)
+
+ /** A cache to compute the phase with a given class, which
+ * stores the phase as soon as phaseOfClass returns something
+ * different from NoPhase.
+ */
+ private class PhaseCache(pclass: Class[_ <: Phase]) {
+ var myPhase: Phase = NoPhase
+ def phase = {
+ if (myPhase eq NoPhase) myPhase = phaseOfClass(pclass)
+ myPhase
+ }
+ cachedPhases += this
+ }
+
+ private val typerCache = new PhaseCache(classOf[FrontEnd])
+ private val picklerCache = new PhaseCache(classOf[Pickler])
+
+ private val refChecksCache = new PhaseCache(classOf[RefChecks])
+ private val elimRepeatedCache = new PhaseCache(classOf[ElimRepeated])
+ private val extensionMethodsCache = new PhaseCache(classOf[ExtensionMethods])
+ private val erasureCache = new PhaseCache(classOf[Erasure])
+ private val elimErasedValueTypeCache = new PhaseCache(classOf[ElimErasedValueType])
+ private val patmatCache = new PhaseCache(classOf[PatternMatcher])
+ private val lambdaLiftCache = new PhaseCache(classOf[LambdaLift])
+ private val flattenCache = new PhaseCache(classOf[Flatten])
+ private val explicitOuterCache = new PhaseCache(classOf[ExplicitOuter])
+ private val gettersCache = new PhaseCache(classOf[Getters])
+ private val genBCodeCache = new PhaseCache(classOf[GenBCode])
+
+ def typerPhase = typerCache.phase
+ def picklerPhase = picklerCache.phase
+ def refchecksPhase = refChecksCache.phase
+ def elimRepeatedPhase = elimRepeatedCache.phase
+ def extensionMethodsPhase = extensionMethodsCache.phase
+ def erasurePhase = erasureCache.phase
+ def elimErasedValueTypePhase = elimErasedValueTypeCache.phase
+ def patmatPhase = patmatCache.phase
+ def lambdaLiftPhase = lambdaLiftCache.phase
+ def flattenPhase = flattenCache.phase
+ def explicitOuterPhase = explicitOuterCache.phase
+ def gettersPhase = gettersCache.phase
+ def genBCodePhase = genBCodeCache.phase
+
+ def isAfterTyper(phase: Phase): Boolean = phase.id > typerPhase.id
+ }
+
+ trait Phase extends DotClass {
+
+ def phaseName: String
+
+ /** The set of phase classes that should precede this phase */
+ def runsAfter: Set[Class[_ <: Phase]] = Set.empty
+
+ def run(implicit ctx: Context): Unit
+
+ def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] =
+ units.map { unit =>
+ val unitCtx = ctx.fresh.setPhase(this.start).setCompilationUnit(unit)
+ run(unitCtx)
+ unitCtx.compilationUnit
+ }
+
+ def description: String = phaseName
+
+ /** Output should be checkable by TreeChecker */
+ def isCheckable: Boolean = true
+
+ /** Check what the phase achieves, to be called at any point after it is finished.
+ */
+ def checkPostCondition(tree: tpd.Tree)(implicit ctx: Context): Unit = ()
+
+ /** If set, allow missing or superfluous arguments in applications
+ * and type applications.
+ */
+ def relaxedTyping: Boolean = false
+
+ /** Is this phase the standard typer phase? True for FrontEnd, but
+ * not for other first phases (such as FromTasty). The predicate
+ * is tested in some places that perform checks and corrections. It's
+ * different from isAfterTyper (and cheaper to test).
+ */
+ def isTyper = false
+
+ def exists: Boolean = true
+
+ private var myPeriod: Period = Periods.InvalidPeriod
+ private var myBase: ContextBase = null
+ private var myErasedTypes = false
+ private var myFlatClasses = false
+ private var myRefChecked = false
+ private var mySymbolicRefs = false
+ private var myLabelsReordered = false
+
+
+ /** The sequence position of this phase in the given context, where 0
+ * is reserved for NoPhase and the first real phase is at position 1.
+ * -1 if the phase is not installed in the context.
+ */
+ def id = myPeriod.firstPhaseId
+
+ def period = myPeriod
+ def start = myPeriod.firstPhaseId
+ def end = myPeriod.lastPhaseId
+
+ final def erasedTypes = myErasedTypes // Phase is after erasure
+ final def flatClasses = myFlatClasses // Phase is after flatten
+ final def refChecked = myRefChecked // Phase is after RefChecks
+ final def symbolicRefs = mySymbolicRefs // Phase is after ResolveSuper, newly generated TermRefs should be symbolic
+ final def labelsReordered = myLabelsReordered // Phase is after LabelDefs, labels are flattened and owner chains don't mirror this
+
+ protected[Phases] def init(base: ContextBase, start: Int, end:Int): Unit = {
+ if (start >= FirstPhaseId)
+ assert(myPeriod == Periods.InvalidPeriod, s"phase $this has already been used once; cannot be reused")
+ myBase = base
+ myPeriod = Period(NoRunId, start, end)
+ myErasedTypes = prev.getClass == classOf[Erasure] || prev.erasedTypes
+ myFlatClasses = prev.getClass == classOf[Flatten] || prev.flatClasses
+ myRefChecked = prev.getClass == classOf[RefChecks] || prev.refChecked
+ mySymbolicRefs = prev.getClass == classOf[ResolveSuper] || prev.symbolicRefs
+ myLabelsReordered = prev.getClass == classOf[LabelDefs] || prev.labelsReordered
+ }
+
+ protected[Phases] def init(base: ContextBase, id: Int): Unit = init(base, id, id)
+
+ final def <=(that: Phase) =
+ exists && id <= that.id
+
+ final def prev: Phase =
+ if (id > FirstPhaseId) myBase.phases(start - 1) else myBase.NoPhase
+
+ final def next: Phase =
+ if (hasNext) myBase.phases(end + 1) else myBase.NoPhase
+
+ final def hasNext = start >= FirstPhaseId && end + 1 < myBase.phases.length
+
+ final def iterator =
+ Iterator.iterate(this)(_.next) takeWhile (_.hasNext)
+
+ override def toString = phaseName
+ }
+
+ trait NeedsCompanions {
+ def isCompanionNeeded(cls: ClassSymbol)(implicit ctx: Context): Boolean
+ }
+
+ /** Replace all instances of `oldPhaseClass` in `current` phases
+ * by the result of `newPhases` applied to the old phase.
+ */
+ def replace(oldPhaseClass: Class[_ <: Phase], newPhases: Phase => List[Phase], current: List[List[Phase]]): List[List[Phase]] =
+ current.map(_.flatMap(phase =>
+ if (oldPhaseClass.isInstance(phase)) newPhases(phase) else phase :: Nil))
+
+ /** Dotty deviation: getClass yields Class[_], instead of Class[_ <: <type of receiver>].
+ * We can get back the old behavior using this decorator. We should also use the same
+ * trick for standard getClass.
+ */
+ private implicit class getClassDeco[T](val x: T) extends AnyVal {
+ def getClazz: Class[_ <: T] = x.getClass.asInstanceOf[Class[_ <: T]]
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala
new file mode 100644
index 000000000..3daa8117e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala
@@ -0,0 +1,437 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools.dotc
+package core
+
+import Symbols._
+import Types.{TermRef, NoPrefix}
+import Flags.Implicit
+import Names._
+import Periods._
+import Decorators._
+import Contexts._
+import Denotations._
+import SymDenotations._
+import printing.Texts._
+import printing.Printer
+import util.common._
+import util.DotClass
+import SymDenotations.NoDenotation
+import collection.mutable
+
+object Scopes {
+
+ /** Maximal fill factor of hash table */
+ private final val FillFactor = 2.0/3.0
+
+ /** A hash table is created once the current size exceeds MinHash * FillFactor.
+ * The initial hash table has twice that size (i.e. 16).
+ * This value must be a power of two, so that the index of an element can
+ * be computed as element.hashCode & (hashTable.length - 1)
+ */
+ private final val MinHash = 8
+
+ /** The maximal permissible number of recursions when creating
+ * a hashtable
+ */
+ private final val MaxRecursions = 1000
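+
+ // Worked example of the thresholds above (plain arithmetic on the constants defined
+ // here): with MinHash = 8 and FillFactor = 2/3, a scope switches from linear search to
+ // a hash table once it already holds 6 entries (6 >= 8 * 2/3); the table is allocated
+ // with 2 * MinHash = 16 buckets, and an entry whose name has hash code h goes into
+ // bucket h & (16 - 1).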
+
+ class ScopeEntry private[Scopes] (val name: Name, _sym: Symbol, val owner: Scope) {
+
+ var sym: Symbol = _sym
+
+ /** the next entry in the hash bucket
+ */
+ var tail: ScopeEntry = null
+
+ /** the preceding entry in this scope
+ */
+ var prev: ScopeEntry = null
+
+ override def toString: String = sym.toString
+ }
+
+ /** A scope contains a set of symbols. It can be an extension
+ * of some outer scope, from which it inherits all symbols.
+ * This class does not have any methods to add symbols to a scope
+ * or to delete them. These methods are provided by subclass
+ * MutableScope.
+ */
+ abstract class Scope extends DotClass with printing.Showable with Iterable[Symbol] {
+
+ /** The last scope-entry from which all others are reachable via `prev` */
+ private[dotc] def lastEntry: ScopeEntry
+
+ /** The number of symbols in this scope (including inherited ones
+ * from outer scopes).
+ */
+ def size: Int
+
+ /** The number of outer scopes from which symbols are inherited */
+ def nestingLevel: Int
+
+ /** The symbols in this scope in the order they were entered;
+ * inherited from outer ones first.
+ */
+ def toList: List[Symbol]
+
+ /** Return all symbols as an iterator in the order they were entered in this scope.
+ */
+ def iterator: Iterator[Symbol] = toList.iterator
+
+ /** Returns a new mutable scope with the same content as this one. */
+ def cloneScope(implicit ctx: Context): MutableScope
+
+ /** Is the scope empty? */
+ override def isEmpty: Boolean = lastEntry eq null
+
+ /** Lookup a symbol entry matching given name. */
+ def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry
+
+ /** Lookup next entry with same name as this one */
+ def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry
+
+ /** Lookup a symbol */
+ final def lookup(name: Name)(implicit ctx: Context): Symbol = {
+ val e = lookupEntry(name)
+ if (e eq null) NoSymbol else e.sym
+ }
+
+ /** Returns an iterator yielding every symbol with given name in this scope.
+ */
+ final def lookupAll(name: Name)(implicit ctx: Context): Iterator[Symbol] = new Iterator[Symbol] {
+ var e = lookupEntry(name)
+ def hasNext: Boolean = e ne null
+ def next(): Symbol = { val r = e.sym; e = lookupNextEntry(e); r }
+ }
+
+ /** The denotation set of all the symbols with the given name in this scope.
+ * Symbols occur in the result in reverse order relative to their occurrence
+ * in `this.toList`.
+ */
+ final def denotsNamed(name: Name, select: SymDenotation => Boolean = selectAll)(implicit ctx: Context): PreDenotation = {
+ var syms: PreDenotation = NoDenotation
+ var e = lookupEntry(name)
+ while (e != null) {
+ val d = e.sym.denot
+ if (select(d)) syms = syms union d
+ e = lookupNextEntry(e)
+ }
+ syms
+ }
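+
+ // Hedged example (the scope contents are hypothetical): if two overloaded symbols named
+ // `f` were entered into a scope, `lookup(termName("f"))` returns the most recently
+ // entered one, `lookupAll(termName("f"))` iterates over both, most recent first, and
+ // `denotsNamed(termName("f"))` unions their denotations in that same order.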
+
+ /** The scope that keeps only those symbols from this scope that match the
+ * given predicate. If all symbols match, returns the scope itself, otherwise
+ * a copy with the matching symbols.
+ */
+ final def filteredScope(p: Symbol => Boolean)(implicit ctx: Context): Scope = {
+ var result: MutableScope = null
+ for (sym <- iterator)
+ if (!p(sym)) {
+ if (result == null) result = cloneScope
+ result.unlink(sym)
+ }
+ if (result == null) this else result
+ }
+
+ def implicitDecls(implicit ctx: Context): List[TermRef] = Nil
+
+ def openForMutations: MutableScope = unsupported("openForMutations")
+
+ final def toText(printer: Printer): Text = printer.toText(this)
+
+ def checkConsistent()(implicit ctx: Context) = ()
+ }
+
+ /** A subclass of Scope that defines methods for entering and
+ * unlinking entries.
+ * Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead.
+ * This is necessary because when run from reflection every scope needs to have a
+ * SynchronizedScope as mixin.
+ */
+ class MutableScope protected[Scopes](initElems: ScopeEntry, initSize: Int, val nestingLevel: Int = 0)
+ extends Scope {
+
+ protected[Scopes] def this(base: Scope)(implicit ctx: Context) = {
+ this(base.lastEntry, base.size, base.nestingLevel + 1)
+ ensureCapacity(MinHash)(ctx) // the implicit ctx does not seem to be in scope in a secondary constructor call, so it is passed explicitly.
+ }
+
+ def this() = this(null, 0, 0)
+
+ private[dotc] var lastEntry: ScopeEntry = initElems
+
+ /** The size of the scope */
+ private[this] var _size = initSize
+
+ override final def size = _size
+ private def size_= (x: Int) = _size = x
+
+ /** the hash table
+ */
+ private var hashTable: Array[ScopeEntry] = null
+
+ /** a cache for all elements, to be used by symbol iterator.
+ */
+ private var elemsCache: List[Symbol] = null
+
+ /** Clone scope, taking care not to force the denotations of any symbols in the scope.
+ */
+ def cloneScope(implicit ctx: Context): MutableScope = {
+ val entries = new mutable.ArrayBuffer[ScopeEntry]
+ var e = lastEntry
+ while ((e ne null) && e.owner == this) {
+ entries += e
+ e = e.prev
+ }
+ val scope = newScope
+ for (i <- entries.length - 1 to 0 by -1) {
+ val e = entries(i)
+ scope.newScopeEntry(e.name, e.sym)
+ }
+ scope
+ }
+
+ /** create and enter a scope entry with given name and symbol */
+ protected def newScopeEntry(name: Name, sym: Symbol)(implicit ctx: Context): ScopeEntry = {
+ ensureCapacity(if (hashTable ne null) hashTable.length else MinHash)
+ val e = new ScopeEntry(name, sym, this)
+ e.prev = lastEntry
+ lastEntry = e
+ if (hashTable ne null) enterInHash(e)
+ size += 1
+ elemsCache = null
+ e
+ }
+
+ /** create and enter a scope entry */
+ protected def newScopeEntry(sym: Symbol)(implicit ctx: Context): ScopeEntry =
+ newScopeEntry(sym.name, sym)
+
+ private def enterInHash(e: ScopeEntry)(implicit ctx: Context): Unit = {
+ val idx = e.name.hashCode & (hashTable.length - 1)
+ e.tail = hashTable(idx)
+ assert(e.tail != e)
+ hashTable(idx) = e
+ }
+
+ /** enter a symbol in this scope. */
+ final def enter[T <: Symbol](sym: T)(implicit ctx: Context): T = {
+ if (sym.isType && ctx.phaseId <= ctx.typerPhase.id) {
+ assert(lookup(sym.name) == NoSymbol,
+ s"duplicate ${sym.debugString}; previous was ${lookup(sym.name).debugString}") // !!! DEBUG
+ }
+ newScopeEntry(sym)
+ sym
+ }
+
+ /** enter a symbol, asserting that no symbol with same name exists in scope */
+ final def enterUnique(sym: Symbol)(implicit ctx: Context): Unit = {
+ assert(lookup(sym.name) == NoSymbol, (sym.showLocated, lookup(sym.name).showLocated))
+ enter(sym)
+ }
+
+ private def ensureCapacity(tableSize: Int)(implicit ctx: Context): Unit =
+ if (size >= tableSize * FillFactor) createHash(tableSize * 2)
+
+ private def createHash(tableSize: Int)(implicit ctx: Context): Unit =
+ if (size > tableSize * FillFactor) createHash(tableSize * 2)
+ else {
+ hashTable = new Array[ScopeEntry](tableSize)
+ enterAllInHash(lastEntry)
+ // checkConsistent() // DEBUG
+ }
+
+ private def enterAllInHash(e: ScopeEntry, n: Int = 0)(implicit ctx: Context): Unit = {
+ if (e ne null) {
+ if (n < MaxRecursions) {
+ enterAllInHash(e.prev, n + 1)
+ enterInHash(e)
+ } else {
+ var entries: List[ScopeEntry] = List()
+ var ee = e
+ while (ee ne null) {
+ entries = ee :: entries
+ ee = ee.prev
+ }
+ entries foreach enterInHash
+ }
+ }
+ }
+
+ /** Remove entry from this scope (which is required to be present) */
+ final def unlink(e: ScopeEntry)(implicit ctx: Context): Unit = {
+ if (lastEntry == e) {
+ lastEntry = e.prev
+ } else {
+ var e1 = lastEntry
+ while (e1.prev != e) e1 = e1.prev
+ e1.prev = e.prev
+ }
+ if (hashTable ne null) {
+ val index = e.name.hashCode & (hashTable.length - 1)
+ var e1 = hashTable(index)
+ if (e1 == e)
+ hashTable(index) = e.tail
+ else {
+ while (e1.tail != e) e1 = e1.tail
+ e1.tail = e.tail
+ }
+ }
+ elemsCache = null
+ size -= 1
+ }
+
+ /** remove symbol from this scope if it is present */
+ final def unlink(sym: Symbol)(implicit ctx: Context): Unit = {
+ var e = lookupEntry(sym.name)
+ while (e ne null) {
+ if (e.sym == sym) unlink(e)
+ e = lookupNextEntry(e)
+ }
+ }
+
+ /** Replace symbol `prev` (if it exists in current scope) by symbol `replacement`.
+ * @pre `prev` and `replacement` have the same name.
+ */
+ final def replace(prev: Symbol, replacement: Symbol)(implicit ctx: Context): Unit = {
+ require(prev.name == replacement.name)
+ var e = lookupEntry(prev.name)
+ while (e ne null) {
+ if (e.sym == prev) e.sym = replacement
+ e = lookupNextEntry(e)
+ }
+ elemsCache = null
+ }
+
+ /** Lookup a symbol entry matching given name.
+ */
+ override final def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = {
+ var e: ScopeEntry = null
+ if (hashTable ne null) {
+ e = hashTable(name.hashCode & (hashTable.length - 1))
+ while ((e ne null) && e.name != name) {
+ e = e.tail
+ }
+ } else {
+ e = lastEntry
+ while ((e ne null) && e.name != name) {
+ e = e.prev
+ }
+ }
+ e
+ }
+
+ /** lookup next entry with same name as this one */
+ override final def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry = {
+ var e = entry
+ if (hashTable ne null)
+ do { e = e.tail } while ((e ne null) && e.name != entry.name)
+ else
+ do { e = e.prev } while ((e ne null) && e.name != entry.name)
+ e
+ }
+
+ /** Returns all symbols as a list in the order they were entered in this scope.
+ * Does _not_ include the elements of inherited scopes.
+ */
+ override final def toList: List[Symbol] = {
+ if (elemsCache eq null) {
+ elemsCache = Nil
+ var e = lastEntry
+ while ((e ne null) && e.owner == this) {
+ elemsCache = e.sym :: elemsCache
+ e = e.prev
+ }
+ }
+ elemsCache
+ }
+
+ override def implicitDecls(implicit ctx: Context): List[TermRef] = {
+ var irefs = new mutable.ListBuffer[TermRef]
+ var e = lastEntry
+ while (e ne null) {
+ if (e.sym is Implicit) {
+ val d = e.sym.denot
+ irefs += TermRef.withSigAndDenot(NoPrefix, d.name.asTermName, d.signature, d)
+ }
+ e = e.prev
+ }
+ irefs.toList
+ }
+
+ /** Vanilla scope - symbols are stored in declaration order.
+ */
+ final def sorted: List[Symbol] = toList
+
+ override def foreach[U](p: Symbol => U): Unit = toList foreach p
+
+ override def filter(p: Symbol => Boolean): List[Symbol] = {
+ var syms: List[Symbol] = Nil
+ var e = lastEntry
+ while ((e ne null) && e.owner == this) {
+ val sym = e.sym
+ if (p(sym)) syms = sym :: syms
+ e = e.prev
+ }
+ syms
+ }
+
+ override def openForMutations: MutableScope = this
+
+ /** Check that all symbols in this scope are in their correct hashtable buckets. */
+ override def checkConsistent()(implicit ctx: Context) = {
+ var e = lastEntry
+ while (e != null) {
+ var e1 = lookupEntry(e.name)
+ while (e1 != e && e1 != null) e1 = lookupNextEntry(e1)
+ assert(e1 == e, s"PANIC: Entry ${e.name} is badly linked")
+ e = e.prev
+ }
+ }
+ }
+
+ /** Create a new scope */
+ def newScope: MutableScope = new MutableScope()
+
+ /** Create a new scope nested in another one with which it shares its elements */
+ def newNestedScope(outer: Scope)(implicit ctx: Context): MutableScope = new MutableScope(outer)
+
+ /** Create a new scope with given initial elements */
+ def newScopeWith(elems: Symbol*)(implicit ctx: Context): MutableScope = {
+ val scope = newScope
+ elems foreach scope.enter
+ scope
+ }
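+
+ // Hedged usage sketch (`symA` and `symB` stand for Symbols available at the call site;
+ // they are not defined in this file):
+ //   val sc = newScopeWith(symA, symB)
+ //   sc.toList            // List(symA, symB), in entry order
+ //   sc.lookup(symB.name) // symB, assuming no later entry shadows that name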
+
+ /** Create new scope for the members of package `pkg` */
+ def newPackageScope(pkgClass: Symbol): MutableScope = newScope
+
+ /** Transform scope of members of `owner` using operation `op`
+ * This is overridden by the reflective compiler to avoid creating new scopes for packages
+ */
+ def scopeTransform(owner: Symbol)(op: => MutableScope): MutableScope = op
+
+ val selectAll: SymDenotation => Boolean = alwaysTrue
+ val selectPrivate: SymDenotation => Boolean = d => (d.flagsUNSAFE is Flags.Private)
+ val selectNonPrivate: SymDenotation => Boolean = d => !(d.flagsUNSAFE is Flags.Private)
+
+ /** The empty scope (immutable).
+ */
+ object EmptyScope extends Scope {
+ override private[dotc] def lastEntry = null
+ override def size = 0
+ override def nestingLevel = 0
+ override def toList = Nil
+ override def cloneScope(implicit ctx: Context): MutableScope = unsupported("cloneScope")
+ override def lookupEntry(name: Name)(implicit ctx: Context): ScopeEntry = null
+ override def lookupNextEntry(entry: ScopeEntry)(implicit ctx: Context): ScopeEntry = null
+ }
+
+ /** A class for error scopes (mutable)
+ */
+ class ErrorScope(owner: Symbol) extends MutableScope
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala
new file mode 100644
index 000000000..b2e627cbe
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Signature.scala
@@ -0,0 +1,103 @@
+package dotty.tools.dotc
+package core
+
+import Names._, Types._, Contexts._, StdNames._
+import TypeErasure.sigName
+
+/** The signature of a denotation.
+ * Overloaded denotations with the same name are distinguished by
+ * their signatures. A signature of a method (of type PolyType, MethodType, or ExprType) is
+ * composed of a list of signature names, one for each parameter type, plus a signature for
+ * the result type. Methods are uncurried before taking their signatures.
+ * The signature name of a type is the fully qualified name of the type symbol of the type's erasure.
+ *
+ * For instance a definition
+ *
+ * def f(x: Int)(y: List[String]): String
+ *
+ * would have signature
+ *
+ * Signature(
+ * List("scala.Int".toTypeName, "scala.collection.immutable.List".toTypeName),
+ * "scala.String".toTypeName)
+ *
+ * The signatures of non-method types are always `NotAMethod`.
+ *
+ * There are three kinds of "missing" parts of signatures:
+ *
+ * - tpnme.EMPTY Result type marker for NotAMethod and OverloadedSignature
+ * - tpnme.WILDCARD Arises from a Wildcard or error type
+ * - tpnme.Uninstantiated Arises from an uninstantiated type variable
+ */
+case class Signature(paramsSig: List[TypeName], resSig: TypeName) {
+ import Signature._
+
+ /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */
+ private def consistent(name1: TypeName, name2: TypeName) =
+ name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated
+
+ /** Does this signature coincide with that signature on their parameter parts?
+ * This is the case if all parameter names are _consistent_, i.e. they are either
+ * equal or one of them is tpnme.Uninstantiated.
+ */
+ final def consistentParams(that: Signature): Boolean = {
+ def loop(names1: List[TypeName], names2: List[TypeName]): Boolean =
+ if (names1.isEmpty) names2.isEmpty
+ else names2.nonEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail)
+ loop(this.paramsSig, that.paramsSig)
+ }
+
+ /** The degree to which this signature matches `that`.
+ * If parameter names are consistent and result type names match (i.e. they are the same
+ * or one is a wildcard), the result is `FullMatch`.
+ * If only the parameter names are consistent, the result is `ParamMatch` before erasure and
+ * `NoMatch` otherwise.
+ * If the parameters are inconsistent, the result is always `NoMatch`.
+ */
+ final def matchDegree(that: Signature)(implicit ctx: Context): MatchDegree =
+ if (consistentParams(that))
+ if (resSig == that.resSig || isWildcard(resSig) || isWildcard(that.resSig)) FullMatch
+ else if (!ctx.erasedTypes) ParamMatch
+ else NoMatch
+ else NoMatch
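+
+ // Hedged illustration (schematic declarations, not compiler output): two overloads with
+ // consistent parameter parts but different result types, e.g.
+ //   def g(x: Int): String   and   def g(x: Int): Int
+ // have matchDegree ParamMatch before erasure and NoMatch afterwards; if one of the
+ // result signatures were tpnme.WILDCARD, the degree would be FullMatch.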
+
+ /** name.toString == "" or name.toString == "_" */
+ private def isWildcard(name: TypeName) = name.isEmpty || name == tpnme.WILDCARD
+
+ /** Construct a signature by prepending the signature names of the given `params`
+ * to the parameter part of this signature.
+ */
+ def prepend(params: List[Type], isJava: Boolean)(implicit ctx: Context) =
+ Signature((params.map(sigName(_, isJava))) ++ paramsSig, resSig)
+
+ /** A signature is under-defined if its paramsSig part contains at least one
+ * `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature
+ * of a type that still contains uninstantiated type variables. They are eliminated
+ * by `fixSignature` in `PostTyper`.
+ */
+ def isUnderDefined(implicit ctx: Context) =
+ paramsSig.contains(tpnme.Uninstantiated) || resSig == tpnme.Uninstantiated
+}
+
+object Signature {
+
+ type MatchDegree = Int
+ val NoMatch = 0
+ val ParamMatch = 1
+ val FullMatch = 2
+
+ /** The signature of everything that's not a method, i.e. that has
+ * a type different from PolyType, MethodType, or ExprType.
+ */
+ val NotAMethod = Signature(List(), EmptyTypeName)
+
+ /** The signature of an overloaded denotation.
+ */
+ val OverloadedSignature = Signature(List(tpnme.OVERLOADED), EmptyTypeName)
+
+ /** The signature of a method with no parameters and result type `resultType`. */
+ def apply(resultType: Type, isJava: Boolean)(implicit ctx: Context): Signature = {
+ assert(!resultType.isInstanceOf[ExprType])
+ apply(Nil, sigName(resultType, isJava))
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala
new file mode 100644
index 000000000..c2a14b36f
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala
@@ -0,0 +1,844 @@
+package dotty.tools.dotc
+package core
+
+import scala.language.implicitConversions
+import scala.collection.{mutable, immutable}
+import scala.annotation.switch
+import Names._
+import Symbols._
+import Contexts._
+import Decorators.StringDecorator
+import util.NameTransformer
+import scala.collection.breakOut
+
+object StdNames {
+
+/** Base strings from which synthetic names are derived. */
+
+ abstract class DefinedNames[N <: Name] {
+ protected implicit def fromString(s: String): N
+ protected def fromName(name: Name): N = fromString(name.toString)
+
+ private val kws = mutable.Set[N]()
+ protected def kw(name: N) = { kws += name; name }
+
+ final val keywords: collection.Set[N] = kws
+ }
+
+ abstract class ScalaNames[N <: Name] extends DefinedNames[N] {
+ protected def encode(s: String): N = fromName(fromString(s).encode)
+
+// Keywords, need to come first -----------------------
+
+ final val ABSTRACTkw: N = kw("abstract")
+ final val CASEkw: N = kw("case")
+ final val CLASSkw: N = kw("class")
+ final val CATCHkw: N = kw("catch")
+ final val DEFkw: N = kw("def")
+ final val DOkw: N = kw("do")
+ final val ELSEkw: N = kw("else")
+ final val EXTENDSkw: N = kw("extends")
+ final val FALSEkw: N = kw("false")
+ final val FINALkw: N = kw("final")
+ final val FINALLYkw: N = kw("finally")
+ final val FORkw: N = kw("for")
+ final val FORSOMEkw: N = kw("forSome")
+ final val IFkw: N = kw("if")
+ final val IMPLICITkw: N = kw("implicit")
+ final val IMPORTkw: N = kw("import")
+ final val INLINEkw: N = kw("inline")
+ final val LAZYkw: N = kw("lazy")
+ final val MACROkw: N = kw("macro")
+ final val MATCHkw: N = kw("match")
+ final val NEWkw: N = kw("new")
+ final val NULLkw: N = kw("null")
+ final val OBJECTkw: N = kw("object")
+ final val OVERRIDEkw: N = kw("override")
+ final val PACKAGEkw: N = kw("package")
+ final val PRIVATEkw: N = kw("private")
+ final val PROTECTEDkw: N = kw("protected")
+ final val RETURNkw: N = kw("return")
+ final val SEALEDkw: N = kw("sealed")
+ final val SUPERkw: N = kw("super")
+ final val THENkw: N = kw("then")
+ final val THISkw: N = kw("this")
+ final val THROWkw: N = kw("throw")
+ final val TRAITkw: N = kw("trait")
+ final val TRUEkw: N = kw("true")
+ final val TRYkw: N = kw("try")
+ final val TYPEkw: N = kw("type")
+ final val VALkw: N = kw("val")
+ final val VARkw: N = kw("var")
+ final val WITHkw: N = kw("with")
+ final val WHILEkw: N = kw("while")
+ final val YIELDkw: N = kw("yield")
+ final val DOTkw: N = kw(".")
+ final val USCOREkw: N = kw("_")
+ final val COLONkw: N = kw(":")
+ final val EQUALSkw: N = kw("=")
+ final val ARROWkw: N = kw("=>")
+ final val LARROWkw: N = kw("<-")
+ final val SUBTYPEkw: N = kw("<:")
+ final val VIEWBOUNDkw: N = kw("<%")
+ final val SUPERTYPEkw: N = kw(">:")
+ final val HASHkw: N = kw("#")
+ final val ATkw: N = kw("@")
+
+ val ANON_CLASS: N = "$anon"
+ val ANON_FUN: N = "$anonfun"
+ val BITMAP_PREFIX: N = "bitmap$"
+ val BITMAP_NORMAL: N = BITMAP_PREFIX // initialization bitmap for public/protected lazy vals
+ val BITMAP_TRANSIENT: N = BITMAP_PREFIX + "trans$" // initialization bitmap for transient lazy vals
+ val BITMAP_CHECKINIT: N = BITMAP_PREFIX + "init$" // initialization bitmap for checkinit values
+ val BITMAP_CHECKINIT_TRANSIENT: N = BITMAP_PREFIX + "inittrans$" // initialization bitmap for transient checkinit values
+ val DEFAULT_GETTER: N = "$default$"
+ val DEFAULT_GETTER_INIT: N = NameTransformer.encode("<init>")
+ val DO_WHILE_PREFIX: N = "doWhile$"
+ val EMPTY: N = ""
+ val EMPTY_PACKAGE: N = Names.EMPTY_PACKAGE.toString
+ val EVIDENCE_PARAM_PREFIX: N = "evidence$"
+ val EXCEPTION_RESULT_PREFIX: N = "exceptionResult"
+ val EXPAND_SEPARATOR: N = "$$"
+ val IMPL_CLASS_SUFFIX: N = "$class"
+ val IMPORT: N = "<import>"
+ val INLINE_ACCESSOR_PREFIX = "$inlineAccessor$"
+ val INTERPRETER_IMPORT_WRAPPER: N = "$iw"
+ val INTERPRETER_LINE_PREFIX: N = "line"
+ val INTERPRETER_VAR_PREFIX: N = "res"
+ val INTERPRETER_WRAPPER_SUFFIX: N = "$object"
+ val LOCALDUMMY_PREFIX: N = "<local " // owner of local blocks
+ val MODULE_SUFFIX: N = NameTransformer.MODULE_SUFFIX_STRING
+ val AVOID_CLASH_SUFFIX: N = "$_avoid_name_clash_$"
+ val MODULE_VAR_SUFFIX: N = "$module"
+ val NAME_JOIN: N = NameTransformer.NAME_JOIN_STRING
+ val USCORE_PARAM_PREFIX: N = "_$"
+ val OPS_PACKAGE: N = "<special-ops>"
+ val OVERLOADED: N = "<overloaded>"
+ val PACKAGE: N = "package"
+ val PACKAGE_CLS: N = "package$"
+ val PROTECTED_PREFIX: N = "protected$"
+ val PROTECTED_SET_PREFIX: N = PROTECTED_PREFIX + "set"
+ val ROOT: N = "<root>"
+ val SHADOWED: N = "(shadowed)" // tag to be used until we have proper name kinds
+ val SINGLETON_SUFFIX: N = ".type"
+ val SPECIALIZED_SUFFIX: N = "$sp"
+ val SUPER_PREFIX: N = "super$"
+ val WHILE_PREFIX: N = "while$"
+ val DEFAULT_EXCEPTION_NAME: N = "ex$"
+ val INITIALIZER_PREFIX: N = "initial$"
+ val COMPANION_MODULE_METHOD: N = "companion$module"
+ val COMPANION_CLASS_METHOD: N = "companion$class"
+ val TRAIT_SETTER_SEPARATOR: N = "$_setter_$"
+
+ // value types (and AnyRef) are all used as terms as well
+ // as (at least) arguments to the @specialize annotation.
+ final val Boolean: N = "Boolean"
+ final val Byte: N = "Byte"
+ final val Char: N = "Char"
+ final val Double: N = "Double"
+ final val Float: N = "Float"
+ final val Int: N = "Int"
+ final val Long: N = "Long"
+ final val Short: N = "Short"
+ final val Unit: N = "Unit"
+
+ final val ScalaValueNames: scala.List[N] =
+ scala.List(Byte, Char, Short, Int, Long, Float, Double, Boolean, Unit)
+
+ // some types whose companions we utilize
+ final val AnyRef: N = "AnyRef"
+ final val Array: N = "Array"
+ final val List: N = "List"
+ final val Seq: N = "Seq"
+ final val Symbol: N = "Symbol"
+ final val ClassTag: N = "ClassTag"
+ final val classTag: N = "classTag"
+ final val WeakTypeTag: N = "WeakTypeTag"
+ final val TypeTag : N = "TypeTag"
+ final val typeTag: N = "typeTag"
+ final val Expr: N = "Expr"
+ final val String: N = "String"
+ final val Annotation: N = "Annotation"
+
+ // fictions we use as both types and terms
+ final val ERROR: N = "<error>"
+ final val ERRORenc: N = encode("<error>")
+ final val NO_NAME: N = "<none>" // formerly NOSYMBOL
+ final val WILDCARD: N = "_"
+
+// ----- Type names -----------------------------------------
+
+ final val BYNAME_PARAM_CLASS: N = "<byname>"
+ final val EQUALS_PATTERN: N = "<equals>"
+ final val LOCAL_CHILD: N = "<local child>"
+ final val REPEATED_PARAM_CLASS: N = "<repeated>"
+ final val WILDCARD_STAR: N = "_*"
+ final val REIFY_TREECREATOR_PREFIX: N = "$treecreator"
+ final val REIFY_TYPECREATOR_PREFIX: N = "$typecreator"
+
+ final val AbstractFunction: N = "AbstractFunction"
+ final val Any: N = "Any"
+ final val AnyVal: N = "AnyVal"
+ final val ExprApi: N = "ExprApi"
+ final val Function: N = "Function"
+ final val Mirror: N = "Mirror"
+ final val Nothing: N = "Nothing"
+ final val Null: N = "Null"
+ final val Object: N = "Object"
+ final val PartialFunction: N = "PartialFunction"
+ final val PrefixType: N = "PrefixType"
+ final val Product: N = "Product"
+ final val Serializable: N = "Serializable"
+ final val Singleton: N = "Singleton"
+ final val Throwable: N = "Throwable"
+ final val Tuple: N = "Tuple"
+
+ final val ClassfileAnnotation: N = "ClassfileAnnotation"
+ final val ClassManifest: N = "ClassManifest"
+ final val Enum: N = "Enum"
+ final val Group: N = "Group"
+ final val Tree: N = "Tree"
+ final val Type : N = "Type"
+ final val TypeTree: N = "TypeTree"
+
+ // Annotation simple names, used in Namer
+ final val BeanPropertyAnnot: N = "BeanProperty"
+ final val BooleanBeanPropertyAnnot: N = "BooleanBeanProperty"
+ final val bridgeAnnot: N = "bridge"
+
+ // Classfile Attributes
+ final val AnnotationDefaultATTR: N = "AnnotationDefault"
+ final val BridgeATTR: N = "Bridge"
+ final val ClassfileAnnotationATTR: N = "RuntimeInvisibleAnnotations" // RetentionPolicy.CLASS. Currently not used (Apr 2009).
+ final val CodeATTR: N = "Code"
+ final val ConstantValueATTR: N = "ConstantValue"
+ final val DeprecatedATTR: N = "Deprecated"
+ final val ExceptionsATTR: N = "Exceptions"
+ final val InnerClassesATTR: N = "InnerClasses"
+ final val LineNumberTableATTR: N = "LineNumberTable"
+ final val LocalVariableTableATTR: N = "LocalVariableTable"
+ final val RuntimeAnnotationATTR: N = "RuntimeVisibleAnnotations" // RetentionPolicy.RUNTIME
+ final val RuntimeParamAnnotationATTR: N = "RuntimeVisibleParameterAnnotations" // RetentionPolicy.RUNTIME (annotations on parameters)
+ final val ScalaATTR: N = "Scala"
+ final val ScalaSignatureATTR: N = "ScalaSig"
+ final val TASTYATTR: N = "TASTY"
+ final val SignatureATTR: N = "Signature"
+ final val SourceFileATTR: N = "SourceFile"
+ final val SyntheticATTR: N = "Synthetic"
+
+// ----- Term names -----------------------------------------
+
+ // Compiler-internal
+ val ANYname: N = "<anyname>"
+ val CONSTRUCTOR: N = Names.CONSTRUCTOR.toString
+ val DEFAULT_CASE: N = "defaultCase$"
+ val EVT2U: N = "evt2u$"
+ val EQEQ_LOCAL_VAR: N = "eqEqTemp$"
+ val FAKE_LOCAL_THIS: N = "this$"
+ val LAZY_LOCAL: N = "$lzy"
+ val LAZY_LOCAL_INIT: N = "$lzyINIT"
+ val LAZY_FIELD_OFFSET: N = "OFFSET$"
+ val LAZY_SLOW_SUFFIX: N = "$lzycompute"
+ val LOCAL_SUFFIX: N = "$$local"
+ val UNIVERSE_BUILD_PREFIX: N = "$u.build."
+ val UNIVERSE_BUILD: N = "$u.build"
+ val UNIVERSE_PREFIX: N = "$u."
+ val UNIVERSE_SHORT: N = "$u"
+ val MIRROR_PREFIX: N = "$m."
+ val MIRROR_SHORT: N = "$m"
+ val MIRROR_UNTYPED: N = "$m$untyped"
+ val REIFY_FREE_PREFIX: N = "free$"
+ val REIFY_FREE_THIS_SUFFIX: N = "$this"
+ val REIFY_FREE_VALUE_SUFFIX: N = "$value"
+ val REIFY_SYMDEF_PREFIX: N = "symdef$"
+ val MODULE_INSTANCE_FIELD: N = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
+ val OUTER: N = "$outer"
+ val OUTER_LOCAL: N = "$outer "
+ val OUTER_SELECT: N = "_<outer>" // emitted by inliner, replaced by outer path in explicitouter
+ val REFINE_CLASS: N = "<refinement>"
+ val ROOTPKG: N = "_root_"
+ val SELECTOR_DUMMY: N = "<unapply-selector>"
+ val SELF: N = "$this"
+ val SETTER_SUFFIX: N = encode("_=")
+ val SKOLEM: N = "<skolem>"
+ val SPECIALIZED_INSTANCE: N = "specInstance$"
+ val THIS: N = "_$this"
+ val TRAIT_CONSTRUCTOR: N = "$init$"
+ val U2EVT: N = "u2evt$"
+
+ final val Nil: N = "Nil"
+ final val Predef: N = "Predef"
+ final val ScalaRunTime: N = "ScalaRunTime"
+ final val Some: N = "Some"
+
+ val x_0 : N = "x$0"
+ val x_1 : N = "x$1"
+ val x_2 : N = "x$2"
+ val x_3 : N = "x$3"
+ val x_4 : N = "x$4"
+ val x_5 : N = "x$5"
+ val x_6 : N = "x$6"
+ val x_7 : N = "x$7"
+ val x_8 : N = "x$8"
+ val x_9 : N = "x$9"
+ val _1 : N = "_1"
+ val _2 : N = "_2"
+ val _3 : N = "_3"
+ val _4 : N = "_4"
+ val _5 : N = "_5"
+ val _6 : N = "_6"
+ val _7 : N = "_7"
+ val _8 : N = "_8"
+ val _9 : N = "_9"
+ val _10 : N = "_10"
+ val _11 : N = "_11"
+ val _12 : N = "_12"
+ val _13 : N = "_13"
+ val _14 : N = "_14"
+ val _15 : N = "_15"
+ val _16 : N = "_16"
+ val _17 : N = "_17"
+ val _18 : N = "_18"
+ val _19 : N = "_19"
+ val _20 : N = "_20"
+ val _21 : N = "_21"
+ val _22 : N = "_22"
+
+ val ??? = encode("???")
+
+ val genericWrapArray: N = "genericWrapArray"
+ def wrapRefArray: N = "wrapRefArray"
+ def wrapXArray(clsName: Name): N = "wrap" + clsName + "Array"
+
+ // Compiler utilized names
+
+ val AnnotatedType: N = "AnnotatedType"
+ val AppliedTypeTree: N = "AppliedTypeTree"
+ val ArrayAnnotArg: N = "ArrayAnnotArg"
+ val Constant: N = "Constant"
+ val ConstantType: N = "ConstantType"
+ val ExistentialTypeTree: N = "ExistentialTypeTree"
+ val Flag : N = "Flag"
+ val Ident: N = "Ident"
+ val Import: N = "Import"
+ val Literal: N = "Literal"
+ val LiteralAnnotArg: N = "LiteralAnnotArg"
+ val Modifiers: N = "Modifiers"
+ val NestedAnnotArg: N = "NestedAnnotArg"
+ val NoFlags: N = "NoFlags"
+ val NoPrefix: N = "NoPrefix"
+ val NoSymbol: N = "NoSymbol"
+ val NoType: N = "NoType"
+ val Pair: N = "Pair"
+ val Ref: N = "Ref"
+ val RootPackage: N = "RootPackage"
+ val RootClass: N = "RootClass"
+ val Scala2: N = "Scala2"
+ val Select: N = "Select"
+ val StringContext: N = "StringContext"
+ val This: N = "This"
+ val ThisType: N = "ThisType"
+ val Tuple2: N = "Tuple2"
+ val TYPE_ : N = "TYPE"
+ val TypeApply: N = "TypeApply"
+ val TypeRef: N = "TypeRef"
+ val UNIT : N = "UNIT"
+ val add_ : N = "add"
+ val annotation: N = "annotation"
+ val anyValClass: N = "anyValClass"
+ val append: N = "append"
+ val apply: N = "apply"
+ val applyDynamic: N = "applyDynamic"
+ val applyDynamicNamed: N = "applyDynamicNamed"
+ val applyOrElse: N = "applyOrElse"
+ val args : N = "args"
+ val argv : N = "argv"
+ val arrayClass: N = "arrayClass"
+ val arrayElementClass: N = "arrayElementClass"
+ val arrayValue: N = "arrayValue"
+ val array_apply : N = "array_apply"
+ val array_clone : N = "array_clone"
+ val array_length : N = "array_length"
+ val array_update : N = "array_update"
+ val arraycopy: N = "arraycopy"
+ val asTerm: N = "asTerm"
+ val asModule: N = "asModule"
+ val asMethod: N = "asMethod"
+ val asType: N = "asType"
+ val asClass: N = "asClass"
+ val asInstanceOf_ : N = "asInstanceOf"
+ val assert_ : N = "assert"
+ val assume_ : N = "assume"
+ val box: N = "box"
+ val build : N = "build"
+ val bytes: N = "bytes"
+ val canEqual_ : N = "canEqual"
+ val checkInitialized: N = "checkInitialized"
+ val ClassManifestFactory: N = "ClassManifestFactory"
+ val classOf: N = "classOf"
+ val clone_ : N = "clone"
+ // val conforms : N = "conforms" // Dotty deviation: no special treatment of conforms, so the occurrence of the name here would cause to unintended implicit shadowing. Should find a less common name for it in Predef.
+ val copy: N = "copy"
+ val currentMirror: N = "currentMirror"
+ val create: N = "create"
+ val definitions: N = "definitions"
+ val delayedInit: N = "delayedInit"
+ val delayedInitArg: N = "delayedInit$body"
+ val drop: N = "drop"
+ val dynamics: N = "dynamics"
+ val dummyApply: N = "<dummy-apply>"
+ val elem: N = "elem"
+ val emptyValDef: N = "emptyValDef"
+ val ensureAccessible : N = "ensureAccessible"
+ val eq: N = "eq"
+ val equalsNumChar : N = "equalsNumChar"
+ val equalsNumNum : N = "equalsNumNum"
+ val equalsNumObject : N = "equalsNumObject"
+ val equals_ : N = "equals"
+ val error: N = "error"
+ val eval: N = "eval"
+ val eqAny: N = "eqAny"
+ val ex: N = "ex"
+ val experimental: N = "experimental"
+ val f: N = "f"
+ val false_ : N = "false"
+ val filter: N = "filter"
+ val finalize_ : N = "finalize"
+ val find_ : N = "find"
+ val flagsFromBits : N = "flagsFromBits"
+ val flatMap: N = "flatMap"
+ val foreach: N = "foreach"
+ val genericArrayOps: N = "genericArrayOps"
+ val get: N = "get"
+ val getClass_ : N = "getClass"
+ val getOrElse: N = "getOrElse"
+ val hasNext: N = "hasNext"
+ val hashCode_ : N = "hashCode"
+ val hash_ : N = "hash"
+ val head: N = "head"
+ val higherKinds: N = "higherKinds"
+ val identity: N = "identity"
+ val implicitly: N = "implicitly"
+ val in: N = "in"
+ val info: N = "info"
+ val inlinedEquals: N = "inlinedEquals"
+ val isArray: N = "isArray"
+ val isDefined: N = "isDefined"
+ val isDefinedAt: N = "isDefinedAt"
+ val isDefinedAtImpl: N = "$isDefinedAt"
+ val isEmpty: N = "isEmpty"
+ val isInstanceOf_ : N = "isInstanceOf"
+ val java: N = "java"
+ val key: N = "key"
+ val lang: N = "lang"
+ val length: N = "length"
+ val lengthCompare: N = "lengthCompare"
+ val liftedTree: N = "liftedTree"
+ val `macro` : N = "macro"
+ val macroThis : N = "_this"
+ val macroContext : N = "c"
+ val main: N = "main"
+ val manifest: N = "manifest"
+ val ManifestFactory: N = "ManifestFactory"
+ val manifestToTypeTag: N = "manifestToTypeTag"
+ val map: N = "map"
+ val materializeClassTag: N = "materializeClassTag"
+ val materializeWeakTypeTag: N = "materializeWeakTypeTag"
+ val materializeTypeTag: N = "materializeTypeTag"
+ val mirror : N = "mirror"
+ val moduleClass : N = "moduleClass"
+ val name: N = "name"
+ val ne: N = "ne"
+ val newFreeTerm: N = "newFreeTerm"
+ val newFreeType: N = "newFreeType"
+ val newNestedSymbol: N = "newNestedSymbol"
+ val newScopeWith: N = "newScopeWith"
+ val next: N = "next"
+ val nmeNewTermName: N = "newTermName"
+ val nmeNewTypeName: N = "newTypeName"
+ val noAutoTupling: N = "noAutoTupling"
+ val normalize: N = "normalize"
+ val notifyAll_ : N = "notifyAll"
+ val notify_ : N = "notify"
+ val null_ : N = "null"
+ val ofDim: N = "ofDim"
+ val origin: N = "origin"
+ val prefix : N = "prefix"
+ val productArity: N = "productArity"
+ val productElement: N = "productElement"
+ val productIterator: N = "productIterator"
+ val productPrefix: N = "productPrefix"
+ val readResolve: N = "readResolve"
+ val reflect : N = "reflect"
+ val reify : N = "reify"
+ val rootMirror : N = "rootMirror"
+ val runOrElse: N = "runOrElse"
+ val runtime: N = "runtime"
+ val runtimeClass: N = "runtimeClass"
+ val runtimeMirror: N = "runtimeMirror"
+ val sameElements: N = "sameElements"
+ val scala_ : N = "scala"
+ val selectDynamic: N = "selectDynamic"
+ val selectOverloadedMethod: N = "selectOverloadedMethod"
+ val selectTerm: N = "selectTerm"
+ val selectType: N = "selectType"
+ val self: N = "self"
+ val seqToArray: N = "seqToArray"
+ val setAccessible: N = "setAccessible"
+ val setAnnotations: N = "setAnnotations"
+ val setSymbol: N = "setSymbol"
+ val setType: N = "setType"
+ val setTypeSignature: N = "setTypeSignature"
+ val splice: N = "splice"
+ val staticClass : N = "staticClass"
+ val staticModule : N = "staticModule"
+ val staticPackage : N = "staticPackage"
+ val synchronized_ : N = "synchronized"
+ val tail: N = "tail"
+ val `then` : N = "then"
+ val this_ : N = "this"
+ val thisPrefix : N = "thisPrefix"
+ val throw_ : N = "throw"
+ val toArray: N = "toArray"
+ val toList: N = "toList"
+ val toObjectArray : N = "toObjectArray"
+ val toSeq: N = "toSeq"
+ val toString_ : N = "toString"
+ val toTypeConstructor: N = "toTypeConstructor"
+ val tpe : N = "tpe"
+ val tree : N = "tree"
+ val true_ : N = "true"
+ val typedProductIterator: N = "typedProductIterator"
+ val typeTagToManifest: N = "typeTagToManifest"
+ val unapply: N = "unapply"
+ val unapplySeq: N = "unapplySeq"
+ val unbox: N = "unbox"
+ val universe: N = "universe"
+ val update: N = "update"
+ val updateDynamic: N = "updateDynamic"
+ val value: N = "value"
+ val valueOf : N = "valueOf"
+ val values : N = "values"
+ val view_ : N = "view"
+ val wait_ : N = "wait"
+ val withFilter: N = "withFilter"
+ val withFilterIfRefutable: N = "withFilterIfRefutable$"
+ val wrap: N = "wrap"
+ val zero: N = "zero"
+ val zip: N = "zip"
+ val nothingRuntimeClass: N = "scala.runtime.Nothing$"
+ val nullRuntimeClass: N = "scala.runtime.Null$"
+
+ val synthSwitch: N = "$synthSwitch"
+
+ // unencoded operators
+ object raw {
+ final val AMP : N = "&"
+ final val BANG : N = "!"
+ final val BAR : N = "|"
+ final val DOLLAR: N = "$"
+ final val GE: N = ">="
+ final val LE: N = "<="
+ final val MINUS: N = "-"
+ final val NE: N = "!="
+ final val PLUS : N = "+"
+ final val SLASH: N = "/"
+ final val STAR : N = "*"
+ final val TILDE: N = "~"
+
+ final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG)
+ }
+
+ object specializedTypeNames {
+ final val Boolean: N = "Z"
+ final val Byte: N = "B"
+ final val Char: N = "C"
+ final val Short: N = "S"
+ final val Int: N = "I"
+ final val Long: N = "J"
+ final val Float: N = "F"
+ final val Double: N = "D"
+ final val Void: N = "V"
+ final val Object: N = "L"
+
+ final val prefix: N = "$m"
+ final val separator: N = "c"
+ final val suffix: N = "$sp"
+ }
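+
+ // Hedged example of how these pieces combine in scalac's specialization scheme (the
+ // concrete name is illustrative): a variant of `apply` specialized for Int => Int is
+ // encoded as "apply" + prefix + separator + "II" + suffix, i.e. "apply$mcII$sp".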
+
+ // value-conversion methods
+ val toByte: N = "toByte"
+ val toShort: N = "toShort"
+ val toChar: N = "toChar"
+ val toInt: N = "toInt"
+ val toLong: N = "toLong"
+ val toFloat: N = "toFloat"
+ val toDouble: N = "toDouble"
+
+ // primitive operation methods for structural types mostly
+ // overlap with the above, but not for these two.
+ val toCharacter: N = "toCharacter"
+ val toInteger: N = "toInteger"
+
+ def newLazyValSlowComputeName(lzyValName: N) = lzyValName ++ LAZY_SLOW_SUFFIX
+
+ // ASCII names for operators
+ val ADD = encode("+")
+ val AND = encode("&")
+ val ASR = encode(">>")
+ val DIV = encode("/")
+ val EQ = encode("==")
+ val EQL = encode("=")
+ val GE = encode(">=")
+ val GT = encode(">")
+ val HASHHASH = encode("##")
+ val LE = encode("<=")
+ val LSL = encode("<<")
+ val LSR = encode(">>>")
+ val LT = encode("<")
+ val MINUS = encode("-")
+ val MOD = encode("%")
+ val MUL = encode("*")
+ val NE = encode("!=")
+ val OR = encode("|")
+ val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val SUB = MINUS // ... as does SUB with PLUS
+ val XOR = encode("^")
+ val ZAND = encode("&&")
+ val ZOR = encode("||")
+
+ // unary operators
+ val UNARY_PREFIX: N = "unary_"
+ val UNARY_~ = encode("unary_~")
+ val UNARY_+ = encode("unary_+")
+ val UNARY_- = encode("unary_-")
+ val UNARY_! = encode("unary_!")
+
+ // Grouped here so Cleanup knows what tests to perform.
+ val CommonOpNames = Set[Name](OR, XOR, AND, EQ, NE)
+ val ConversionNames = Set[Name](toByte, toChar, toDouble, toFloat, toInt, toLong, toShort)
+ val BooleanOpNames = Set[Name](ZOR, ZAND, UNARY_!) ++ CommonOpNames
+ val NumberOpNames = (
+ Set[Name](ADD, SUB, MUL, DIV, MOD, LSL, LSR, ASR, LT, LE, GE, GT)
+ ++ Set(UNARY_+, UNARY_-, UNARY_!)
+ ++ ConversionNames
+ ++ CommonOpNames
+ )
+
+ val add: N = "add"
+ val complement: N = "complement"
+ val divide: N = "divide"
+ val multiply: N = "multiply"
+ val negate: N = "negate"
+ val positive: N = "positive"
+ val shiftLogicalRight: N = "shiftLogicalRight"
+ val shiftSignedLeft: N = "shiftSignedLeft"
+ val shiftSignedRight: N = "shiftSignedRight"
+ val subtract: N = "subtract"
+ val takeAnd: N = "takeAnd"
+ val takeConditionalAnd: N = "takeConditionalAnd"
+ val takeConditionalOr: N = "takeConditionalOr"
+ val takeModulo: N = "takeModulo"
+ val takeNot: N = "takeNot"
+ val takeOr: N = "takeOr"
+ val takeXor: N = "takeXor"
+ val testEqual: N = "testEqual"
+ val testGreaterOrEqualThan: N = "testGreaterOrEqualThan"
+ val testGreaterThan: N = "testGreaterThan"
+ val testLessOrEqualThan: N = "testLessOrEqualThan"
+ val testLessThan: N = "testLessThan"
+ val testNotEqual: N = "testNotEqual"
+
+ val isBoxedNumberOrBoolean: N = "isBoxedNumberOrBoolean"
+ val isBoxedNumber: N = "isBoxedNumber"
+
+ val reflPolyCacheName: N = "reflPoly$Cache"
+ val reflClassCacheName: N = "reflClass$Cache"
+ val reflParamsCacheName: N = "reflParams$Cache"
+ val reflMethodCacheName: N = "reflMethod$Cache"
+ val reflMethodName: N = "reflMethod$Method"
+
+ private val reflectionCacheNames = Set[N](
+ reflPolyCacheName,
+ reflClassCacheName,
+ reflParamsCacheName,
+ reflMethodCacheName,
+ reflMethodName
+ )
+
+ def isReflectionCacheName(name: Name) = reflectionCacheNames exists (name startsWith _)
+ }
+
+ class ScalaTermNames extends ScalaNames[TermName] {
+ protected implicit def fromString(s: String): TermName = termName(s)
+
+ @switch def syntheticParamName(i: Int): TermName = i match {
+ case 0 => x_0
+ case 1 => x_1
+ case 2 => x_2
+ case 3 => x_3
+ case 4 => x_4
+ case 5 => x_5
+ case 6 => x_6
+ case 7 => x_7
+ case 8 => x_8
+ case 9 => x_9
+ case _ => termName("x$" + i)
+ }
+
+ @switch def productAccessorName(j: Int): TermName = j match {
+ case 1 => nme._1
+ case 2 => nme._2
+ case 3 => nme._3
+ case 4 => nme._4
+ case 5 => nme._5
+ case 6 => nme._6
+ case 7 => nme._7
+ case 8 => nme._8
+ case 9 => nme._9
+ case 10 => nme._10
+ case 11 => nme._11
+ case 12 => nme._12
+ case 13 => nme._13
+ case 14 => nme._14
+ case 15 => nme._15
+ case 16 => nme._16
+ case 17 => nme._17
+ case 18 => nme._18
+ case 19 => nme._19
+ case 20 => nme._20
+ case 21 => nme._21
+ case 22 => nme._22
+ case _ => termName("_" + j)
+ }
+
+ def syntheticParamNames(num: Int): List[TermName] =
+ (0 until num).map(syntheticParamName)(breakOut)
+
+ def localDummyName(clazz: Symbol)(implicit ctx: Context): TermName =
+ LOCALDUMMY_PREFIX ++ clazz.name ++ ">"
+
+ def newBitmapName(bitmapPrefix: TermName, n: Int): TermName = bitmapPrefix ++ n.toString
+
+ def selectorName(n: Int): TermName = "_" + (n + 1)
+
+ object primitive {
+ val arrayApply: TermName = "[]apply"
+ val arrayUpdate: TermName = "[]update"
+ val arrayLength: TermName = "[]length"
+ val names: Set[Name] = Set(arrayApply, arrayUpdate, arrayLength)
+ }
+
+ def isPrimitiveName(name: Name) = primitive.names.contains(name)
+ }
+
+ class ScalaTypeNames extends ScalaNames[TypeName] {
+ protected implicit def fromString(s: String): TypeName = typeName(s)
+
+ def syntheticTypeParamName(i: Int): TypeName = "X" + i
+
+ def syntheticTypeParamNames(num: Int): List[TypeName] =
+ (0 until num).map(syntheticTypeParamName)(breakOut)
+
+ final val Conforms = encode("<:<")
+
+ final val Uninstantiated: TypeName = "?$"
+ }
+
+ abstract class JavaNames[N <: Name] extends DefinedNames[N] {
+ final val ABSTRACTkw: N = kw("abstract")
+ final val ASSERTkw: N = kw("assert")
+ final val BOOLEANkw: N = kw("boolean")
+ final val BREAKkw: N = kw("break")
+ final val BYTEkw: N = kw("byte")
+ final val CASEkw: N = kw("case")
+ final val CATCHkw: N = kw("catch")
+ final val CHARkw: N = kw("char")
+ final val CLASSkw: N = kw("class")
+ final val CONSTkw: N = kw("const")
+ final val CONTINUEkw: N = kw("continue")
+ final val DEFAULTkw: N = kw("default")
+ final val DOkw: N = kw("do")
+ final val DOUBLEkw: N = kw("double")
+ final val ELSEkw: N = kw("else")
+ final val ENUMkw: N = kw("enum")
+ final val EXTENDSkw: N = kw("extends")
+ final val FINALkw: N = kw("final")
+ final val FINALLYkw: N = kw("finally")
+ final val FLOATkw: N = kw("float")
+ final val FORkw: N = kw("for")
+ final val IFkw: N = kw("if")
+ final val GOTOkw: N = kw("goto")
+ final val IMPLEMENTSkw: N = kw("implements")
+ final val IMPORTkw: N = kw("import")
+ final val INSTANCEOFkw: N = kw("instanceof")
+ final val INTkw: N = kw("int")
+ final val INTERFACEkw: N = kw("interface")
+ final val LONGkw: N = kw("long")
+ final val NATIVEkw: N = kw("native")
+ final val NEWkw: N = kw("new")
+ final val PACKAGEkw: N = kw("package")
+ final val PRIVATEkw: N = kw("private")
+ final val PROTECTEDkw: N = kw("protected")
+ final val PUBLICkw: N = kw("public")
+ final val RETURNkw: N = kw("return")
+ final val SHORTkw: N = kw("short")
+ final val STATICkw: N = kw("static")
+ final val STRICTFPkw: N = kw("strictfp")
+ final val SUPERkw: N = kw("super")
+ final val SWITCHkw: N = kw("switch")
+ final val SYNCHRONIZEDkw: N = kw("synchronized")
+ final val THISkw: N = kw("this")
+ final val THROWkw: N = kw("throw")
+ final val THROWSkw: N = kw("throws")
+ final val TRANSIENTkw: N = kw("transient")
+ final val TRYkw: N = kw("try")
+ final val VOIDkw: N = kw("void")
+ final val VOLATILEkw: N = kw("volatile")
+ final val WHILEkw: N = kw("while")
+
+ final val BoxedBoolean: N = "java.lang.Boolean"
+ final val BoxedByte: N = "java.lang.Byte"
+ final val BoxedCharacter: N = "java.lang.Character"
+ final val BoxedDouble: N = "java.lang.Double"
+ final val BoxedFloat: N = "java.lang.Float"
+ final val BoxedInteger: N = "java.lang.Integer"
+ final val BoxedLong: N = "java.lang.Long"
+ final val BoxedNumber: N = "java.lang.Number"
+ final val BoxedShort: N = "java.lang.Short"
+ final val Class: N = "java.lang.Class"
+ final val IOOBException: N = "java.lang.IndexOutOfBoundsException"
+ final val InvTargetException: N = "java.lang.reflect.InvocationTargetException"
+ final val MethodAsObject: N = "java.lang.reflect.Method"
+ final val NPException: N = "java.lang.NullPointerException"
+ final val Object: N = "java.lang.Object"
+ final val String: N = "java.lang.String"
+ final val Throwable: N = "java.lang.Throwable"
+
+ final val ForName: N = "forName"
+ final val GetCause: N = "getCause"
+ final val GetClass: N = "getClass"
+ final val GetClassLoader: N = "getClassLoader"
+ final val GetComponentType: N = "getComponentType"
+ final val GetMethod: N = "getMethod"
+ final val Invoke: N = "invoke"
+ final val JavaLang: N = "java.lang"
+
+ final val BeanProperty: N = "scala.beans.BeanProperty"
+ final val BooleanBeanProperty: N = "scala.beans.BooleanBeanProperty"
+ final val JavaSerializable: N = "java.io.Serializable"
+ }
+
+ class JavaTermNames extends JavaNames[TermName] {
+ protected def fromString(s: String): TermName = termName(s)
+ }
+ class JavaTypeNames extends JavaNames[TypeName] {
+ protected def fromString(s: String): TypeName = typeName(s)
+ }
+
+ val nme = new ScalaTermNames
+ val tpnme = new ScalaTypeNames
+ val jnme = new JavaTermNames
+ val jtpnme = new JavaTypeNames
+
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala
new file mode 100644
index 000000000..23683608a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala
@@ -0,0 +1,306 @@
+package dotty.tools.dotc.core
+
+import Types._, Symbols._, Contexts._, Names._
+
+/** Substitution operations on types. See the corresponding `subst` and
+ * `substThis` methods on class Type for an explanation.
+ */
+trait Substituters { this: Context =>
+
+ final def subst(tp: Type, from: BindingType, to: BindingType, theMap: SubstBindingMap): Type =
+ tp match {
+ case tp: BoundType =>
+ if (tp.binder eq from) tp.copyBoundType(to.asInstanceOf[tp.BT]) else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(subst(tp.prefix, from, to, theMap))
+ case _: ThisType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst(tp.parent, from, to, theMap), tp.refinedName, subst(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstBindingMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (sym eq from) return to
+ if (sym.isStatic && !from.isStatic) tp
+ else tp.derivedSelect(subst1(tp.prefix, from, to, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst1(tp.parent, from, to, theMap), tp.refinedName, subst1(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst1(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new Subst1Map(from, to))
+ .mapOver(tp)
+ }
+ }
+
+ final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (sym eq from1) return to1
+ if (sym eq from2) return to2
+ if (sym.isStatic && !from1.isStatic && !from2.isStatic) tp
+ else tp.derivedSelect(subst2(tp.prefix, from1, to1, from2, to2, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst2(tp.parent, from1, to1, from2, to2, theMap), tp.refinedName, subst2(tp.refinedInfo, from1, to1, from2, to2, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst2(tp.alias, from1, to1, from2, to2, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new Subst2Map(from1, to1, from2, to2))
+ .mapOver(tp)
+ }
+ }
+
+ final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym) return ts.head
+ fs = fs.tail
+ ts = ts.tail
+ }
+ if (sym.isStatic && !existsStatic(from)) tp
+ else tp.derivedSelect(subst(tp.prefix, from, to, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(subst(tp.parent, from, to, theMap), tp.refinedName, subst(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(subst(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstMap(from, to))
+ .mapOver(tp)
+ }
+ }
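+  // Note: `subst1` and `subst2` above are hand-specialized variants of this
+  // list-based `subst` for the common one- and two-symbol cases; they behave
+  // the same way but avoid allocating the `from`/`to` lists.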
+
+ final def substDealias(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstDealiasMap): Type = {
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym) return ts.head
+ fs = fs.tail
+ ts = ts.tail
+ }
+ if (sym.isStatic && !existsStatic(from)) tp
+ else {
+ tp.info match {
+ case TypeAlias(alias) =>
+ val alias1 = substDealias(alias, from, to, theMap)
+ if (alias1 ne alias) return alias1
+ case _ =>
+ }
+ tp.derivedSelect(substDealias(tp.prefix, from, to, theMap))
+ }
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substDealias(tp.parent, from, to, theMap), tp.refinedName, substDealias(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substDealias(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstDealiasMap(from, to))
+ .mapOver(tp)
+ }
+ }
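+  // Unlike plain `subst`, `substDealias` also looks at the info of a visited
+  // TypeRef: if substituting inside its alias changes that alias, the
+  // substituted alias is returned directly, i.e. the reference is dealiased
+  // on the fly.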
+
+ final def substSym(tp: Type, from: List[Symbol], to: List[Symbol], theMap: SubstSymMap): Type =
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym)
+ return tp match {
+ case tp: WithFixedSym => NamedType.withFixedSym(tp.prefix, ts.head)
+ case _ => substSym(tp.prefix, from, to, theMap) select ts.head
+ }
+ fs = fs.tail
+ ts = ts.tail
+ }
+ if (sym.isStatic && !existsStatic(from)) tp
+ else tp.derivedSelect(substSym(tp.prefix, from, to, theMap))
+ case tp: ThisType =>
+ val sym = tp.cls
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head eq sym) return ts.head.asClass.thisType
+ fs = fs.tail
+ ts = ts.tail
+ }
+ tp
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substSym(tp.parent, from, to, theMap), tp.refinedName, substSym(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substSym(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstSymMap(from, to))
+ .mapOver(tp)
+ }
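+  // Informal example: `substSym(tp, List(aOld), List(aNew), null)` replaces
+  // references to the symbol `aOld` (and, for a class, `aOld.this`) by
+  // corresponding references to `aNew`; `aOld`/`aNew` are placeholder names.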
+
+ final def substThis(tp: Type, from: ClassSymbol, to: Type, theMap: SubstThisMap): Type =
+ tp match {
+ case tp: ThisType =>
+ if (tp.cls eq from) to else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStaticOwner) tp
+ else tp.derivedSelect(substThis(tp.prefix, from, to, theMap))
+ case _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substThis(tp.parent, from, to, theMap), tp.refinedName, substThis(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substThis(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstThisMap(from, to))
+ .mapOver(tp)
+ }
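+  // Informally, `substThis(tp, C, pre, null)` replaces occurrences of the
+  // self reference `C.this` in `tp` by the prefix type `pre` (with `C` and
+  // `pre` standing for an arbitrary class symbol and prefix type).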
+
+ final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap): Type =
+ tp match {
+ case tp @ RecThis(binder) =>
+ if (binder eq from) to else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(substRecThis(tp.prefix, from, to, theMap))
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substRecThis(tp.parent, from, to, theMap), tp.refinedName, substRecThis(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substRecThis(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstRecThisMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def substParam(tp: Type, from: ParamType, to: Type, theMap: SubstParamMap): Type =
+ tp match {
+ case tp: BoundType =>
+ if (tp == from) to else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(substParam(tp.prefix, from, to, theMap))
+ case _: ThisType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substParam(tp.parent, from, to, theMap), tp.refinedName, substParam(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substParam(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstParamMap(from, to))
+ .mapOver(tp)
+ }
+
+ final def substParams(tp: Type, from: BindingType, to: List[Type], theMap: SubstParamsMap): Type =
+ tp match {
+ case tp: ParamType =>
+ if (tp.binder == from) to(tp.paramNum) else tp
+ case tp: NamedType =>
+ if (tp.currentSymbol.isStatic) tp
+ else tp.derivedSelect(substParams(tp.prefix, from, to, theMap))
+ case _: ThisType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(substParams(tp.parent, from, to, theMap), tp.refinedName, substParams(tp.refinedInfo, from, to, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(substParams(tp.alias, from, to, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new SubstParamsMap(from, to))
+ .mapOver(tp)
+ }
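+  // Informal example: for a binding type `mt` with two parameters,
+  // `substParams(mt.resultType, mt, List(t0, t1), null)` replaces the
+  // parameter references #0 and #1 bound by `mt` with the placeholder
+  // types `t0` and `t1`.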
+
+ private def existsStatic(syms: List[Symbol]): Boolean = syms match {
+ case sym :: syms1 => sym.isStatic || existsStatic(syms1)
+ case nil => false
+ }
+
+ final class SubstBindingMap(from: BindingType, to: BindingType) extends DeepTypeMap {
+ def apply(tp: Type) = subst(tp, from, to, this)
+ }
+
+ final class Subst1Map(from: Symbol, to: Type) extends DeepTypeMap {
+ def apply(tp: Type) = subst1(tp, from, to, this)
+ }
+
+ final class Subst2Map(from1: Symbol, to1: Type, from2: Symbol, to2: Type) extends DeepTypeMap {
+ def apply(tp: Type) = subst2(tp, from1, to1, from2, to2, this)
+ }
+
+ final class SubstMap(from: List[Symbol], to: List[Type]) extends DeepTypeMap {
+ def apply(tp: Type): Type = subst(tp, from, to, this)
+ }
+
+ final class SubstDealiasMap(from: List[Symbol], to: List[Type]) extends DeepTypeMap {
+ override def apply(tp: Type): Type = substDealias(tp, from, to, this)
+ }
+
+ final class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends DeepTypeMap {
+ def apply(tp: Type): Type = substSym(tp, from, to, this)
+ }
+
+ final class SubstThisMap(from: ClassSymbol, to: Type) extends DeepTypeMap {
+ def apply(tp: Type): Type = substThis(tp, from, to, this)
+ }
+
+ final class SubstRecThisMap(from: Type, to: Type) extends DeepTypeMap {
+ def apply(tp: Type): Type = substRecThis(tp, from, to, this)
+ }
+
+ final class SubstParamMap(from: ParamType, to: Type) extends DeepTypeMap {
+ def apply(tp: Type) = substParam(tp, from, to, this)
+ }
+
+ final class SubstParamsMap(from: BindingType, to: List[Type]) extends DeepTypeMap {
+ def apply(tp: Type) = substParams(tp, from, to, this)
+ }
+
+ /** A map for "cycle safe substitutions" which do not force the denotation
+ * of a TypeRef unless the name matches up with one of the substituted symbols.
+ */
+ final class SafeSubstMap(from: List[Symbol], to: List[Type]) extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case tp: NamedType =>
+ try {
+ var sym: Symbol = null
+ var fs = from
+ var ts = to
+ while (fs.nonEmpty) {
+ if (fs.head.name == tp.name) {
+ if (sym == null) sym = tp.symbol
+ if (fs.head eq sym) return ts.head
+ }
+ fs = fs.tail
+ ts = ts.tail
+ }
+ tp.newLikeThis(apply(tp.prefix))
+ }
+ catch {
+ case ex: CyclicReference => tp.derivedSelect(apply(tp.prefix))
+ }
+ case _ => mapOver(tp)
+ }
+ }
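+  // SafeSubstMap is "cycle safe" because it compares names first: `tp.symbol`,
+  // which may force a denotation, is computed only once some name in `from`
+  // matches `tp.name`, and a CyclicReference thrown nevertheless is caught and
+  // answered with a best-effort `derivedSelect`.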
+}
diff --git a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
new file mode 100644
index 000000000..8b7c28e19
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -0,0 +1,2004 @@
+package dotty.tools
+package dotc
+package core
+
+import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._
+import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._, Comments._
+import NameOps._
+import Scopes.Scope
+import collection.mutable
+import collection.immutable.BitSet
+import scala.reflect.io.AbstractFile
+import Decorators.SymbolIteratorDecorator
+import ast._
+import annotation.tailrec
+import CheckRealizable._
+import util.SimpleMap
+import util.Stats
+import config.Config
+import config.Printers.{completions, incremental, noPrinter}
+
+trait SymDenotations { this: Context =>
+ import SymDenotations._
+
+  /** Factory method for SymDenotation creation. All creations
+ * should be done via this method.
+ */
+ def SymDenotation(
+ symbol: Symbol,
+ owner: Symbol,
+ name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol = NoSymbol)(implicit ctx: Context): SymDenotation = {
+ val result =
+ if (symbol.isClass)
+ if (initFlags is Package) new PackageClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin, ctx.runId)
+ else new ClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin, ctx.runId)
+ else new SymDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin)
+ result.validFor = stablePeriod
+ result
+ }
+
+ def stillValid(denot: SymDenotation): Boolean =
+ if (denot.is(ValidForever) || denot.isRefinementClass || denot.isImport) true
+ else {
+ val initial = denot.initial
+ val firstPhaseId = initial.validFor.firstPhaseId.max(ctx.typerPhase.id)
+ if ((initial ne denot) || ctx.phaseId != firstPhaseId)
+ ctx.withPhase(firstPhaseId).stillValidInOwner(initial)
+ else
+ stillValidInOwner(denot)
+ }
+
+ private[SymDenotations] def stillValidInOwner(denot: SymDenotation): Boolean = try {
+ val owner = denot.owner.denot
+ stillValid(owner) && (
+ !owner.isClass
+ || owner.isRefinementClass
+ || owner.is(Scala2x)
+ || (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol)
+ || denot.isSelfSym)
+ } catch {
+ case ex: StaleSymbol => false
+ }
+
+ /** Explain why symbol is invalid; used for debugging only */
+ def traceInvalid(denot: Denotation): Boolean = {
+ def show(d: Denotation) = s"$d#${d.symbol.id}"
+ def explain(msg: String) = {
+ println(s"${show(denot)} is invalid at ${this.period} because $msg")
+ false
+ }
+ denot match {
+ case denot: SymDenotation =>
+ def explainSym(msg: String) = explain(s"$msg\n defined = ${denot.definedPeriodsString}")
+ if (denot.is(ValidForever) || denot.isRefinementClass) true
+ else {
+ implicit val ctx: Context = this
+ val initial = denot.initial
+ if ((initial ne denot) || ctx.phaseId != initial.validFor.firstPhaseId) {
+ ctx.withPhase(initial.validFor.firstPhaseId).traceInvalid(initial)
+ } else try {
+ val owner = denot.owner.denot
+ if (!traceInvalid(owner)) explainSym("owner is invalid")
+ else if (!owner.isClass || owner.isRefinementClass || denot.isSelfSym) true
+ else if (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol) true
+ else explainSym(s"decls of ${show(owner)} are ${owner.unforcedDecls.lookupAll(denot.name).toList}, do not contain ${denot.symbol}")
+ } catch {
+ case ex: StaleSymbol => explainSym(s"$ex was thrown")
+ }
+ }
+ case _ =>
+ explain("denotation is not a SymDenotation")
+ }
+ }
+}
+
+object SymDenotations {
+
+ /** A sym-denotation represents the contents of a definition
+ * during a period.
+ */
+ class SymDenotation private[SymDenotations] (
+ symbol: Symbol,
+ ownerIfExists: Symbol,
+ final val name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol) {
+
+ //assert(symbol.id != 4940, name)
+
+ override def hasUniqueSym: Boolean = exists
+
+ /** Debug only
+ override def validFor_=(p: Period) = {
+ super.validFor_=(p)
+ }
+ */
+ if (Config.checkNoSkolemsInInfo) assertNoSkolems(initInfo)
+
+ // ------ Getting and setting fields -----------------------------
+
+ private[this] var myFlags: FlagSet = adaptFlags(initFlags)
+ private[this] var myInfo: Type = initInfo
+ private[this] var myPrivateWithin: Symbol = initPrivateWithin
+ private[this] var myAnnotations: List[Annotation] = Nil
+
+ /** The owner of the symbol; overridden in NoDenotation */
+ def owner: Symbol = ownerIfExists
+
+ /** Same as owner, except returns NoSymbol for NoSymbol */
+ def maybeOwner: Symbol = if (exists) owner else NoSymbol
+
+ /** The flag set */
+ final def flags(implicit ctx: Context): FlagSet = { ensureCompleted(); myFlags }
+
+ /** The flag set without forcing symbol completion.
+ * Should be used only for printing.
+ */
+ private[dotc] final def flagsUNSAFE = myFlags
+
+ /** Adapt flag set to this denotation's term or type nature */
+ private def adaptFlags(flags: FlagSet) = if (isType) flags.toTypeFlags else flags.toTermFlags
+
+ /** Update the flag set */
+ final def flags_=(flags: FlagSet): Unit =
+ myFlags = adaptFlags(flags)
+
+    /** Set given flag(s) of this denotation */
+ final def setFlag(flags: FlagSet): Unit = { myFlags |= flags }
+
+    /** Unset given flag(s) of this denotation */
+ final def resetFlag(flags: FlagSet): Unit = { myFlags &~= flags }
+
+ /** Set applicable flags from `flags` which is a subset of {NoInits, PureInterface} */
+ final def setApplicableFlags(flags: FlagSet): Unit = {
+ val mask = if (myFlags.is(Trait)) NoInitsInterface else NoInits
+ setFlag(flags & mask)
+ }
+
+ /** Has this denotation one of the flags in `fs` set? */
+ final def is(fs: FlagSet)(implicit ctx: Context) = {
+ (if (fs <= FromStartFlags) myFlags else flags) is fs
+ }
+
+ /** Has this denotation one of the flags in `fs` set, whereas none of the flags
+ * in `butNot` are set?
+ */
+ final def is(fs: FlagSet, butNot: FlagSet)(implicit ctx: Context) =
+ (if (fs <= FromStartFlags && butNot <= FromStartFlags) myFlags else flags) is (fs, butNot)
+
+ /** Has this denotation all of the flags in `fs` set? */
+ final def is(fs: FlagConjunction)(implicit ctx: Context) =
+ (if (fs <= FromStartFlags) myFlags else flags) is fs
+
+ /** Has this denotation all of the flags in `fs` set, whereas none of the flags
+ * in `butNot` are set?
+ */
+ final def is(fs: FlagConjunction, butNot: FlagSet)(implicit ctx: Context) =
+ (if (fs <= FromStartFlags && butNot <= FromStartFlags) myFlags else flags) is (fs, butNot)
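+    // Note: flag queries restricted to `FromStartFlags` can be answered from
+    // `myFlags` without completing the denotation; any other query goes
+    // through `flags`, which forces completion first.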
+
+ /** The type info.
+     *  The info is an instance of TypeType iff this is a type denotation.
+ * Uncompleted denotations set myInfo to a LazyType.
+ */
+ final def info(implicit ctx: Context): Type = myInfo match {
+ case myInfo: LazyType => completeFrom(myInfo); info
+ case _ => myInfo
+ }
+
+ /** The type info, or, if symbol is not yet completed, the completer */
+ final def infoOrCompleter = myInfo
+
+ /** Optionally, the info if it is completed */
+ final def unforcedInfo: Option[Type] = myInfo match {
+ case myInfo: LazyType => None
+ case _ => Some(myInfo)
+ }
+
+ private def completeFrom(completer: LazyType)(implicit ctx: Context): Unit = {
+ if (completions ne noPrinter) {
+ completions.println(i"${" " * indent}completing ${if (isType) "type" else "val"} $name")
+ indent += 1
+ }
+ if (myFlags is Touched) throw CyclicReference(this)
+ myFlags |= Touched
+
+ // completions.println(s"completing ${this.debugString}")
+ try completer.complete(this)(ctx.withPhase(validFor.firstPhaseId))
+ catch {
+ case ex: CyclicReference =>
+ completions.println(s"error while completing ${this.debugString}")
+ throw ex
+ }
+ finally
+ if (completions ne noPrinter) {
+ indent -= 1
+ completions.println(i"${" " * indent}completed $name in $owner")
+ }
+ // completions.println(s"completed ${this.debugString}")
+ }
+
+ protected[dotc] def info_=(tp: Type) = {
+ /* // DEBUG
+ def illegal: String = s"illegal type for $this: $tp"
+ if (this is Module) // make sure module invariants that allow moduleClass and sourceModule to work are kept.
+ tp match {
+ case tp: ClassInfo => assert(tp.selfInfo.isInstanceOf[TermRefBySym], illegal)
+ case tp: NamedType => assert(tp.isInstanceOf[TypeRefBySym], illegal)
+ case tp: ExprType => assert(tp.resultType.isInstanceOf[TypeRefBySym], illegal)
+ case _ =>
+ }
+ */
+ if (Config.checkNoSkolemsInInfo) assertNoSkolems(tp)
+ myInfo = tp
+ }
+
+ /** The name, except
+ * - if this is a module class, strip the module class suffix
+ * - if this is a companion object with a clash-avoiding name, strip the
+ * "avoid clash" suffix
+ */
+ def effectiveName(implicit ctx: Context) =
+ if (this is ModuleClass) name.stripModuleClassSuffix
+ else name.stripAvoidClashSuffix
+
+ /** The privateWithin boundary, NoSymbol if no boundary is given.
+ */
+ final def privateWithin(implicit ctx: Context): Symbol = { ensureCompleted(); myPrivateWithin }
+
+ /** Set privateWithin. */
+ protected[core] final def privateWithin_=(sym: Symbol): Unit =
+ myPrivateWithin = sym
+
+ /** The annotations of this denotation */
+ final def annotations(implicit ctx: Context): List[Annotation] = {
+ ensureCompleted(); myAnnotations
+ }
+
+ /** Update the annotations of this denotation */
+ private[core] final def annotations_=(annots: List[Annotation]): Unit =
+ myAnnotations = annots
+
+ /** Does this denotation have an annotation matching the given class symbol? */
+ final def hasAnnotation(cls: Symbol)(implicit ctx: Context) =
+ dropOtherAnnotations(annotations, cls).nonEmpty
+
+ /** Apply transform `f` to all annotations of this denotation */
+ final def transformAnnotations(f: Annotation => Annotation)(implicit ctx: Context): Unit =
+ annotations = annotations.mapConserve(f)
+
+ /** Keep only those annotations that satisfy `p` */
+ final def filterAnnotations(p: Annotation => Boolean)(implicit ctx: Context): Unit =
+ annotations = annotations.filterConserve(p)
+
+ /** Optionally, the annotation matching the given class symbol */
+ final def getAnnotation(cls: Symbol)(implicit ctx: Context): Option[Annotation] =
+ dropOtherAnnotations(annotations, cls) match {
+ case annot :: _ => Some(annot)
+ case nil => None
+ }
+
+ /** The same as getAnnotation, but without ensuring
+ * that the symbol carrying the annotation is completed
+ */
+ final def unforcedAnnotation(cls: Symbol)(implicit ctx: Context): Option[Annotation] =
+ dropOtherAnnotations(myAnnotations, cls) match {
+ case annot :: _ => Some(annot)
+ case nil => None
+ }
+
+ /** Add given annotation to the annotations of this denotation */
+ final def addAnnotation(annot: Annotation): Unit =
+ annotations = annot :: myAnnotations
+
+ /** Remove annotation with given class from this denotation */
+ final def removeAnnotation(cls: Symbol)(implicit ctx: Context): Unit =
+ annotations = myAnnotations.filterNot(_ matches cls)
+
+ /** Remove any annotations with same class as `annot`, and add `annot` */
+ final def updateAnnotation(annot: Annotation)(implicit ctx: Context): Unit = {
+ removeAnnotation(annot.symbol)
+ addAnnotation(annot)
+ }
+
+ /** Add all given annotations to this symbol */
+ final def addAnnotations(annots: TraversableOnce[Annotation])(implicit ctx: Context): Unit =
+ annots.foreach(addAnnotation)
+
+ @tailrec
+ private def dropOtherAnnotations(anns: List[Annotation], cls: Symbol)(implicit ctx: Context): List[Annotation] = anns match {
+ case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls)
+ case Nil => Nil
+ }
+
+ /** The denotation is completed: info is not a lazy type and attributes have defined values */
+ final def isCompleted: Boolean = !myInfo.isInstanceOf[LazyType]
+
+ /** The denotation is in train of being completed */
+ final def isCompleting: Boolean = (myFlags is Touched) && !isCompleted
+
+ /** The completer of this denotation. @pre: Denotation is not yet completed */
+ final def completer: LazyType = myInfo.asInstanceOf[LazyType]
+
+ /** Make sure this denotation is completed */
+ final def ensureCompleted()(implicit ctx: Context): Unit = info
+
+ /** The symbols defined in this class or object.
+ * Careful! This does not force the type, so is compilation order dependent.
+ * This method should be used only in the following circumstances:
+ *
+ * 1. When accessing type parameters or type parameter accessors (both are entered before
+ * completion).
+ * 2. When obtaining the current scope in order to enter, rename or delete something there.
+     *  3. When playing it safe in order not to raise CyclicReferences, e.g. for printing things
+ * or taking more efficient shortcuts (e.g. the stillValid test).
+ */
+ final def unforcedDecls(implicit ctx: Context): Scope = myInfo match {
+ case cinfo: LazyType =>
+ val knownDecls = cinfo.decls
+ if (knownDecls ne EmptyScope) knownDecls
+ else { completeFrom(cinfo); unforcedDecls } // complete-once
+ case _ => info.decls
+ }
+
+ /** If this is a package class, the symbols entered in it
+     *  before it is completed. (This is needed to eagerly enter synthetic
+     *  aliases such as AnyRef into a package class without forcing it.)
+ * Right now, the only usage is for the AnyRef alias in Definitions.
+ */
+ final private[core] def currentPackageDecls(implicit ctx: Context): MutableScope = myInfo match {
+ case pinfo: SymbolLoaders # PackageLoader => pinfo.currentDecls
+ case _ => unforcedDecls.openForMutations
+ }
+
+ // ------ Names ----------------------------------------------
+
+ /** The expanded name of this denotation. */
+ final def expandedName(implicit ctx: Context) =
+ if (is(ExpandedName) || isConstructor) name
+ else {
+ def legalize(name: Name): Name = // JVM method names may not contain `<' or `>' characters
+ if (is(Method)) name.replace('<', '(').replace('>', ')') else name
+ legalize(name.expandedName(initial.owner))
+ }
+ // need to use initial owner to disambiguate, as multiple private symbols with the same name
+ // might have been moved from different origins into the same class
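+      // Informally, the expanded name combines the (initial) owner's name, an
+      // expansion separator and the simple name; the exact scheme is defined
+      // by `expandedName` in NameOps.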
+
+ /** The name with which the denoting symbol was created */
+ final def originalName(implicit ctx: Context) = {
+ val d = initial
+ if (d is ExpandedName) d.name.unexpandedName else d.name // !!!DEBUG, was: effectiveName
+ }
+
+ /** The encoded full path name of this denotation, where outer names and inner names
+ * are separated by `separator` strings.
+ * Never translates expansions of operators back to operator symbol.
+ * Drops package objects. Represents terms in the owner chain by a simple `~`.
+ * (Note: scalac uses nothing to represent terms, which can cause name clashes
+ * between same-named definitions in different enclosing methods. Before this commit
+ * we used `$' but this can cause ambiguities with the class separator '$').
+ * A separator "" means "flat name"; the real separator in this case is "$" and
+ * enclosing packages do not form part of the name.
+ */
+ def fullNameSeparated(separator: String)(implicit ctx: Context): Name = {
+ var sep = separator
+ var stopAtPackage = false
+ if (sep.isEmpty) {
+ sep = "$"
+ stopAtPackage = true
+ }
+ if (symbol == NoSymbol ||
+ owner == NoSymbol ||
+ owner.isEffectiveRoot ||
+ stopAtPackage && owner.is(PackageClass)) name
+ else {
+ var encl = owner
+ while (!encl.isClass && !encl.isPackageObject) {
+ encl = encl.owner
+ sep += "~"
+ }
+ if (owner.is(ModuleClass, butNot = Package) && sep == "$") sep = "" // duplicate scalac's behavior: don't write a double '$$' for module class members.
+ val fn = encl.fullNameSeparated(separator) ++ sep ++ name
+ if (isType) fn.toTypeName else fn.toTermName
+ }
+ }
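+    // Informal examples: for a class B nested in class A in package p,
+    // fullNameSeparated(".") yields p.A.B, while fullNameSeparated("") (the
+    // flat name) yields A$B, with the enclosing package dropped.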
+
+ /** The encoded flat name of this denotation, where joined names are separated by `separator` characters. */
+ def flatName(implicit ctx: Context): Name = fullNameSeparated("")
+
+ /** `fullName` where `.' is the separator character */
+ def fullName(implicit ctx: Context): Name = fullNameSeparated(".")
+
+ // ----- Tests -------------------------------------------------
+
+ /** Is this denotation a type? */
+ override def isType: Boolean = name.isTypeName
+
+ /** Is this denotation a class? */
+ final def isClass: Boolean = isInstanceOf[ClassDenotation]
+
+ /** Is this denotation a non-trait class? */
+ final def isRealClass(implicit ctx: Context) = isClass && !is(Trait)
+
+ /** Cast to class denotation */
+ final def asClass: ClassDenotation = asInstanceOf[ClassDenotation]
+
+ /** is this symbol the result of an erroneous definition? */
+ def isError: Boolean = false
+
+ /** Make denotation not exist */
+ final def markAbsent(): Unit =
+ myInfo = NoType
+
+ /** Is symbol known to not exist? */
+ final def isAbsent(implicit ctx: Context): Boolean =
+ myInfo == NoType ||
+ (this is (ModuleVal, butNot = Package)) && moduleClass.isAbsent
+
+ /** Is this symbol the root class or its companion object? */
+ final def isRoot: Boolean =
+ (name.toTermName == nme.ROOT || name == nme.ROOTPKG) && (owner eq NoSymbol)
+
+ /** Is this symbol the empty package class or its companion object? */
+ final def isEmptyPackage(implicit ctx: Context): Boolean =
+ name.toTermName == nme.EMPTY_PACKAGE && owner.isRoot
+
+ /** Is this symbol the empty package class or its companion object? */
+ final def isEffectiveRoot(implicit ctx: Context) = isRoot || isEmptyPackage
+
+ /** Is this symbol an anonymous class? */
+ final def isAnonymousClass(implicit ctx: Context): Boolean =
+ isClass && (initial.name startsWith tpnme.ANON_CLASS)
+
+ final def isAnonymousFunction(implicit ctx: Context) =
+ this.symbol.is(Method) && (initial.name startsWith nme.ANON_FUN)
+
+ final def isAnonymousModuleVal(implicit ctx: Context) =
+ this.symbol.is(ModuleVal) && (initial.name startsWith nme.ANON_CLASS)
+
+ /** Is this a companion class method or companion object method?
+ * These methods are generated by Symbols#synthesizeCompanionMethod
+ * and used in SymDenotations#companionClass and
+     *  SymDenotations#companionModule.
+ */
+ final def isCompanionMethod(implicit ctx: Context) =
+ name.toTermName == nme.COMPANION_CLASS_METHOD ||
+ name.toTermName == nme.COMPANION_MODULE_METHOD
+
+    /** Is this a synthetic method that represents conversions between representations of a value class?
+ * These methods are generated in ExtensionMethods
+ * and used in ElimErasedValueType.
+ */
+ final def isValueClassConvertMethod(implicit ctx: Context) =
+ name.toTermName == nme.U2EVT ||
+ name.toTermName == nme.EVT2U
+
+ /** Is symbol a primitive value class? */
+ def isPrimitiveValueClass(implicit ctx: Context) =
+ maybeOwner == defn.ScalaPackageClass && defn.ScalaValueClasses().contains(symbol)
+
+ /** Is symbol a primitive numeric value class? */
+ def isNumericValueClass(implicit ctx: Context) =
+ maybeOwner == defn.ScalaPackageClass && defn.ScalaNumericValueClasses().contains(symbol)
+
+ /** Is symbol a phantom class for which no runtime representation exists? */
+ def isPhantomClass(implicit ctx: Context) = defn.PhantomClasses contains symbol
+
+ /** Is this symbol a class representing a refinement? These classes
+ * are used only temporarily in Typer and Unpickler as an intermediate
+ * step for creating Refinement types.
+ */
+ final def isRefinementClass(implicit ctx: Context): Boolean =
+ name.decode == tpnme.REFINE_CLASS
+
+ /** Is this symbol a package object or its module class? */
+ def isPackageObject(implicit ctx: Context): Boolean = {
+ val poName = if (isType) nme.PACKAGE_CLS else nme.PACKAGE
+ (name.toTermName == poName) && (owner is Package) && (this is Module)
+ }
+
+ /** Is this symbol an abstract type? */
+ final def isAbstractType(implicit ctx: Context) = isType && (this is Deferred)
+
+ /** Is this symbol an alias type? */
+ final def isAliasType(implicit ctx: Context) = isAbstractOrAliasType && !(this is Deferred)
+
+ /** Is this symbol an abstract or alias type? */
+ final def isAbstractOrAliasType = isType & !isClass
+
+ /** Is this the denotation of a self symbol of some class?
+ * This is the case if one of two conditions holds:
+ * 1. It is the symbol referred to in the selfInfo part of the ClassInfo
+ * which is the type of this symbol's owner.
+     *  2. This symbol is owned by a class, its selfInfo field refers to a type
+ * (indicating the self definition does not introduce a name), and the
+ * symbol's name is "_".
+ * TODO: Find a more robust way to characterize self symbols, maybe by
+ * spending a Flag on them?
+ */
+ final def isSelfSym(implicit ctx: Context) = owner.infoOrCompleter match {
+ case ClassInfo(_, _, _, _, selfInfo) =>
+ selfInfo == symbol ||
+ selfInfo.isInstanceOf[Type] && name == nme.WILDCARD
+ case _ => false
+ }
+
+ /** Is this definition contained in `boundary`?
+ * Same as `ownersIterator contains boundary` but more efficient.
+ */
+ final def isContainedIn(boundary: Symbol)(implicit ctx: Context): Boolean = {
+ def recur(sym: Symbol): Boolean =
+ if (sym eq boundary) true
+ else if (sym eq NoSymbol) false
+ else if ((sym is PackageClass) && !(boundary is PackageClass)) false
+ else recur(sym.owner)
+ recur(symbol)
+ }
+
+ final def isProperlyContainedIn(boundary: Symbol)(implicit ctx: Context): Boolean =
+ symbol != boundary && isContainedIn(boundary)
+
+ /** Is this denotation static (i.e. with no outer instance)? */
+ final def isStatic(implicit ctx: Context) =
+ (this is JavaStatic) || this.exists && owner.isStaticOwner || this.isRoot
+
+ /** Is this a package class or module class that defines static symbols? */
+ final def isStaticOwner(implicit ctx: Context): Boolean =
+ (this is PackageClass) || (this is ModuleClass) && isStatic
+
+ /** Is this denotation defined in the same scope and compilation unit as that symbol? */
+ final def isCoDefinedWith(that: Symbol)(implicit ctx: Context) =
+ (this.effectiveOwner == that.effectiveOwner) &&
+ ( !(this.effectiveOwner is PackageClass)
+ || this.isAbsent || that.isAbsent
+ || { // check if they are defined in the same file(or a jar)
+ val thisFile = this.symbol.associatedFile
+ val thatFile = that.symbol.associatedFile
+ ( thisFile == null
+ || thatFile == null
+ || thisFile.path == thatFile.path // Cheap possibly wrong check, then expensive normalization
+ || thisFile.canonicalPath == thatFile.canonicalPath
+ )
+ }
+ )
+
+ /** Is this a denotation of a stable term (or an arbitrary type)? */
+ final def isStable(implicit ctx: Context) =
+ isType || is(Stable) || !(is(UnstableValue) || info.isInstanceOf[ExprType])
+
+ /** Is this a "real" method? A real method is a method which is:
+ * - not an accessor
+ * - not a label
+ * - not an anonymous function
+ * - not a companion method
+ */
+ final def isRealMethod(implicit ctx: Context) =
+ this.is(Method, butNot = AccessorOrLabel) &&
+ !isAnonymousFunction &&
+ !isCompanionMethod
+
+ /** Is this a getter? */
+ final def isGetter(implicit ctx: Context) =
+ (this is Accessor) && !originalName.isSetterName && !originalName.isScala2LocalSuffix
+
+ /** Is this a setter? */
+ final def isSetter(implicit ctx: Context) =
+ (this is Accessor) &&
+ originalName.isSetterName &&
+ (!isCompleted || info.firstParamTypes.nonEmpty) // to avoid being fooled by var x_= : Unit = ...
+
+ /** is this a symbol representing an import? */
+ final def isImport = name == nme.IMPORT
+
+ /** is this the constructor of a class? */
+ final def isClassConstructor = name == nme.CONSTRUCTOR
+
+ /** Is this the constructor of a trait? */
+ final def isImplClassConstructor = name == nme.TRAIT_CONSTRUCTOR
+
+ /** Is this the constructor of a trait or a class */
+ final def isConstructor = name.isConstructorName
+
+    /** Is this a local template dummy? */
+ final def isLocalDummy: Boolean = name.isLocalDummyName
+
+ /** Does this symbol denote the primary constructor of its enclosing class? */
+ final def isPrimaryConstructor(implicit ctx: Context) =
+ isConstructor && owner.primaryConstructor == symbol
+
+ /** Does this symbol denote the static constructor of its enclosing class? */
+ final def isStaticConstructor(implicit ctx: Context) =
+ name.isStaticConstructorName
+
+ /** Is this a subclass of the given class `base`? */
+ def isSubClass(base: Symbol)(implicit ctx: Context) = false
+
+ /** Is this a subclass of `base`,
+ * and is the denoting symbol also different from `Null` or `Nothing`?
+ * @note erroneous classes are assumed to derive from all other classes
+ * and all classes derive from them.
+ */
+ def derivesFrom(base: Symbol)(implicit ctx: Context) = false
+
+ /** Is this symbol a class that extends `AnyVal`? */
+ final def isValueClass(implicit ctx: Context): Boolean = {
+ val di = initial
+ di.isClass &&
+ di.derivesFrom(defn.AnyValClass)(ctx.withPhase(di.validFor.firstPhaseId))
+ // We call derivesFrom at the initial phase both because AnyVal does not exist
+ // after Erasure and to avoid cyclic references caused by forcing denotations
+ }
+
+    /** Is this symbol a class, references to which are supertypes of null? */
+ final def isNullableClass(implicit ctx: Context): Boolean =
+ isClass && !isValueClass && !(this is ModuleClass) && symbol != defn.NothingClass
+
+ /** Is this definition accessible as a member of tree with type `pre`?
+ * @param pre The type of the tree from which the selection is made
+ * @param superAccess Access is via super
+ * Everything is accessible if `pre` is `NoPrefix`.
+ * A symbol with type `NoType` is not accessible for any other prefix.
+ */
+ final def isAccessibleFrom(pre: Type, superAccess: Boolean = false, whyNot: StringBuffer = null)(implicit ctx: Context): Boolean = {
+
+ /** Are we inside definition of `boundary`? */
+ def accessWithin(boundary: Symbol) =
+ ctx.owner.isContainedIn(boundary) &&
+ (!(this is JavaDefined) || // disregard package nesting for Java
+ ctx.owner.enclosingPackageClass == boundary.enclosingPackageClass)
+
+ /** Are we within definition of linked class of `boundary`? */
+ def accessWithinLinked(boundary: Symbol) = {
+ val linked = boundary.linkedClass
+ (linked ne NoSymbol) && accessWithin(linked)
+ }
+
+      /** Is `pre` the same as C.thisType, where C is exactly the owner of this symbol,
+ * or, if this symbol is protected, a subclass of the owner?
+ */
+ def isCorrectThisType(pre: Type): Boolean = pre match {
+ case pre: ThisType =>
+ (pre.cls eq owner) || (this is Protected) && pre.cls.derivesFrom(owner)
+ case pre: TermRef =>
+ pre.symbol.moduleClass == owner
+ case _ =>
+ false
+ }
+
+ /** Is protected access to target symbol permitted? */
+ def isProtectedAccessOK = {
+ def fail(str: => String): Boolean = {
+ if (whyNot != null) whyNot append str
+ false
+ }
+ val cls = owner.enclosingSubClass
+ if (!cls.exists)
+ fail(
+ i"""
+ | Access to protected $this not permitted because enclosing ${ctx.owner.enclosingClass.showLocated}
+ | is not a subclass of ${owner.showLocated} where target is defined""")
+ else if (
+ !( isType // allow accesses to types from arbitrary subclasses fixes #4737
+ || pre.baseTypeRef(cls).exists // ??? why not use derivesFrom ???
+ || isConstructor
+ || (owner is ModuleClass) // don't perform this check for static members
+ ))
+ fail(
+ i"""
+ | Access to protected ${symbol.show} not permitted because prefix type ${pre.widen.show}
+ | does not conform to ${cls.showLocated} where the access takes place""")
+ else true
+ }
+
+ if (pre eq NoPrefix) true
+ else if (info eq NoType) false
+ else {
+ val boundary = accessBoundary(owner)
+
+ ( boundary.isTerm
+ || boundary.isRoot
+ || (accessWithin(boundary) || accessWithinLinked(boundary)) &&
+ ( !(this is Local)
+ || (owner is ImplClass) // allow private local accesses to impl class members
+ || isCorrectThisType(pre)
+ )
+ || (this is Protected) &&
+ ( superAccess
+ || pre.isInstanceOf[ThisType]
+ || ctx.phase.erasedTypes
+ || isProtectedAccessOK
+ )
+ )
+ }
+ }
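+    // Informal summary of the checks above: everything is accessible via a
+    // NoPrefix prefix; a symbol whose access boundary is a term or the root
+    // is accessible everywhere; otherwise access must happen within the
+    // boundary (or its linked class), with extra conditions for Local
+    // (private[this]-style) symbols and for Protected symbols.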
+
+ /** Do members of this symbol need translation via asSeenFrom when
+ * accessed via prefix `pre`?
+ */
+ def membersNeedAsSeenFrom(pre: Type)(implicit ctx: Context) =
+ !( this.isTerm
+ || this.isStaticOwner
+ || ctx.erasedTypes
+ || (pre eq NoPrefix) || (pre eq thisType)
+ )
+
+ /** Is this symbol concrete, or that symbol deferred? */
+ def isAsConcrete(that: Symbol)(implicit ctx: Context): Boolean =
+ !(this is Deferred) || (that is Deferred)
+
+ /** Does this symbol have defined or inherited default parameters? */
+ def hasDefaultParams(implicit ctx: Context): Boolean =
+ if (this is HasDefaultParams) true
+ else if (this is NoDefaultParams) false
+ else {
+ val result = allOverriddenSymbols exists (_.hasDefaultParams)
+ setFlag(if (result) InheritedDefaultParams else NoDefaultParams)
+ result
+ }
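+    // The answer is cached by setting a flag based on the computed result, so
+    // the potentially costly search through overridden symbols runs at most
+    // once per denotation.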
+
+ /** Symbol is an owner that would be skipped by effectiveOwner. Skipped are
+ * - package objects
+ * - labels
+ * - non-lazy valdefs
+ */
+ def isWeakOwner(implicit ctx: Context): Boolean =
+ isPackageObject ||
+ isTerm && !is(MethodOrLazy, butNot = Label) && !isLocalDummy
+
+ // def isOverridable: Boolean = !!! need to enforce that classes cannot be redefined
+ def isSkolem: Boolean = name == nme.SKOLEM
+
+ def isInlineMethod(implicit ctx: Context): Boolean = is(InlineMethod, butNot = Accessor)
+
+ // ------ access to related symbols ---------------------------------
+
+ /* Modules and module classes are represented as follows:
+ *
+ * object X extends Y { def f() }
+ *
+ * <module> lazy val X: X$ = new X$
+ * <module> class X$ extends Y { this: X.type => def f() }
+ *
+     * During completion, references to moduleClass and sourceModule are stored in
+ * the completers.
+ */
+ /** The class implementing this module, NoSymbol if not applicable. */
+ final def moduleClass(implicit ctx: Context): Symbol = {
+ def notFound = { println(s"missing module class for $name: $myInfo"); NoSymbol }
+ if (this is ModuleVal)
+ myInfo match {
+ case info: TypeRef => info.symbol
+ case ExprType(info: TypeRef) => info.symbol // needed after uncurry, when module terms might be accessor defs
+ case info: LazyType => info.moduleClass
+ case t: MethodType =>
+ t.resultType match {
+ case info: TypeRef => info.symbol
+ case _ => notFound
+ }
+ case _ => notFound
+ }
+ else NoSymbol
+ }
+
+ /** The module implemented by this module class, NoSymbol if not applicable. */
+ final def sourceModule(implicit ctx: Context): Symbol = myInfo match {
+ case ClassInfo(_, _, _, _, selfType) if this is ModuleClass =>
+ selfType match {
+ case selfType: TermRef => selfType.symbol
+ case selfType: Symbol => selfType.info.asInstanceOf[TermRef].symbol
+ }
+ case info: LazyType =>
+ info.sourceModule
+ case _ =>
+ NoSymbol
+ }
+
+ /** The field accessed by this getter or setter, or if it does not exist, the getter */
+ def accessedFieldOrGetter(implicit ctx: Context): Symbol = {
+ val fieldName = if (isSetter) name.asTermName.getterName else name
+ val d = owner.info.decl(fieldName)
+ val field = d.suchThat(!_.is(Method)).symbol
+ def getter = d.suchThat(_.info.isParameterless).symbol
+ field orElse getter
+ }
+
+ /** The field accessed by a getter or setter, or
+     *  if it does not exist, the getter of a setter, or
+     *  if that does not exist, the symbol itself.
+ */
+ def underlyingSymbol(implicit ctx: Context): Symbol =
+ if (is(Accessor)) accessedFieldOrGetter orElse symbol else symbol
+
+ /** The chain of owners of this denotation, starting with the denoting symbol itself */
+ final def ownersIterator(implicit ctx: Context) = new Iterator[Symbol] {
+ private[this] var current = symbol
+ def hasNext = current.exists
+ def next: Symbol = {
+ val result = current
+ current = current.owner
+ result
+ }
+ }
+
+ /** If this is a weak owner, its owner, otherwise the denoting symbol. */
+ final def skipWeakOwner(implicit ctx: Context): Symbol =
+ if (isWeakOwner) owner.skipWeakOwner else symbol
+
+ /** The owner, skipping package objects, labels and non-lazy valdefs. */
+ final def effectiveOwner(implicit ctx: Context) = owner.skipWeakOwner
+
+ /** The class containing this denotation.
+     *  If this denotation is already a class, return itself.
+ * Definitions flagged with InSuperCall are treated specially.
+ * Their enclosing class is not the lexically enclosing class,
+ * but in turn the enclosing class of the latter. This reflects
+ * the context created by `Context#superCallContext`, `Context#thisCallArgContext`
+ * for these definitions.
+ *
+     *  Note that, as packages have ClassSymbols, top-level classes will have an `enclosingClass`
+     *  with the Package flag set.
+ */
+ final def enclosingClass(implicit ctx: Context): Symbol = {
+ def enclClass(sym: Symbol, skip: Boolean): Symbol = {
+ def newSkip = sym.is(InSuperCall) || sym.is(JavaStaticTerm)
+ if (!sym.exists)
+ NoSymbol
+ else if (sym.isClass)
+ if (skip) enclClass(sym.owner, newSkip) else sym
+ else
+ enclClass(sym.owner, skip || newSkip)
+ }
+ enclClass(symbol, false)
+ }
+
+ /** A class that in source code would be lexically enclosing */
+ final def lexicallyEnclosingClass(implicit ctx: Context): Symbol =
+ if (!exists || isClass) symbol else owner.lexicallyEnclosingClass
+
+ /** A symbol is effectively final if it cannot be overridden in a subclass */
+ final def isEffectivelyFinal(implicit ctx: Context): Boolean =
+ is(PrivateOrFinalOrInline) || !owner.isClass || owner.is(ModuleOrFinal) || owner.isAnonymousClass
+
+ /** The class containing this denotation which has the given effective name. */
+ final def enclosingClassNamed(name: Name)(implicit ctx: Context): Symbol = {
+ val cls = enclosingClass
+ if (cls.effectiveName == name || !cls.exists) cls else cls.owner.enclosingClassNamed(name)
+ }
+
+ /** The closest enclosing method containing this definition.
+ * A local dummy owner is mapped to the primary constructor of the class.
+ */
+ final def enclosingMethod(implicit ctx: Context): Symbol =
+ if (this is (Method, butNot = Label)) symbol
+ else if (this.isClass) primaryConstructor
+ else if (this.exists) owner.enclosingMethod
+ else NoSymbol
+
+ /** The top-level class containing this denotation,
+ * except for a toplevel module, where its module class is returned.
+ */
+ final def topLevelClass(implicit ctx: Context): Symbol = {
+ def topLevel(d: SymDenotation): Symbol = {
+ if (d.isEffectiveRoot || (d is PackageClass) || (d.owner is PackageClass)) d.symbol
+ else topLevel(d.owner)
+ }
+ val sym = topLevel(this)
+ if (sym.isClass) sym else sym.moduleClass
+ }
+
+ /** The package class containing this denotation */
+ final def enclosingPackageClass(implicit ctx: Context): Symbol =
+ if (this is PackageClass) symbol else owner.enclosingPackageClass
+
+ /** The module object with the same (term-) name as this class or module class,
+ * and which is also defined in the same scope and compilation unit.
+ * NoSymbol if this module does not exist.
+ */
+ final def companionModule(implicit ctx: Context): Symbol = {
+ if (this.flagsUNSAFE is Flags.Module) this.sourceModule
+ else {
+ val companionMethod = info.decls.denotsNamed(nme.COMPANION_MODULE_METHOD, selectPrivate).first
+ if (companionMethod.exists)
+ companionMethod.info.resultType.classSymbol.sourceModule
+ else
+ NoSymbol
+ }
+ }
+
+
+ /** The class with the same (type-) name as this module or module class,
+ * and which is also defined in the same scope and compilation unit.
+ * NoSymbol if this class does not exist.
+ */
+ final def companionClass(implicit ctx: Context): Symbol = {
+ val companionMethod = info.decls.denotsNamed(nme.COMPANION_CLASS_METHOD, selectPrivate).first
+
+ if (companionMethod.exists)
+ companionMethod.info.resultType.classSymbol
+ else
+ NoSymbol
+ }
+
+ final def scalacLinkedClass(implicit ctx: Context): Symbol =
+ if (this is ModuleClass) companionNamed(effectiveName.toTypeName)
+ else if (this.isClass) companionNamed(effectiveName.moduleClassName).sourceModule.moduleClass
+ else NoSymbol
+
+
+ /** Find companion class symbol with given name, or NoSymbol if none exists.
+ * Three alternative strategies:
+ * 1. If owner is a class, look in its members, otherwise
+ * 2. If current compilation unit has a typed tree,
+ * determine the defining statement sequence and search its trees, otherwise
+ * 3. If context has an enclosing scope which defines this symbol,
+ * lookup its companion in the same scope.
+ */
+ private def companionNamed(name: TypeName)(implicit ctx: Context): Symbol =
+ if (owner.isClass)
+ owner.info.decl(name).suchThat(_.isCoDefinedWith(symbol)).symbol
+ else if (!owner.exists || ctx.compilationUnit == null)
+ NoSymbol
+ else if (!ctx.compilationUnit.tpdTree.isEmpty)
+ tpd.definingStats(symbol).iterator
+ .map(tpd.definedSym)
+ .find(_.name == name)
+ .getOrElse(NoSymbol)
+ else if (ctx.scope == null)
+ NoSymbol
+ else if (ctx.scope.lookup(this.name) == symbol)
+ ctx.scope.lookup(name)
+ else
+ companionNamed(name)(ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next)
+
+ /** If this is a class, the module class of its companion object.
+ * If this is a module class, its companion class.
+ * NoSymbol otherwise.
+ */
+ final def linkedClass(implicit ctx: Context): Symbol =
+ if (this is ModuleClass) companionClass
+ else if (this.isClass) companionModule.moduleClass
+ else NoSymbol
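+    // Informal example: given `class C` and a companion `object C`, the
+    // linkedClass of class C is the module class C$, and the linkedClass of
+    // that module class is class C again.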
+
+ /** The class that encloses the owner of the current context
+ * and that is a subclass of this class. NoSymbol if no such class exists.
+ */
+ final def enclosingSubClass(implicit ctx: Context) =
+ ctx.owner.ownersIterator.findSymbol(_.isSubClass(symbol))
+
+ /** The non-private symbol whose name and type matches the type of this symbol
+ * in the given class.
+ * @param inClass The class containing the result symbol's definition
+ * @param site The base type from which member types are computed
+ *
+ * inClass <-- find denot.symbol class C { <-- symbol is here
+ *
+ * site: Subtype of both inClass and C
+ */
+ final def matchingDecl(inClass: Symbol, site: Type)(implicit ctx: Context): Symbol = {
+ var denot = inClass.info.nonPrivateDecl(name)
+ if (denot.isTerm) // types of the same name always match
+ denot = denot.matchingDenotation(site, site.memberInfo(symbol))
+ denot.symbol
+ }
+
+ /** The non-private member of `site` whose name and type matches the type of this symbol
+ */
+ final def matchingMember(site: Type)(implicit ctx: Context): Symbol = {
+ var denot = site.nonPrivateMember(name)
+ if (denot.isTerm) // types of the same name always match
+ denot = denot.matchingDenotation(site, site.memberInfo(symbol))
+ denot.symbol
+ }
+
+ /** If false, this symbol cannot possibly participate in an override,
+ * either as overrider or overridee.
+ */
+ final def canMatchInheritedSymbols(implicit ctx: Context): Boolean =
+ maybeOwner.isClass && memberCanMatchInheritedSymbols
+
+ /** If false, this class member cannot possibly participate in an override,
+ * either as overrider or overridee.
+ */
+ final def memberCanMatchInheritedSymbols(implicit ctx: Context): Boolean =
+ !isConstructor && !is(Private)
+
+ /** The symbol, in class `inClass`, that is overridden by this denotation. */
+ final def overriddenSymbol(inClass: ClassSymbol)(implicit ctx: Context): Symbol =
+ if (!canMatchInheritedSymbols && (owner ne inClass)) NoSymbol
+ else matchingDecl(inClass, owner.thisType)
+
+    /** All symbols overridden by this denotation. */
+ final def allOverriddenSymbols(implicit ctx: Context): Iterator[Symbol] =
+ if (!canMatchInheritedSymbols) Iterator.empty
+ else overriddenFromType(owner.info)
+
+ /** Returns all matching symbols defined in parents of the selftype. */
+ final def extendedOverriddenSymbols(implicit ctx: Context): Iterator[Symbol] =
+ if (!canMatchInheritedSymbols) Iterator.empty
+ else overriddenFromType(owner.asClass.classInfo.selfType)
+
+ private def overriddenFromType(tp: Type)(implicit ctx: Context): Iterator[Symbol] =
+ tp.baseClasses.tail.iterator map overriddenSymbol filter (_.exists)
+
+    /** The symbol overriding this symbol in the given subclass `inClass`.
+     *
+     *  @param inClass is a subclass of this symbol's owner
+ */
+ final def overridingSymbol(inClass: ClassSymbol)(implicit ctx: Context): Symbol =
+ if (canMatchInheritedSymbols) matchingDecl(inClass, inClass.thisType)
+ else NoSymbol
+
+ /** The symbol accessed by a super in the definition of this symbol when
+ * seen from class `base`. This symbol is always concrete.
+ * pre: `this.owner` is in the base class sequence of `base`.
+ */
+ final def superSymbolIn(base: Symbol)(implicit ctx: Context): Symbol = {
+ def loop(bcs: List[ClassSymbol]): Symbol = bcs match {
+ case bc :: bcs1 =>
+ val sym = matchingDecl(bcs.head, base.thisType)
+ .suchThat(alt => !(alt is Deferred)).symbol
+ if (sym.exists) sym else loop(bcs.tail)
+ case _ =>
+ NoSymbol
+ }
+ loop(base.info.baseClasses.dropWhile(owner != _).tail)
+ }
+
+ /** A member of class `base` is incomplete if
+ * (1) it is declared deferred or
+ * (2) it is abstract override and its super symbol in `base` is
+ * nonexistent or incomplete.
+ */
+ final def isIncompleteIn(base: Symbol)(implicit ctx: Context): Boolean =
+ (this is Deferred) ||
+ (this is AbsOverride) && {
+ val supersym = superSymbolIn(base)
+ supersym == NoSymbol || supersym.isIncompleteIn(base)
+ }
+
+ /** The class or term symbol up to which this symbol is accessible,
+     *  or RootClass if it is public. As Java protected statics are
+     *  otherwise completely inaccessible in Scala, they are treated
+ * as public.
+ * @param base The access boundary to assume if this symbol is protected
+ */
+ final def accessBoundary(base: Symbol)(implicit ctx: Context): Symbol = {
+ val fs = flags
+ if (fs is Private) owner
+ else if (fs is StaticProtected) defn.RootClass
+ else if (privateWithin.exists && !ctx.phase.erasedTypes) privateWithin
+ else if (fs is Protected) base
+ else defn.RootClass
+ }
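+    // Informal examples: for a plain `private` member the boundary is its
+    // owner; for `private[p]`/`protected[p]` it is (before erasure) the
+    // enclosing package or class `p`; for `protected` it is the given `base`;
+    // public members and Java protected statics get RootClass, i.e. no
+    // restriction.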
+
+ /** The primary constructor of a class or trait, NoSymbol if not applicable. */
+ def primaryConstructor(implicit ctx: Context): Symbol = NoSymbol
+
+ // ----- type-related ------------------------------------------------
+
+ /** The type parameters of a class symbol, Nil for all other symbols */
+ def typeParams(implicit ctx: Context): List[TypeSymbol] = Nil
+
+ /** The named type parameters declared or inherited by this symbol */
+ def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = Set()
+
+ /** The type This(cls), where cls is this class, NoPrefix for all other symbols */
+ def thisType(implicit ctx: Context): Type = NoPrefix
+
+ override def typeRef(implicit ctx: Context): TypeRef =
+ TypeRef(owner.thisType, name.asTypeName, this)
+
+ override def termRef(implicit ctx: Context): TermRef =
+ TermRef(owner.thisType, name.asTermName, this)
+
+ override def valRef(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(owner.thisType, name.asTermName, Signature.NotAMethod, this)
+
+ override def termRefWithSig(implicit ctx: Context): TermRef =
+ TermRef.withSigAndDenot(owner.thisType, name.asTermName, signature, this)
+
+ def nonMemberTermRef(implicit ctx: Context): TermRef =
+ TermRef.withFixedSym(owner.thisType, name.asTermName, symbol.asTerm)
+
+ /** The variance of this type parameter or type member as an Int, with
+ * +1 = Covariant, -1 = Contravariant, 0 = Nonvariant, or not a type parameter
+ */
+ final def variance(implicit ctx: Context): Int =
+ if (this is Covariant) 1
+ else if (this is Contravariant) -1
+ else 0
+
+ /** The flags to be used for a type parameter owned by this symbol.
+ * Overridden by ClassDenotation.
+ */
+ def typeParamCreationFlags: FlagSet = TypeParam
+
+ override def toString = {
+ val kindString =
+ if (myFlags is ModuleClass) "module class"
+ else if (isClass) "class"
+ else if (isType) "type"
+ else if (myFlags is Module) "module"
+ else if (myFlags is Method) "method"
+ else "val"
+ s"$kindString $name"
+ }
+
+    // ----- Sanity checks and debugging -----------------------------------
+
+ def debugString = toString + "#" + symbol.id // !!! DEBUG
+
+ def hasSkolems(tp: Type): Boolean = tp match {
+ case tp: SkolemType => true
+ case tp: NamedType => hasSkolems(tp.prefix)
+ case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo)
+ case tp: RecType => hasSkolems(tp.parent)
+ case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType)
+ case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType)
+ case tp: ExprType => hasSkolems(tp.resType)
+ case tp: HKApply => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems)
+ case tp: AndOrType => hasSkolems(tp.tp1) || hasSkolems(tp.tp2)
+ case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi)
+ case tp: AnnotatedType => hasSkolems(tp.tpe)
+ case tp: TypeVar => hasSkolems(tp.inst)
+ case _ => false
+ }
+
+ def assertNoSkolems(tp: Type) =
+ if (!this.isSkolem)
+ assert(!hasSkolems(tp), s"assigning type $tp containing skolems to $this")
+
+ // ----- copies and transforms ----------------------------------------
+
+ protected def newLikeThis(s: Symbol, i: Type): SingleDenotation = new UniqueRefDenotation(s, i, validFor)
+
+ /** Copy this denotation, overriding selective fields */
+ final def copySymDenotation(
+ symbol: Symbol = this.symbol,
+ owner: Symbol = this.owner,
+ name: Name = this.name,
+ initFlags: FlagSet = UndefinedFlags,
+ info: Type = null,
+ privateWithin: Symbol = null,
+ annotations: List[Annotation] = null)(implicit ctx: Context) =
+ { // simulate default parameters, while also passing implicit context ctx to the default values
+ val initFlags1 = (if (initFlags != UndefinedFlags) initFlags else this.flags) &~ Frozen
+ val info1 = if (info != null) info else this.info
+ val privateWithin1 = if (privateWithin != null) privateWithin else this.privateWithin
+ val annotations1 = if (annotations != null) annotations else this.annotations
+ val d = ctx.SymDenotation(symbol, owner, name, initFlags1, info1, privateWithin1)
+ d.annotations = annotations1
+ d
+ }
+
+ override def initial: SymDenotation = super.initial.asSymDenotation
+
+ /** Install this denotation as the result of the given denotation transformer. */
+ override def installAfter(phase: DenotTransformer)(implicit ctx: Context): Unit =
+ super.installAfter(phase)
+
+ /** Apply a transformation `f` to all denotations in this group that start at or after
+     *  the given phase. Denotations are replaced while keeping the same validity periods.
+ */
+ override def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(implicit ctx: Context): Unit =
+ super.transformAfter(phase, f)
+
+ /** If denotation is private, remove the Private flag and expand the name if necessary */
+ def ensureNotPrivate(implicit ctx: Context) =
+ if (is(Private))
+ copySymDenotation(
+ name = expandedName,
+ initFlags = this.flags &~ Private | ExpandedName)
+ else this
+ }
+
+ /** The contents of a class definition during a period
+ */
+ class ClassDenotation private[SymDenotations] (
+ symbol: Symbol,
+ ownerIfExists: Symbol,
+ name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol,
+ initRunId: RunId)
+ extends SymDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) {
+
+ import util.LRUCache
+
+ // ----- denotation fields and accessors ------------------------------
+
+ if (initFlags is (Module, butNot = Package)) assert(name.isModuleClassName, s"module naming inconsistency: $name")
+
+ /** The symbol asserted to have type ClassSymbol */
+ def classSymbol: ClassSymbol = symbol.asInstanceOf[ClassSymbol]
+
+ /** The info asserted to have type ClassInfo */
+ def classInfo(implicit ctx: Context): ClassInfo = info.asInstanceOf[ClassInfo]
+
+ /** TODO: Document why caches are supposedly safe to use */
+ private[this] var myTypeParams: List[TypeSymbol] = _
+
+ private[this] var myNamedTypeParams: Set[TypeSymbol] = _
+
+ /** The type parameters in this class, in the order they appear in the current
+ * scope `decls`. This might be temporarily the incorrect order when
+ * reading Scala2 pickled info. The problem is fixed by `updateTypeParams`
+ * which is called once an unpickled symbol has been completed.
+ */
+ private def typeParamsFromDecls(implicit ctx: Context) =
+ unforcedDecls.filter(sym =>
+ (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
+
+ /** The type parameters of this class */
+ override final def typeParams(implicit ctx: Context): List[TypeSymbol] = {
+ if (myTypeParams == null)
+ myTypeParams =
+ if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls
+ else {
+ val di = initial
+ if (this ne di) di.typeParams
+ else infoOrCompleter match {
+ case info: TypeParamsCompleter => info.completerTypeParams(symbol)
+ case _ => typeParamsFromDecls
+ }
+ }
+ myTypeParams
+ }
+
+ /** The named type parameters declared or inherited by this class */
+ override final def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = {
+ def computeNamedTypeParams: Set[TypeSymbol] =
+ if (ctx.erasedTypes || is(Module)) Set() // fast return for modules to avoid scanning package decls
+ else memberNames(abstractTypeNameFilter).map(name =>
+ info.member(name).symbol.asType).filter(_.is(TypeParam, butNot = ExpandedName)).toSet
+ if (myNamedTypeParams == null) myNamedTypeParams = computeNamedTypeParams
+ myNamedTypeParams
+ }
+
+ override protected[dotc] final def info_=(tp: Type) = {
+ super.info_=(tp)
+ myTypeParams = null // changing the info might change decls, and with it typeParams
+ }
+
+ /** The parents of this class, as type refs. */
+ def classParents(implicit ctx: Context): List[TypeRef] = info match {
+ case classInfo: ClassInfo => classInfo.classParents
+ case _ => Nil
+ }
+
+ /** The symbol of the superclass, NoSymbol if no superclass exists */
+ def superClass(implicit ctx: Context): Symbol = classParents match {
+ case parent :: _ =>
+ val cls = parent.classSymbol
+ if (cls is Trait) NoSymbol else cls
+ case _ =>
+ NoSymbol
+ }
+
+ /** The denotation is fully completed: all attributes are fully defined.
+ * ClassDenotations compiled from source are first completed, then fully completed.
+ * Packages are never fully completed since members can be added at any time.
+ * @see Namer#ClassCompleter
+ */
+ private def isFullyCompleted(implicit ctx: Context): Boolean = {
+ def isFullyCompletedRef(tp: TypeRef) = tp.denot match {
+ case d: ClassDenotation => d.isFullyCompleted
+ case _ => false
+ }
+ def testFullyCompleted =
+ if (classParents.isEmpty) !is(Package) && symbol.eq(defn.AnyClass)
+ else classParents.forall(isFullyCompletedRef)
+ flagsUNSAFE.is(FullyCompleted) ||
+ isCompleted && testFullyCompleted && { setFlag(FullyCompleted); true }
+ }
+
+ // ------ syncing inheritance-related info -----------------------------
+
+ private var firstRunId: RunId = initRunId
+
+ /** invalidate caches influenced by parent classes if one of the parents
+ * is younger than the denotation itself.
+ */
+ override def syncWithParents(implicit ctx: Context): SingleDenotation = {
+ def isYounger(tref: TypeRef) = tref.symbol.denot match {
+ case denot: ClassDenotation =>
+ if (denot.validFor.runId < ctx.runId) denot.current // syncs with its parents in turn
+ val result = denot.firstRunId > this.firstRunId
+ if (result) incremental.println(s"$denot is younger than $this")
+ result
+ case _ => false
+ }
+ val parentIsYounger = (firstRunId < ctx.runId) && {
+ infoOrCompleter match {
+ case cinfo: ClassInfo => cinfo.classParents exists isYounger
+ case _ => false
+ }
+ }
+ if (parentIsYounger) {
+ incremental.println(s"parents of $this are invalid; symbol id = ${symbol.id}, copying ...\n")
+ invalidateInheritedInfo()
+ }
+ firstRunId = ctx.runId
+ this
+ }
+
+ /** Invalidate all caches and fields that depend on base classes and their contents */
+ override def invalidateInheritedInfo(): Unit = {
+ myBaseClasses = null
+ mySuperClassBits = null
+ myMemberFingerPrint = FingerPrint.unknown
+ myMemberCache = null
+ myMemberCachePeriod = Nowhere
+ memberNamesCache = SimpleMap.Empty
+ }
+
+ // ------ class-specific operations -----------------------------------
+
+ private[this] var myThisType: Type = null
+
+ /** The this-type depends on the kind of class:
+ *   - for a package class `p`: ThisType(TypeRef(NoPrefix, p))
+ * - for a module class `m`: A term ref to m's source module.
+ * - for all other classes `c` with owner `o`: ThisType(TypeRef(o.thisType, c))
+ */
+ override def thisType(implicit ctx: Context): Type = {
+ if (myThisType == null) myThisType = computeThisType
+ myThisType
+ }
+
+ private def computeThisType(implicit ctx: Context): Type =
+ ThisType.raw(
+ TypeRef(if (this is Package) NoPrefix else owner.thisType, symbol.asType))
+/* else {
+ val pre = owner.thisType
+ if (this is Module)
+ if (isMissing(pre)) TermRef(pre, sourceModule.asTerm)
+ else TermRef.withSig(pre, name.sourceModuleName, Signature.NotAMethod)
+ else ThisType.raw(TypeRef(pre, symbol.asType))
+ }
+*/
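+ /* Sketch of the cases documented above, for a hypothetical nesting (illustrative only):
+  *
+  *     package p { class C { class D } }
+  *
+  *   - `p`'s this-type is ThisType(TypeRef(NoPrefix, p))
+  *   - `C`'s this-type is ThisType(TypeRef(p.thisType, C)), i.e. `C.this`
+  *   - `D`'s this-type is ThisType(TypeRef(C.this, D))
+  */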
+ private[this] var myTypeRef: TypeRef = null
+
+ override def typeRef(implicit ctx: Context): TypeRef = {
+ if (myTypeRef == null) myTypeRef = super.typeRef
+ myTypeRef
+ }
+
+ private[this] var myBaseClasses: List[ClassSymbol] = null
+ private[this] var mySuperClassBits: BitSet = null
+
+ /** Invalidate baseTypeRefCache, baseClasses and superClassBits on new run */
+ private def checkBasesUpToDate()(implicit ctx: Context) =
+ if (baseTypeRefValid != ctx.runId) {
+ baseTypeRefCache = new java.util.HashMap[CachedType, Type]
+ myBaseClasses = null
+ mySuperClassBits = null
+ baseTypeRefValid = ctx.runId
+ }
+
+ private def computeBases(implicit ctx: Context): (List[ClassSymbol], BitSet) = {
+ if (myBaseClasses eq Nil) throw CyclicReference(this)
+ myBaseClasses = Nil
+ val seen = new mutable.BitSet
+ val locked = new mutable.BitSet
+ def addBaseClasses(bcs: List[ClassSymbol], to: List[ClassSymbol])
+ : List[ClassSymbol] = bcs match {
+ case bc :: bcs1 =>
+ val bcs1added = addBaseClasses(bcs1, to)
+ val id = bc.superId
+ if (seen contains id) bcs1added
+ else {
+ seen += id
+ bc :: bcs1added
+ }
+ case nil =>
+ to
+ }
+ def addParentBaseClasses(ps: List[Type], to: List[ClassSymbol]): List[ClassSymbol] = ps match {
+ case p :: ps1 =>
+ addParentBaseClasses(ps1, addBaseClasses(p.baseClasses, to))
+ case nil =>
+ to
+ }
+ val bcs = classSymbol :: addParentBaseClasses(classParents, Nil)
+ val scbits = seen.toImmutable
+ if (isFullyCompleted) {
+ myBaseClasses = bcs
+ mySuperClassBits = scbits
+ }
+ else myBaseClasses = null
+ (bcs, scbits)
+ }
+
+ /** A bitset that contains the superId's of all base classes */
+ private def superClassBits(implicit ctx: Context): BitSet =
+ if (classParents.isEmpty) BitSet() // can happen when called too early in Namers
+ else {
+ checkBasesUpToDate()
+ if (mySuperClassBits != null) mySuperClassBits else computeBases._2
+ }
+
+ /** The base classes of this class in linearization order,
+ * with the class itself as first element.
+ */
+ def baseClasses(implicit ctx: Context): List[ClassSymbol] =
+ if (classParents.isEmpty) classSymbol :: Nil // can happen when called too early in Namers
+ else {
+ checkBasesUpToDate()
+ if (myBaseClasses != null) myBaseClasses else computeBases._1
+ }
+
+ final override def derivesFrom(base: Symbol)(implicit ctx: Context): Boolean =
+ !isAbsent &&
+ base.isClass &&
+ ( (symbol eq base)
+ || (superClassBits contains base.superId)
+ || (this is Erroneous)
+ || (base is Erroneous)
+ )
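+ /* Example (standard library classes, illustrative only): `Seq` appears in the base
+  * classes of `List`, so `Seq`'s superId is set in `List`'s superClassBits and
+  * `listDenot.derivesFrom(seqClass)` answers true in constant time, without walking
+  * the linearization. `listDenot` and `seqClass` are hypothetical handles here.
+  */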
+
+ final override def isSubClass(base: Symbol)(implicit ctx: Context) =
+ derivesFrom(base) ||
+ base.isClass && (
+ (symbol eq defn.NothingClass) ||
+ (symbol eq defn.NullClass) && (base ne defn.NothingClass))
+
+ final override def typeParamCreationFlags = ClassTypeParamCreationFlags
+
+ private[this] var myMemberFingerPrint: FingerPrint = FingerPrint.unknown
+
+ private def computeMemberFingerPrint(implicit ctx: Context): FingerPrint = {
+ var fp = FingerPrint()
+ var e = info.decls.lastEntry
+ while (e != null) {
+ fp.include(e.name)
+ e = e.prev
+ }
+ var ps = classParents
+ while (ps.nonEmpty) {
+ val parent = ps.head.typeSymbol
+ parent.denot match {
+ case parentDenot: ClassDenotation =>
+ fp.include(parentDenot.memberFingerPrint)
+ if (parentDenot.isFullyCompleted) parentDenot.setFlag(Frozen)
+ case _ =>
+ }
+ ps = ps.tail
+ }
+ fp
+ }
+
+ /** A Bloom filter for the names of all members in this class.
+ * It makes sense only for parent classes, and must not be used for
+ * package classes, because their cache never gets invalidated.
+ */
+ def memberFingerPrint(implicit ctx: Context): FingerPrint =
+ if (myMemberFingerPrint != FingerPrint.unknown) myMemberFingerPrint
+ else {
+ val fp = computeMemberFingerPrint
+ if (isFullyCompleted) myMemberFingerPrint = fp
+ fp
+ }
+
+ private[this] var myMemberCache: LRUCache[Name, PreDenotation] = null
+ private[this] var myMemberCachePeriod: Period = Nowhere
+
+ private def memberCache(implicit ctx: Context): LRUCache[Name, PreDenotation] = {
+ if (myMemberCachePeriod != ctx.period) {
+ myMemberCache = new LRUCache
+ myMemberCachePeriod = ctx.period
+ }
+ myMemberCache
+ }
+
+ /** Enter a symbol in current scope, and future scopes of same denotation.
+ * Note: We require that this does not happen after the first time
+ * someone does a findMember on a subclass.
+ * @param scope The scope in which symbol should be entered.
+ * If this is EmptyScope, the scope is `decls`.
+ */
+ def enter(sym: Symbol, scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
+ val mscope = scope match {
+ case scope: MutableScope =>
+ // If enter gets a scope as an argument,
+ // then this is a scope that will eventually become the decls of this symbol.
+ // This should only happen the first time the scope of the symbol
+ // is computed, i.e. the symbol does not yet have a future.
+ assert(this.nextInRun.validFor.code <= this.validFor.code)
+ scope
+ case _ => unforcedDecls.openForMutations
+ }
+ if (this is PackageClass) {
+ val entry = mscope.lookupEntry(sym.name)
+ if (entry != null) {
+ if (entry.sym == sym) return
+ mscope.unlink(entry)
+ entry.sym.denot = sym.denot // to avoid stale symbols
+ }
+ }
+ enterNoReplace(sym, mscope)
+ val nxt = this.nextInRun
+ if (nxt.validFor.code > this.validFor.code) {
+ this.nextInRun.asSymDenotation.asClass.enter(sym)
+ }
+ }
+
+ /** Enter a symbol in the given `scope`, without replacing an existing entry of the same name. */
+ def enterNoReplace(sym: Symbol, scope: MutableScope)(implicit ctx: Context): Unit = {
+ def isUsecase = ctx.docCtx.isDefined && sym.name.show.takeRight(4) == "$doc"
+ require(
+ (sym.denot.flagsUNSAFE is Private) ||
+ !(this is Frozen) ||
+ (scope ne this.unforcedDecls) ||
+ sym.hasAnnotation(defn.ScalaStaticAnnot) ||
+ sym.name.isInlineAccessor ||
+ isUsecase)
+
+ scope.enter(sym)
+
+ if (myMemberFingerPrint != FingerPrint.unknown)
+ myMemberFingerPrint.include(sym.name)
+ if (myMemberCache != null)
+ myMemberCache invalidate sym.name
+ }
+
+ /** Replace symbol `prev` (if defined in current class) by symbol `replacement`.
+ * If `prev` is not defined in current class, do nothing.
+ * @pre `prev` and `replacement` have the same name.
+ */
+ def replace(prev: Symbol, replacement: Symbol)(implicit ctx: Context): Unit = {
+ require(!(this is Frozen))
+ unforcedDecls.openForMutations.replace(prev, replacement)
+ if (myMemberCache != null)
+ myMemberCache invalidate replacement.name
+ }
+
+ /** Delete symbol from current scope.
+ * Note: We require that this does not happen after the first time
+ * someone does a findMember on a subclass.
+ */
+ def delete(sym: Symbol)(implicit ctx: Context) = {
+ require(!(this is Frozen))
+ info.decls.openForMutations.unlink(sym)
+ myMemberFingerPrint = FingerPrint.unknown
+ if (myMemberCache != null) myMemberCache invalidate sym.name
+ }
+
+ /** Make sure the type parameters of this class appear in the order given
+ * by `typeParams` in the scope of the class. Reorder definitions in scope if necessary.
+ */
+ def ensureTypeParamsInCorrectOrder()(implicit ctx: Context): Unit = {
+ val tparams = typeParams
+ if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) {
+ val decls = info.decls
+ val decls1 = newScope
+ for (tparam <- typeParams) decls1.enter(decls.lookup(tparam.name))
+ for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym)
+ info = classInfo.derivedClassInfo(decls = decls1)
+ myTypeParams = null
+ }
+ }
+
+ /** All members of this class that have the given name.
+ * The elements of the returned pre-denotation all
+ * have existing symbols.
+ */
+ final def membersNamed(name: Name)(implicit ctx: Context): PreDenotation = {
+ val privates = info.decls.denotsNamed(name, selectPrivate)
+ privates union nonPrivateMembersNamed(name).filterDisjoint(privates)
+ }
+
+ /** All non-private members of this class that have the given name.
+ * The elements of the returned pre-denotation all
+ * have existing symbols.
+ * @param inherited Whether the method is called on a parent class from computeNPMembersNamed
+ */
+ final def nonPrivateMembersNamed(name: Name, inherited: Boolean = false)(implicit ctx: Context): PreDenotation = {
+ Stats.record("nonPrivateMembersNamed")
+ if (Config.cacheMembersNamed) {
+ var denots: PreDenotation = memberCache lookup name
+ if (denots == null) {
+ denots = computeNPMembersNamed(name, inherited)
+ if (isFullyCompleted) memberCache.enter(name, denots)
+ } else if (Config.checkCacheMembersNamed) {
+ val denots1 = computeNPMembersNamed(name, inherited)
+ assert(denots.exists == denots1.exists, s"cache inconsistency: cached: $denots, computed $denots1, name = $name, owner = $this")
+ }
+ denots
+ } else computeNPMembersNamed(name, inherited)
+ }
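+ /* Caching note (added commentary): results are stored in `memberCache`, which is
+  * discarded and re-created when consulted in a different period, and a name is only
+  * cached once the class is fully completed; `enterNoReplace`/`replace`/`delete`
+  * additionally invalidate the affected name, so the cache cannot serve stale
+  * denotations.
+  */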
+
+ private[core] def computeNPMembersNamed(name: Name, inherited: Boolean)(implicit ctx: Context): PreDenotation = /*>|>*/ Stats.track("computeNPMembersNamed") /*<|<*/ {
+ if (!inherited ||
+ !Config.useFingerPrints ||
+ (memberFingerPrint contains name)) {
+ Stats.record("computeNPMembersNamed after fingerprint")
+ ensureCompleted()
+ val ownDenots = info.decls.denotsNamed(name, selectNonPrivate)
+ if (debugTrace) // DEBUG
+ println(s"$this.member($name), ownDenots = $ownDenots")
+ def collect(denots: PreDenotation, parents: List[TypeRef]): PreDenotation = parents match {
+ case p :: ps =>
+ val denots1 = collect(denots, ps)
+ p.symbol.denot match {
+ case parentd: ClassDenotation =>
+ denots1 union
+ parentd.nonPrivateMembersNamed(name, inherited = true)
+ .mapInherited(ownDenots, denots1, thisType)
+ case _ =>
+ denots1
+ }
+ case nil =>
+ denots
+ }
+ if (name.isConstructorName) ownDenots
+ else collect(ownDenots, classParents)
+ } else NoDenotation
+ }
+
+ override final def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation = {
+ val raw = if (excluded is Private) nonPrivateMembersNamed(name) else membersNamed(name)
+ raw.filterExcluded(excluded).asSeenFrom(pre).toDenot(pre)
+ }
+
+ private[this] var baseTypeRefCache: java.util.HashMap[CachedType, Type] = null
+ private[this] var baseTypeRefValid: RunId = NoRunId
+
+ /** Compute tp.baseTypeRef(this) */
+ final def baseTypeRefOf(tp: Type)(implicit ctx: Context): Type = {
+
+ def foldGlb(bt: Type, ps: List[Type]): Type = ps match {
+ case p :: ps1 => foldGlb(bt & baseTypeRefOf(p), ps1)
+ case _ => bt
+ }
+
+ def inCache(tp: Type) = baseTypeRefCache.containsKey(tp)
+
+ /** We cannot cache:
+ * - type variables which are uninstantiated or whose instances can
+ * change, depending on typerstate.
+ * - types where the underlying type is an ErasedValueType, because
+ * this underlying type will change after ElimErasedValueType,
+ * and this changes subtyping relations. As a shortcut, we do not
+ * cache ErasedValueType at all.
+ */
+ def isCachable(tp: Type): Boolean = tp match {
+ case _: TypeErasure.ErasedValueType => false
+ case tp: TypeRef if tp.symbol.isClass => true
+ case tp: TypeVar => tp.inst.exists && inCache(tp.inst)
+ case tp: TypeProxy => inCache(tp.underlying)
+ case tp: AndOrType => inCache(tp.tp1) && inCache(tp.tp2)
+ case _ => true
+ }
+
+ def computeBaseTypeRefOf(tp: Type): Type = {
+ Stats.record("computeBaseTypeOf")
+ if (symbol.isStatic && tp.derivesFrom(symbol))
+ symbol.typeRef
+ else tp match {
+ case tp: TypeRef =>
+ val subcls = tp.symbol
+ if (subcls eq symbol)
+ tp
+ else subcls.denot match {
+ case cdenot: ClassDenotation =>
+ if (cdenot.superClassBits contains symbol.superId) foldGlb(NoType, tp.parents)
+ else NoType
+ case _ =>
+ baseTypeRefOf(tp.superType)
+ }
+ case tp: TypeProxy =>
+ baseTypeRefOf(tp.superType)
+ case AndType(tp1, tp2) =>
+ baseTypeRefOf(tp1) & baseTypeRefOf(tp2)
+ case OrType(tp1, tp2) =>
+ baseTypeRefOf(tp1) | baseTypeRefOf(tp2)
+ case JavaArrayType(_) if symbol == defn.ObjectClass =>
+ this.typeRef
+ case _ =>
+ NoType
+ }
+ }
+
+ /*>|>*/ ctx.debugTraceIndented(s"$tp.baseTypeRef($this)") /*<|<*/ {
+ tp match {
+ case tp: CachedType =>
+ checkBasesUpToDate()
+ var basetp = baseTypeRefCache get tp
+ if (basetp == null) {
+ baseTypeRefCache.put(tp, NoPrefix)
+ basetp = computeBaseTypeRefOf(tp)
+ if (isCachable(tp)) baseTypeRefCache.put(tp, basetp)
+ else baseTypeRefCache.remove(tp)
+ } else if (basetp == NoPrefix) {
+ baseTypeRefCache.put(tp, null)
+ throw CyclicReference(this)
+ }
+ basetp
+ case _ =>
+ computeBaseTypeRefOf(tp)
+ }
+ }
+ }
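+ /* Cache protocol sketch (added commentary): before computing a base type ref, the
+  * entry for `tp` is pre-set to the sentinel `NoPrefix`; a recursive call that finds
+  * this sentinel knows it has re-entered the same computation and throws
+  * CyclicReference instead of diverging. On success the sentinel is replaced by the
+  * result (or removed if the type is not cachable).
+  */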
+
+ private[this] var memberNamesCache: SimpleMap[NameFilter, Set[Name]] = SimpleMap.Empty
+
+ def memberNames(keepOnly: NameFilter)(implicit ctx: Context): Set[Name] = {
+ def computeMemberNames: Set[Name] = {
+ var names = Set[Name]()
+ def maybeAdd(name: Name) = if (keepOnly(thisType, name)) names += name
+ for (p <- classParents)
+ for (name <- p.memberNames(keepOnly, thisType)) maybeAdd(name)
+ val ownSyms =
+ if (keepOnly == implicitFilter)
+ if (this is Package) Iterator.empty
+ else info.decls.iterator filter (_ is Implicit)
+ else info.decls.iterator
+ for (sym <- ownSyms) maybeAdd(sym.name)
+ names
+ }
+ if ((this is PackageClass) || !Config.cacheMemberNames)
+ computeMemberNames // don't cache package member names; they might change
+ else {
+ val cached = memberNamesCache(keepOnly)
+ if (cached != null) cached
+ else {
+ val names = computeMemberNames
+ if (isFullyCompleted) {
+ setFlag(Frozen)
+ memberNamesCache = memberNamesCache.updated(keepOnly, names)
+ }
+ names
+ }
+ }
+ }
+
+ private[this] var fullNameCache: SimpleMap[String, Name] = SimpleMap.Empty
+ override final def fullNameSeparated(separator: String)(implicit ctx: Context): Name = {
+ val cached = fullNameCache(separator)
+ if (cached != null) cached
+ else {
+ val fn = super.fullNameSeparated(separator)
+ fullNameCache = fullNameCache.updated(separator, fn)
+ fn
+ }
+ }
+
+ // to avoid overloading ambiguities
+ override def fullName(implicit ctx: Context): Name = super.fullName
+
+ override def primaryConstructor(implicit ctx: Context): Symbol = {
+ def constrNamed(cname: TermName) = info.decls.denotsNamed(cname).last.symbol
+ // denotsNamed returns Symbols in reverse order of occurrence
+ if (this.is(ImplClass)) constrNamed(nme.TRAIT_CONSTRUCTOR) // ignore normal constructor
+ else
+ constrNamed(nme.CONSTRUCTOR).orElse(constrNamed(nme.TRAIT_CONSTRUCTOR))
+ }
+
+ /** The parameter accessors of this class. Term and type accessors,
+ * getters and setters are all returned in this list.
+ */
+ def paramAccessors(implicit ctx: Context): List[Symbol] =
+ unforcedDecls.filter(_ is ParamAccessor).toList
+
+ /** If this class has the same `decls` scope reference in `phase` and
+ * `phase.next`, install a new denotation with a cloned scope in `phase.next`.
+ */
+ def ensureFreshScopeAfter(phase: DenotTransformer)(implicit ctx: Context): Unit =
+ if (ctx.phaseId != phase.next.id) ensureFreshScopeAfter(phase)(ctx.withPhase(phase.next))
+ else {
+ val prevCtx = ctx.withPhase(phase)
+ val ClassInfo(pre, _, ps, decls, selfInfo) = classInfo
+ if (classInfo(prevCtx).decls eq decls)
+ copySymDenotation(info = ClassInfo(pre, classSymbol, ps, decls.cloneScope, selfInfo))
+ .installAfter(phase)
+ }
+ }
+
+ /** The denotation of a package class.
+ * It overrides ClassDenotation to take account of package objects when looking for members
+ */
+ class PackageClassDenotation private[SymDenotations] (
+ symbol: Symbol,
+ ownerIfExists: Symbol,
+ name: Name,
+ initFlags: FlagSet,
+ initInfo: Type,
+ initPrivateWithin: Symbol,
+ initRunId: RunId)
+ extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin, initRunId) {
+
+ private[this] var packageObjCache: SymDenotation = _
+ private[this] var packageObjRunId: RunId = NoRunId
+
+ /** The package object in this class, if one exists */
+ def packageObj(implicit ctx: Context): SymDenotation = {
+ if (packageObjRunId != ctx.runId) {
+ packageObjRunId = ctx.runId
+ packageObjCache = NoDenotation // break cycle in case we are looking for package object itself
+ packageObjCache = findMember(nme.PACKAGE, thisType, EmptyFlags).asSymDenotation
+ }
+ packageObjCache
+ }
+
+ /** Look first for members in package; if none are found look in package object */
+ override def computeNPMembersNamed(name: Name, inherited: Boolean)(implicit ctx: Context): PreDenotation = {
+ val denots = super.computeNPMembersNamed(name, inherited)
+ if (denots.exists) denots
+ else packageObj.moduleClass.denot match {
+ case pcls: ClassDenotation => pcls.computeNPMembersNamed(name, inherited)
+ case _ => denots
+ }
+ }
+
+ /** The union of the member names of the package and the package object */
+ override def memberNames(keepOnly: NameFilter)(implicit ctx: Context): Set[Name] = {
+ val ownNames = super.memberNames(keepOnly)
+ packageObj.moduleClass.denot match {
+ case pcls: ClassDenotation => ownNames union pcls.memberNames(keepOnly)
+ case _ => ownNames
+ }
+ }
+ }
+
+ class NoDenotation extends SymDenotation(
+ NoSymbol, NoSymbol, "<none>".toTermName, Permanent, NoType) {
+ override def exists = false
+ override def isTerm = false
+ override def isType = false
+ override def owner: Symbol = throw new AssertionError("NoDenotation.owner")
+ override def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = this
+ override def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation = this
+ validFor = Period.allInRun(NoRunId) // will be brought forward automatically
+ }
+
+ @sharable val NoDenotation = new NoDenotation
+
+ // ---- Completion --------------------------------------------------------
+
+ /** Instances of LazyType are carried by uncompleted symbols.
+ * Note: LazyTypes double up as (constant) functions from Symbol and
+ * from (TermSymbol, ClassSymbol) to LazyType. That way lazy types can be
+ * directly passed to symbol creation methods in Symbols that demand instances
+ * of these function types.
+ */
+ abstract class LazyType extends UncachedGroundType
+ with (Symbol => LazyType)
+ with ((TermSymbol, ClassSymbol) => LazyType) { self =>
+
+ /** Sets all missing fields of given denotation */
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit
+
+ def apply(sym: Symbol) = this
+ def apply(module: TermSymbol, modcls: ClassSymbol) = this
+
+ private var myDecls: Scope = EmptyScope
+ private var mySourceModuleFn: Context => Symbol = NoSymbolFn
+ private var myModuleClassFn: Context => Symbol = NoSymbolFn
+
+ /** A proxy to this lazy type that keeps the complete operation
+ * but provides fresh slots for scope/sourceModule/moduleClass
+ */
+ def proxy: LazyType = new LazyType {
+ override def complete(denot: SymDenotation)(implicit ctx: Context) = self.complete(denot)
+ }
+
+ def decls: Scope = myDecls
+ def sourceModule(implicit ctx: Context): Symbol = mySourceModuleFn(ctx)
+ def moduleClass(implicit ctx: Context): Symbol = myModuleClassFn(ctx)
+
+ def withDecls(decls: Scope): this.type = { myDecls = decls; this }
+ def withSourceModule(sourceModuleFn: Context => Symbol): this.type = { mySourceModuleFn = sourceModuleFn; this }
+ def withModuleClass(moduleClassFn: Context => Symbol): this.type = { myModuleClassFn = moduleClassFn; this }
+ }
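+ /* Usage sketch (hypothetical completer, illustrative only): because LazyType also
+  * implements `Symbol => LazyType` and `(TermSymbol, ClassSymbol) => LazyType`, one
+  * instance can be passed directly where Symbols#newClassSymbol or newModuleSymbol
+  * expect an `infoFn`:
+  *
+  *     val completer = new LazyType {
+  *       def complete(denot: SymDenotation)(implicit ctx: Context): Unit =
+  *         denot.info = ...        // fill in info once the symbol is first forced
+  *     } withDecls newScope
+  *     ctx.newClassSymbol(owner, name, flags, completer)  // owner/name/flags hypothetical
+  */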
+
+ /** A subclass of LazyTypes where type parameters can be completed independently of
+ * the info.
+ */
+ trait TypeParamsCompleter extends LazyType {
+ /** The type parameters computed by the completer before completion has finished */
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol]
+ }
+
+ val NoSymbolFn = (ctx: Context) => NoSymbol
+
+ /** A missing completer */
+ @sharable class NoCompleter extends LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = unsupported("complete")
+ }
+
+ object NoCompleter extends NoCompleter
+
+ /** A lazy type for modules that points to the module class.
+ * Needed so that `moduleClass` works before completion.
+ * Completion of modules is always completion of the underlying
+ * module class, followed by copying the relevant fields to the module.
+ */
+ class ModuleCompleter(_moduleClass: ClassSymbol) extends LazyType {
+ override def moduleClass(implicit ctx: Context) = _moduleClass
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val from = moduleClass.denot.asClass
+ denot.setFlag(from.flags.toTermFlags & RetainedModuleValFlags)
+ denot.annotations = from.annotations filter (_.appliesToModule)
+ // !!! ^^^ needs to be revised later. The problem is that annotations might
+ // only apply to the module but not to the module class. The right solution
+ // is to have the module class completer set the annotations of both the
+ // class and the module.
+ denot.info = moduleClass.typeRef
+ denot.privateWithin = from.privateWithin
+ }
+ }
+
+ /** A completer for missing references */
+ class StubInfo() extends LazyType {
+
+ def initializeToDefaults(denot: SymDenotation)(implicit ctx: Context) = {
+ denot.info = denot match {
+ case denot: ClassDenotation =>
+ ClassInfo(denot.owner.thisType, denot.classSymbol, Nil, EmptyScope)
+ case _ =>
+ ErrorType
+ }
+ denot.privateWithin = NoSymbol
+ }
+
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val sym = denot.symbol
+ val file = sym.associatedFile
+ val (location, src) =
+ if (file != null) (s" in $file", file.toString)
+ else ("", "the signature")
+ val name = ctx.fresh.setSetting(ctx.settings.debugNames, true).nameString(denot.name)
+ ctx.error(
+ i"""bad symbolic reference. A signature$location
+ |refers to $name in ${denot.owner.showKind} ${denot.owner.showFullName} which is not available.
+ |It may be completely missing from the current classpath, or the version on
+ |the classpath might be incompatible with the version used when compiling $src.""")
+ if (ctx.debug) throw new Error()
+ initializeToDefaults(denot)
+ }
+ }
+
+ // ---- Fingerprints -----------------------------------------------------
+
+ /** A fingerprint is a bitset that acts as a bloom filter for sets
+ * of names.
+ */
+ class FingerPrint(val bits: Array[Long]) extends AnyVal {
+ import FingerPrint._
+
+ /** Include some bits of name's hashcode in set */
+ def include(name: Name): Unit = {
+ val hash = name.hashCode & Mask
+ bits(hash >> WordSizeLog) |= (1L << hash)
+ }
+
+ /** Include all bits of `that` fingerprint in set */
+ def include(that: FingerPrint): Unit =
+ for (i <- 0 until NumWords) bits(i) |= that.bits(i)
+
+ /** Does set contain hash bits of given name? */
+ def contains(name: Name): Boolean = {
+ val hash = name.hashCode & Mask
+ (bits(hash >> WordSizeLog) & (1L << hash)) != 0
+ }
+ }
+
+ object FingerPrint {
+ def apply() = new FingerPrint(new Array[Long](NumWords))
+ val unknown = new FingerPrint(null)
+ private final val WordSizeLog = 6
+ private final val NumWords = 32
+ private final val NumBits = NumWords << WordSizeLog
+ private final val Mask = NumBits - 1
+ }
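+ /* Usage sketch (illustrative only):
+  *
+  *     val fp = FingerPrint()
+  *     fp.include("map".toTermName)
+  *     fp.contains("map".toTermName)      // always true once included
+  *     fp.contains("flatMap".toTermName)  // usually false; false positives are possible
+  *
+  *  As with any Bloom filter, `contains` can err only towards `true`, so a negative
+  *  answer lets member lookup skip a parent class without searching it.
+  */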
+
+ private val AccessorOrLabel = Accessor | Label
+
+ @sharable private var indent = 0 // for completions printing
+}
diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
new file mode 100644
index 000000000..4ae28c10b
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala
@@ -0,0 +1,267 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package dotty.tools
+package dotc
+package core
+
+import java.io.IOException
+import scala.compat.Platform.currentTime
+import dotty.tools.io.{ ClassPath, AbstractFile }
+import Contexts._, Symbols._, Flags._, SymDenotations._, Types._, Scopes._, util.Positions._, Names._
+import StdNames._, NameOps._
+import Decorators.{StringDecorator, StringInterpolators}
+import classfile.ClassfileParser
+import scala.util.control.NonFatal
+
+object SymbolLoaders {
+ /** A marker trait for a completer that replaces the original
+ * Symbol loader for an unpickled root.
+ */
+ trait SecondCompleter
+}
+
+/** A base class for Symbol loaders with some overridable behavior */
+class SymbolLoaders {
+
+ protected def enterNew(
+ owner: Symbol, member: Symbol,
+ completer: SymbolLoader, scope: Scope = EmptyScope)(implicit ctx: Context): Symbol = {
+ assert(scope.lookup(member.name) == NoSymbol, s"${owner.fullName}.${member.name} already has a symbol")
+ owner.asClass.enter(member, scope)
+ member
+ }
+
+ /** Enter class with given `name` into scope of `owner`.
+ */
+ def enterClass(
+ owner: Symbol, name: PreName, completer: SymbolLoader,
+ flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(implicit ctx: Context): Symbol = {
+ val cls = ctx.newClassSymbol(owner, name.toTypeName, flags, completer, assocFile = completer.sourceFileOrNull)
+ enterNew(owner, cls, completer, scope)
+ }
+
+ /** Enter module with given `name` into scope of `owner`.
+ */
+ def enterModule(
+ owner: Symbol, name: PreName, completer: SymbolLoader,
+ modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(implicit ctx: Context): Symbol = {
+ val module = ctx.newModuleSymbol(
+ owner, name.toTermName, modFlags, clsFlags,
+ (module, _) => completer.proxy withDecls newScope withSourceModule (_ => module),
+ assocFile = completer.sourceFileOrNull)
+ enterNew(owner, module, completer, scope)
+ enterNew(owner, module.moduleClass, completer, scope)
+ }
+
+ /** Enter the package represented by classpath entry `pkg` into the scope of `owner`,
+ * creating a package symbol whose module class is completed by a `PackageLoader`.
+ */
+ def enterPackage(owner: Symbol, pkg: ClassPath)(implicit ctx: Context): Symbol = {
+ val pname = pkg.name.toTermName
+ val preExisting = owner.info.decls lookup pname
+ if (preExisting != NoSymbol) {
+ // Some jars (often, obfuscated ones) include a package and
+ // object with the same name. Rather than render them unusable,
+ // offer a setting to resolve the conflict one way or the other.
+ // This was motivated by the desire to use YourKit probes, which
+ // require yjp.jar at runtime. See SI-2089.
+ if (ctx.settings.termConflict.isDefault)
+ throw new TypeError(
+ i"""$owner contains object and package with same name: $pname
+ |one of them needs to be removed from classpath""")
+ else if (ctx.settings.termConflict.value == "package") {
+ ctx.warning(
+ s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. The object will be inaccessible.")
+ owner.asClass.delete(preExisting)
+ } else {
+ ctx.warning(
+ s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. The package will be inaccessible.")
+ return NoSymbol
+ }
+ }
+ ctx.newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags,
+ (module, modcls) => new PackageLoader(module, pkg)).entered
+ }
+
+ /** Enter class and module with given `name` into scope of `owner`
+ * and give them `completer` as type.
+ */
+ def enterClassAndModule(
+ owner: Symbol, name: PreName, completer: SymbolLoader,
+ flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
+ val clazz = enterClass(owner, name, completer, flags, scope)
+ val module = enterModule(
+ owner, name, completer,
+ modFlags = flags.toTermFlags & RetainedModuleValFlags,
+ clsFlags = flags.toTypeFlags & RetainedModuleClassFlags,
+ scope = scope)
+ }
+
+ /** In batch mode: Enter class and module with given `name` into scope of `owner`
+ * and give them a source completer for given `src` as type.
+ * In IDE mode: Find all toplevel definitions in `src` and enter them into the scope of `owner`
+ * with source completer for given `src` as type.
+ * (overridden in interactive.Global).
+ */
+ def enterToplevelsFromSource(
+ owner: Symbol, name: PreName, src: AbstractFile,
+ scope: Scope = EmptyScope)(implicit ctx: Context): Unit = {
+ enterClassAndModule(owner, name, new SourcefileLoader(src), scope = scope)
+ }
+
+ /** The package objects of scala and scala.reflect should always
+ * be loaded from their binary form if classfiles are available, even if sourcefiles
+ * are newer. Late-compiling these objects from source leads to compilation
+ * order issues.
+ * Note: We do a name-based comparison here because the method is called before we even
+ * have ReflectPackage defined.
+ */
+ def binaryOnly(owner: Symbol, name: String)(implicit ctx: Context): Boolean =
+ name == "package" &&
+ (owner.fullName.toString == "scala" || owner.fullName.toString == "scala.reflect")
+
+ /** Initialize toplevel class and module symbols in `owner` from class path representation `classRep`
+ */
+ def initializeFromClassPath(owner: Symbol, classRep: ClassPath#ClassRep)(implicit ctx: Context): Unit = {
+ ((classRep.binary, classRep.source): @unchecked) match {
+ case (Some(bin), Some(src)) if needCompile(bin, src) && !binaryOnly(owner, classRep.name) =>
+ if (ctx.settings.verbose.value) ctx.inform("[symloader] picked up newer source file for " + src.path)
+ enterToplevelsFromSource(owner, classRep.name, src)
+ case (None, Some(src)) =>
+ if (ctx.settings.verbose.value) ctx.inform("[symloader] no class, picked up source file for " + src.path)
+ enterToplevelsFromSource(owner, classRep.name, src)
+ case (Some(bin), _) =>
+ enterClassAndModule(owner, classRep.name, ctx.platform.newClassLoader(bin))
+ }
+ }
+
+ def needCompile(bin: AbstractFile, src: AbstractFile) =
+ src.lastModified >= bin.lastModified
+
+ /** Load contents of a package
+ */
+ class PackageLoader(_sourceModule: TermSymbol, classpath: ClassPath)
+ extends SymbolLoader {
+ override def sourceModule(implicit ctx: Context) = _sourceModule
+ def description = "package loader " + classpath.name
+
+ private[core] val currentDecls: MutableScope = newScope
+
+ def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = {
+ assert(root is PackageClass, root)
+ def maybeModuleClass(classRep: ClassPath#ClassRep) = classRep.name.last == '$'
+ val pre = root.owner.thisType
+ root.info = ClassInfo(pre, root.symbol.asClass, Nil, currentDecls, pre select sourceModule)
+ if (!sourceModule.isCompleted)
+ sourceModule.completer.complete(sourceModule)
+ if (!root.isRoot) {
+ for (classRep <- classpath.classes)
+ if (!maybeModuleClass(classRep))
+ initializeFromClassPath(root.symbol, classRep)
+ for (classRep <- classpath.classes)
+ if (maybeModuleClass(classRep) && !root.unforcedDecls.lookup(classRep.name.toTypeName).exists)
+ initializeFromClassPath(root.symbol, classRep)
+ }
+ if (!root.isEmptyPackage)
+ for (pkg <- classpath.packages)
+ enterPackage(root.symbol, pkg)
+ }
+ }
+}
+
+/** A lazy type that completes itself by calling parameter doComplete.
+ * Any linked modules/classes or module classes are also initialized.
+ */
+abstract class SymbolLoader extends LazyType {
+
+ /** Load the source or class file for `root`. */
+ def doComplete(root: SymDenotation)(implicit ctx: Context): Unit
+
+ def sourceFileOrNull: AbstractFile = null
+
+ /** Description of the resource (ClassPath, AbstractFile)
+ * being processed by this loader
+ */
+ def description: String
+
+ override def complete(root: SymDenotation)(implicit ctx: Context): Unit = {
+ def signalError(ex: Exception): Unit = {
+ if (ctx.debug) ex.printStackTrace()
+ val msg = ex.getMessage()
+ ctx.error(
+ if (msg eq null) "i/o error while loading " + root.name
+ else "error while loading " + root.name + ",\n " + msg)
+ }
+ try {
+ val start = currentTime
+ if (ctx.settings.debugTrace.value)
+ ctx.doTraceIndented(s">>>> loading ${root.debugString}", _ => s"<<<< loaded ${root.debugString}") {
+ doComplete(root)
+ }
+ else
+ doComplete(root)
+ ctx.informTime("loaded " + description, start)
+ } catch {
+ case ex: IOException =>
+ signalError(ex)
+ case NonFatal(ex) =>
+ println(s"exception caught when loading $root: $ex")
+ throw ex
+ } finally {
+ def postProcess(denot: SymDenotation) =
+ if (!denot.isCompleted &&
+ !denot.completer.isInstanceOf[SymbolLoaders.SecondCompleter])
+ denot.markAbsent()
+ postProcess(root)
+ if (!root.isRoot)
+ postProcess(root.scalacLinkedClass.denot)
+ }
+ }
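+ /* Added commentary: the `finally` block above marks the root (and, unless it is the
+  * root symbol, its linked class) absent whenever completion did not leave a completed
+  * denotation behind, e.g. after an I/O error, unless a SymbolLoaders.SecondCompleter
+  * has taken over the root in the meantime.
+  */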
+}
+
+class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader {
+
+ override def sourceFileOrNull: AbstractFile = classfile
+
+ def description = "class file " + classfile.toString
+
+ def rootDenots(rootDenot: ClassDenotation)(implicit ctx: Context): (ClassDenotation, ClassDenotation) = {
+ val linkedDenot = rootDenot.scalacLinkedClass.denot match {
+ case d: ClassDenotation => d
+ case d =>
+ // this can happen if the companion is shadowed by a val or type
+ // in a package object; in this case, we make up some dummy denotation
+ // as a stand in for loading.
+ // An example for this situation is scala.reflect.Manifest, which exists
+ // as a class in scala.reflect and as a val in scala.reflect.package.
+ if (rootDenot is ModuleClass)
+ ctx.newClassSymbol(
+ rootDenot.owner, rootDenot.name.stripModuleClassSuffix.asTypeName, Synthetic,
+ _ => NoType).classDenot
+ else
+ ctx.newModuleSymbol(
+ rootDenot.owner, rootDenot.name.toTermName, Synthetic, Synthetic,
+ (module, _) => new NoCompleter() withDecls newScope withSourceModule (_ => module))
+ .moduleClass.denot.asClass
+ }
+ if (rootDenot is ModuleClass) (linkedDenot, rootDenot)
+ else (rootDenot, linkedDenot)
+ }
+
+ override def doComplete(root: SymDenotation)(implicit ctx: Context): Unit =
+ load(root)
+
+ def load(root: SymDenotation)(implicit ctx: Context): Option[ClassfileParser.Embedded] = {
+ val (classRoot, moduleRoot) = rootDenots(root.asClass)
+ new ClassfileParser(classfile, classRoot, moduleRoot)(ctx).run()
+ }
+}
+
+class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader {
+ def description = "source file " + srcfile.toString
+ override def sourceFileOrNull = srcfile
+ def doComplete(root: SymDenotation)(implicit ctx: Context): Unit = unsupported("doComplete")
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala
new file mode 100644
index 000000000..b5bd196d2
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala
@@ -0,0 +1,602 @@
+package dotty.tools
+package dotc
+package core
+
+import Periods._
+import Names._
+import Scopes._
+import Flags._
+import java.lang.AssertionError
+import Decorators._
+import Symbols._
+import Contexts._
+import SymDenotations._
+import printing.Texts._
+import printing.Printer
+import Types._
+import Annotations._
+import util.Positions._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import ast.tpd.Tree
+import ast.TreeTypeMap
+import Constants.Constant
+import Denotations.{ Denotation, SingleDenotation, MultiDenotation }
+import collection.mutable
+import io.AbstractFile
+import language.implicitConversions
+import util.{NoSource, DotClass}
+
+/** Creation methods for symbols */
+trait Symbols { this: Context =>
+
+// ---- Factory methods for symbol creation ----------------------
+//
+// All symbol creations should be done via the next two methods.
+
+ /** Create a symbol without a denotation.
+ * Note this uses a cast instead of a direct type refinement because
+ * it's debug-friendlier not to create an anonymous class here.
+ */
+ def newNakedSymbol[N <: Name](coord: Coord = NoCoord)(implicit ctx: Context): Symbol { type ThisName = N } =
+ new Symbol(coord, ctx.nextId).asInstanceOf[Symbol { type ThisName = N }]
+
+ /** Create a class symbol without a denotation. */
+ def newNakedClassSymbol(coord: Coord = NoCoord, assocFile: AbstractFile = null)(implicit ctx: Context) =
+ new ClassSymbol(coord, assocFile, ctx.nextId)
+
+// ---- Symbol creation methods ----------------------------------
+
+ /** Create a symbol from its fields (info may be lazy) */
+ def newSymbol[N <: Name](
+ owner: Symbol,
+ name: N,
+ flags: FlagSet,
+ info: Type,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord): Symbol { type ThisName = N } = {
+ val sym = newNakedSymbol[N](coord)
+ val denot = SymDenotation(sym, owner, name, flags, info, privateWithin)
+ sym.denot = denot
+ sym
+ }
+
+ /** Create a class symbol from a function producing its denotation */
+ def newClassSymbolDenoting(denotFn: ClassSymbol => SymDenotation, coord: Coord = NoCoord, assocFile: AbstractFile = null): ClassSymbol = {
+ val cls = newNakedClassSymbol(coord, assocFile)
+ cls.denot = denotFn(cls)
+ cls
+ }
+
+ /** Create a class symbol from its non-info fields and a function
+ * producing its info (the produced info may be lazy).
+ */
+ def newClassSymbol(
+ owner: Symbol,
+ name: TypeName,
+ flags: FlagSet,
+ infoFn: ClassSymbol => Type,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): ClassSymbol
+ = {
+ val cls = newNakedClassSymbol(coord, assocFile)
+ val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin)
+ cls.denot = denot
+ cls
+ }
+
+ /** Create a class symbol from its non-info fields and the fields of its info. */
+ def newCompleteClassSymbol(
+ owner: Symbol,
+ name: TypeName,
+ flags: FlagSet,
+ parents: List[TypeRef],
+ decls: Scope = newScope,
+ selfInfo: Type = NoType,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): ClassSymbol =
+ newClassSymbol(
+ owner, name, flags,
+ ClassInfo(owner.thisType, _, parents, decls, selfInfo),
+ privateWithin, coord, assocFile)
+
+ /** Same as `newCompleteClassSymbol` except that `parents` can be a list of arbitrary
+ * types which get normalized into type refs and parameter bindings.
+ */
+ def newNormalizedClassSymbol(
+ owner: Symbol,
+ name: TypeName,
+ flags: FlagSet,
+ parentTypes: List[Type],
+ decls: Scope = newScope,
+ selfInfo: Type = NoType,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): ClassSymbol = {
+ def completer = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val cls = denot.asClass.classSymbol
+ val decls = newScope
+ val parentRefs: List[TypeRef] = normalizeToClassRefs(parentTypes, cls, decls)
+ denot.info = ClassInfo(owner.thisType, cls, parentRefs, decls)
+ }
+ }
+ newClassSymbol(owner, name, flags, completer, privateWithin, coord, assocFile)
+ }
+
+ /** Create a module symbol with associated module class
+ * from its non-info fields and a function producing the info
+ * of the module class (this info may be lazy).
+ */
+ def newModuleSymbol(
+ owner: Symbol,
+ name: TermName,
+ modFlags: FlagSet,
+ clsFlags: FlagSet,
+ infoFn: (TermSymbol, ClassSymbol) => Type, // typically a ModuleClassCompleterWithDecls
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): TermSymbol
+ = {
+ val base = owner.thisType
+ val module = newNakedSymbol[TermName](coord)
+ val modcls = newNakedClassSymbol(coord, assocFile)
+ val modclsFlags = clsFlags | ModuleClassCreationFlags
+ val modclsName = name.toTypeName.adjustIfModuleClass(modclsFlags)
+ val cdenot = SymDenotation(
+ modcls, owner, modclsName, modclsFlags,
+ infoFn(module, modcls), privateWithin)
+ val mdenot = SymDenotation(
+ module, owner, name, modFlags | ModuleCreationFlags,
+ if (cdenot.isCompleted) TypeRef.withSymAndName(owner.thisType, modcls, modclsName)
+ else new ModuleCompleter(modcls))
+ module.denot = mdenot
+ modcls.denot = cdenot
+ module
+ }
+
+ /** Create a module symbol with associated module class
+ * from its non-info fields and the fields of the module class info.
+ * @param flags The combined flags of the module and the module class
+ * These are masked with RetainedModuleValFlags/RetainedModuleClassFlags.
+ */
+ def newCompleteModuleSymbol(
+ owner: Symbol,
+ name: TermName,
+ modFlags: FlagSet,
+ clsFlags: FlagSet,
+ parents: List[TypeRef],
+ decls: Scope,
+ privateWithin: Symbol = NoSymbol,
+ coord: Coord = NoCoord,
+ assocFile: AbstractFile = null): TermSymbol =
+ newModuleSymbol(
+ owner, name, modFlags, clsFlags,
+ (module, modcls) => ClassInfo(
+ owner.thisType, modcls, parents, decls, TermRef.withSymAndName(owner.thisType, module, name)),
+ privateWithin, coord, assocFile)
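+ /* Sketch of the resulting pair (hypothetical `object Foo`, illustrative only):
+  *
+  *     val foo = ctx.newCompleteModuleSymbol(
+  *       owner, "Foo".toTermName, EmptyFlags, EmptyFlags, parents = Nil, decls = newScope)
+  *     foo.moduleClass    // the synthetic module class `Foo$`, which carries the members
+  *     foo.info           // a TypeRef to that module class once the class info is completed
+  *
+  *  The module value and its module class are created together; they refer to each
+  *  other through sourceModule / moduleClass.
+  */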
+
+ val companionMethodFlags = Flags.Synthetic | Flags.Private | Flags.Method
+
+ def synthesizeCompanionMethod(name: Name, target: SymDenotation, owner: SymDenotation)(implicit ctx: Context) =
+ if (owner.exists && target.exists && !owner.isAbsent && !target.isAbsent) {
+ val existing = owner.unforcedDecls.lookup(name)
+
+ existing.orElse{
+ ctx.newSymbol(owner.symbol, name, companionMethodFlags , ExprType(target.typeRef))
+ }
+ } else NoSymbol
+
+ /** Create a package symbol with associated package class
+ * from its non-info fields and a lazy type for loading the package's members.
+ */
+ def newPackageSymbol(
+ owner: Symbol,
+ name: TermName,
+ infoFn: (TermSymbol, ClassSymbol) => LazyType): TermSymbol =
+ newModuleSymbol(owner, name, PackageCreationFlags, PackageCreationFlags, infoFn)
+
+ /** Create a package symbol with associated package class
+ * from its non-info fields and its member scope.
+ */
+ def newCompletePackageSymbol(
+ owner: Symbol,
+ name: TermName,
+ modFlags: FlagSet = EmptyFlags,
+ clsFlags: FlagSet = EmptyFlags,
+ decls: Scope = newScope): TermSymbol =
+ newCompleteModuleSymbol(
+ owner, name,
+ modFlags | PackageCreationFlags, clsFlags | PackageCreationFlags,
+ Nil, decls)
+
+
+ /** Create a stub symbol that will issue a missing reference error
+ * when an attempt is made to complete it.
+ */
+ def newStubSymbol(owner: Symbol, name: Name, file: AbstractFile = null): Symbol = {
+ def stubCompleter = new StubInfo()
+ val normalizedOwner = if (owner is ModuleVal) owner.moduleClass else owner
+ println(s"creating stub for ${name.show}, owner = ${normalizedOwner.denot.debugString}, file = $file")
+ println(s"decls = ${normalizedOwner.unforcedDecls.toList.map(_.debugString).mkString("\n ")}") // !!! DEBUG
+ //if (base.settings.debug.value) throw new Error()
+ val stub = name match {
+ case name: TermName =>
+ newModuleSymbol(normalizedOwner, name, EmptyFlags, EmptyFlags, stubCompleter, assocFile = file)
+ case name: TypeName =>
+ newClassSymbol(normalizedOwner, name, EmptyFlags, stubCompleter, assocFile = file)
+ }
+ stubs = stub :: stubs
+ stub
+ }
+
+ /** Create the local template dummy of given class `cls`.
+ * In a template
+ *
+ * trait T { val fld: Int; { val x: Int = 2 }; val fld2 = { val y = 2; y }}
+ *
+ * the owner of `x` is the local dummy of the template. The owner of the local
+ * dummy is then the class of the template itself. By contrast, the owner of `y`
+ * would be `fld2`. There is a single local dummy per template.
+ */
+ def newLocalDummy(cls: Symbol, coord: Coord = NoCoord) =
+ newSymbol(cls, nme.localDummyName(cls), EmptyFlags, NoType)
+
+ /** Create an import symbol pointing back to given qualifier `expr`. */
+ def newImportSymbol(owner: Symbol, expr: Tree, coord: Coord = NoCoord) =
+ newSymbol(owner, nme.IMPORT, EmptyFlags, ImportType(expr), coord = coord)
+
+ /** Create a class constructor symbol for given class `cls`. */
+ def newConstructor(cls: ClassSymbol, flags: FlagSet, paramNames: List[TermName], paramTypes: List[Type], privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord) =
+ newSymbol(cls, nme.CONSTRUCTOR, flags | Method, MethodType(paramNames, paramTypes)(_ => cls.typeRef), privateWithin, coord)
+
+ /** Create an empty default constructor symbol for given class `cls`. */
+ def newDefaultConstructor(cls: ClassSymbol) =
+ newConstructor(cls, EmptyFlags, Nil, Nil)
+
+ /** Create a symbol representing a selftype declaration for class `cls`. */
+ def newSelfSym(cls: ClassSymbol, name: TermName = nme.WILDCARD, selfInfo: Type = NoType): TermSymbol =
+ ctx.newSymbol(cls, name, SelfSymFlags, selfInfo orElse cls.classInfo.selfType, coord = cls.coord)
+
+ /** Create new type parameters with given owner, names, and flags.
+ * @param boundsFn A function that, given type refs to the newly created
+ * parameters returns a list of their bounds.
+ */
+ def newTypeParams(
+ owner: Symbol,
+ names: List[TypeName],
+ flags: FlagSet,
+ boundsFn: List[TypeRef] => List[Type]): List[TypeSymbol] = {
+
+ val tparamBuf = new mutable.ListBuffer[TypeSymbol]
+ val trefBuf = new mutable.ListBuffer[TypeRef]
+ for (name <- names) {
+ val tparam = newNakedSymbol[TypeName](NoCoord)
+ tparamBuf += tparam
+ trefBuf += TypeRef.withSymAndName(owner.thisType, tparam, name)
+ }
+ val tparams = tparamBuf.toList
+ val bounds = boundsFn(trefBuf.toList)
+ for ((name, tparam, bound) <- (names, tparams, bounds).zipped)
+ tparam.denot = SymDenotation(tparam, owner, name, flags | owner.typeParamCreationFlags, bound)
+ tparams
+ }
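+ /* Usage sketch (hypothetical owner, illustrative only): the bounds are computed from
+  * the freshly created parameter refs, which is what allows parameters whose bounds
+  * refer to each other:
+  *
+  *     // creates A, B with A <: Any and B <: A
+  *     val tparams = newTypeParams(owner, List("A".toTypeName, "B".toTypeName), EmptyFlags,
+  *       trefs => List(TypeBounds.empty, TypeBounds.upper(trefs.head)))
+  */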
+
+ /** Create a new skolem symbol. This is not the same as SkolemType, even though the
+ * motivation (creating a singleton reference to a type) is similar.
+ */
+ def newSkolem(tp: Type) = newSymbol(defn.RootClass, nme.SKOLEM, SyntheticArtifact | Permanent, tp)
+
+ def newErrorSymbol(owner: Symbol, name: Name) =
+ newSymbol(owner, name, SyntheticArtifact,
+ if (name.isTypeName) TypeAlias(ErrorType) else ErrorType)
+
+ /** Map given symbols, subjecting their attributes to the mappings
+ * defined in the given TreeTypeMap `ttmap`.
+ * Cross symbol references are brought over from originals to copies.
+ * Do not copy any symbols if all attributes of all symbols stay the same.
+ */
+ def mapSymbols(originals: List[Symbol], ttmap: TreeTypeMap, mapAlways: Boolean = false): List[Symbol] =
+ if (originals.forall(sym =>
+ (ttmap.mapType(sym.info) eq sym.info) &&
+ !(ttmap.oldOwners contains sym.owner)) && !mapAlways)
+ originals
+ else {
+ val copies: List[Symbol] = for (original <- originals) yield
+ original match {
+ case original: ClassSymbol =>
+ newNakedClassSymbol(original.coord, original.assocFile)
+ case _ =>
+ newNakedSymbol[original.ThisName](original.coord)
+ }
+ val ttmap1 = ttmap.withSubstitution(originals, copies)
+ (originals, copies).zipped foreach {(original, copy) =>
+ copy.denot = original.denot // preliminary denotation, so that we can access symbols in subsequent transform
+ }
+ (originals, copies).zipped foreach {(original, copy) =>
+ val odenot = original.denot
+ val oinfo = original.info match {
+ case ClassInfo(pre, _, parents, decls, selfInfo) =>
+ assert(original.isClass)
+ ClassInfo(pre, copy.asClass, parents, decls.cloneScope, selfInfo)
+ case oinfo => oinfo
+ }
+ copy.denot = odenot.copySymDenotation(
+ symbol = copy,
+ owner = ttmap1.mapOwner(odenot.owner),
+ initFlags = odenot.flags &~ Frozen | Fresh,
+ info = ttmap1.mapType(oinfo),
+ privateWithin = ttmap1.mapOwner(odenot.privateWithin), // since this refers to outer symbols, need not include copies (from->to) in ownermap here.
+ annotations = odenot.annotations.mapConserve(ttmap1.apply))
+ }
+ copies
+ }
+
+// ----- Locating predefined symbols ----------------------------------------
+
+ def requiredPackage(path: PreName): TermSymbol =
+ base.staticRef(path.toTermName).requiredSymbol(_ is Package).asTerm
+
+ def requiredPackageRef(path: PreName): TermRef = requiredPackage(path).termRef
+
+ def requiredClass(path: PreName): ClassSymbol =
+ base.staticRef(path.toTypeName).requiredSymbol(_.isClass).asClass
+
+ def requiredClassRef(path: PreName): TypeRef = requiredClass(path).typeRef
+
+ /** Get ClassSymbol if class is either defined in current compilation run
+ * or present on classpath.
+ * Returns NoSymbol otherwise. */
+ def getClassIfDefined(path: PreName): Symbol =
+ base.staticRef(path.toTypeName, generateStubs = false).requiredSymbol(_.isClass, generateStubs = false)
+
+ def requiredModule(path: PreName): TermSymbol =
+ base.staticRef(path.toTermName).requiredSymbol(_ is Module).asTerm
+
+ def requiredModuleRef(path: PreName): TermRef = requiredModule(path).termRef
+}
+
+object Symbols {
+
+ implicit def eqSymbol: Eq[Symbol, Symbol] = Eq
+
+ /** A Symbol represents a Scala definition/declaration or a package.
+ * @param coord The coordinates of the symbol (a position or an index)
+ * @param id A unique identifier of the symbol (unique per ContextBase)
+ */
+ class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with TypeParamInfo with printing.Showable {
+
+ type ThisName <: Name
+
+ //assert(id != 4285)
+
+ /** The last denotation of this symbol */
+ private[this] var lastDenot: SymDenotation = _
+
+ /** Set the denotation of this symbol */
+ private[core] def denot_=(d: SymDenotation) =
+ lastDenot = d
+
+ /** The current denotation of this symbol */
+ final def denot(implicit ctx: Context): SymDenotation = {
+ var denot = lastDenot
+ if (!(denot.validFor contains ctx.period)) {
+ denot = denot.current.asInstanceOf[SymDenotation]
+ lastDenot = denot
+ }
+ denot
+ }
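+ /* Added commentary: `lastDenot` acts as a one-element cache keyed by validity
+  * period. When the current context's period falls outside `lastDenot.validFor`,
+  * the denotation is brought forward with `current` and re-cached, so repeated
+  * accesses within one period cost a single field read.
+  */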
+
+ private[core] def defRunId: RunId =
+ if (lastDenot == null) NoRunId else lastDenot.validFor.runId
+
+ /** Does this symbol come from a currently compiled source file? */
+ final def isDefinedInCurrentRun(implicit ctx: Context): Boolean = {
+ pos.exists && defRunId == ctx.runId
+ }
+
+ /** Subclass tests and casts */
+ final def isTerm(implicit ctx: Context): Boolean =
+ (if (defRunId == ctx.runId) lastDenot else denot).isTerm
+
+ final def isType(implicit ctx: Context): Boolean =
+ (if (defRunId == ctx.runId) lastDenot else denot).isType
+
+ final def isClass: Boolean = isInstanceOf[ClassSymbol]
+
+ final def asTerm(implicit ctx: Context): TermSymbol = { assert(isTerm, s"asTerm called on not-a-Term $this" ); asInstanceOf[TermSymbol] }
+ final def asType(implicit ctx: Context): TypeSymbol = { assert(isType, s"asType called on not-a-Type $this"); asInstanceOf[TypeSymbol] }
+ final def asClass: ClassSymbol = asInstanceOf[ClassSymbol]
+
+ final def isFresh(implicit ctx: Context) =
+ lastDenot != null && (lastDenot is Fresh)
+
+ /** Special cased here, because it may be used on naked symbols in substituters */
+ final def isStatic(implicit ctx: Context): Boolean =
+ lastDenot != null && denot.isStatic
+
+ /** A unique, densely packed integer tag for each class symbol, -1
+ * for all other symbols. To save memory, this method
+ * should be called only if the class is a superclass of some other class.
+ */
+ def superId(implicit ctx: Context): Int = -1
+
+ /** This symbol entered into owner's scope (owner must be a class). */
+ final def entered(implicit ctx: Context): this.type = {
+ assert(this.owner.isClass, s"symbol ($this) entered the scope of non-class owner ${this.owner}") // !!! DEBUG
+ this.owner.asClass.enter(this)
+ if (this.is(Module, butNot = Package)) this.owner.asClass.enter(this.moduleClass)
+ this
+ }
+
+ /** Enter this symbol in its class owner after given `phase`. Create a fresh
+ * denotation for its owner class if the class does not yet have one
+ * that starts being valid after `phase`.
+ * @pre Symbol is a class member
+ */
+ def enteredAfter(phase: DenotTransformer)(implicit ctx: Context): this.type =
+ if (ctx.phaseId != phase.next.id) enteredAfter(phase)(ctx.withPhase(phase.next))
+ else {
+ if (this.owner.is(Package)) {
+ denot.validFor |= InitialPeriod
+ if (this is Module) this.moduleClass.validFor |= InitialPeriod
+ }
+ else this.owner.asClass.ensureFreshScopeAfter(phase)
+ entered
+ }
+
+ /** This symbol, if it exists, otherwise the result of evaluating `that` */
+ def orElse(that: => Symbol)(implicit ctx: Context) =
+ if (this.exists) this else that
+
+ /** If this symbol satisfies predicate `p` this symbol, otherwise `NoSymbol` */
+ def filter(p: Symbol => Boolean): Symbol = if (p(this)) this else NoSymbol
+
+ /** The current name of this symbol */
+ final def name(implicit ctx: Context): ThisName = denot.name.asInstanceOf[ThisName]
+
+ /** The source or class file from which this class or
+ * the class containing this symbol was generated, null if not applicable.
+ * Overridden in ClassSymbol
+ */
+ def associatedFile(implicit ctx: Context): AbstractFile =
+ denot.topLevelClass.symbol.associatedFile
+
+ /** The class file from which this class was generated, null if not applicable. */
+ final def binaryFile(implicit ctx: Context): AbstractFile = {
+ val file = associatedFile
+ if (file != null && file.path.endsWith("class")) file else null
+ }
+
+ /** The source file from which this class was generated, null if not applicable. */
+ final def sourceFile(implicit ctx: Context): AbstractFile = {
+ val file = associatedFile
+ if (file != null && !file.path.endsWith("class")) file
+ else denot.topLevelClass.getAnnotation(defn.SourceFileAnnot) match {
+ case Some(sourceAnnot) => sourceAnnot.argumentConstant(0) match {
+ case Some(Constant(path: String)) => AbstractFile.getFile(path)
+ case none => null
+ }
+ case none => null
+ }
+ }
+
+ /** The position of this symbol, or NoPosition if the symbol was not loaded
+ * from source.
+ */
+ def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition
+
+ // TypeParamInfo methods
+ def isTypeParam(implicit ctx: Context) = denot.is(TypeParam)
+ def paramName(implicit ctx: Context) = name.asTypeName
+ def paramBounds(implicit ctx: Context) = denot.info.bounds
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = denot.infoOrCompleter
+ def paramVariance(implicit ctx: Context) = denot.variance
+ def paramRef(implicit ctx: Context) = denot.typeRef
+
+// -------- Printing --------------------------------------------------------
+
+ /** The prefix string to be used when displaying this symbol without denotation */
+ protected def prefixString = "Symbol"
+
+ override def toString: String =
+ if (lastDenot == null) s"Naked$prefixString#$id"
+ else lastDenot.toString// + "#" + id // !!! DEBUG
+
+ def toText(printer: Printer): Text = printer.toText(this)
+
+ def showLocated(implicit ctx: Context): String = ctx.locatedText(this).show
+ def showExtendedLocation(implicit ctx: Context): String = ctx.extendedLocationText(this).show
+ def showDcl(implicit ctx: Context): String = ctx.dclText(this).show
+ def showKind(implicit ctx: Context): String = ctx.kindString(this)
+ def showName(implicit ctx: Context): String = ctx.nameString(this)
+ def showFullName(implicit ctx: Context): String = ctx.fullNameString(this)
+
+ override def hashCode() = id // for debugging.
+ }
+
+ type TermSymbol = Symbol { type ThisName = TermName }
+ type TypeSymbol = Symbol { type ThisName = TypeName }
+
+ class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile, id: Int)
+ extends Symbol(coord, id) {
+
+ type ThisName = TypeName
+
+ /** The source or class file from which this class was generated, null if not applicable. */
+ override def associatedFile(implicit ctx: Context): AbstractFile =
+ if (assocFile != null || (this.owner is PackageClass) || this.isEffectiveRoot) assocFile
+ else super.associatedFile
+
+ final def classDenot(implicit ctx: Context): ClassDenotation =
+ denot.asInstanceOf[ClassDenotation]
+
+ private var superIdHint: Int = -1
+
+ override def superId(implicit ctx: Context): Int = {
+ val hint = superIdHint
+ if (hint >= 0 && hint <= ctx.lastSuperId && (ctx.classOfId(hint) eq this))
+ hint
+ else {
+ val id = ctx.superIdOfClass get this match {
+ case Some(id) =>
+ id
+ case None =>
+ val id = ctx.nextSuperId
+ ctx.superIdOfClass(this) = id
+ ctx.classOfId(id) = this
+ id
+ }
+ superIdHint = id
+ id
+ }
+ }
+
+ override protected def prefixString = "ClassSymbol"
+ }
+
+ class ErrorSymbol(val underlying: Symbol, msg: => String)(implicit ctx: Context) extends Symbol(NoCoord, ctx.nextId) {
+ type ThisName = underlying.ThisName
+ denot = underlying.denot
+ }
+
+ @sharable object NoSymbol extends Symbol(NoCoord, 0) {
+ denot = NoDenotation
+
+ override def associatedFile(implicit ctx: Context): AbstractFile = NoSource.file
+ }
+
+ implicit class Copier[N <: Name](sym: Symbol { type ThisName = N })(implicit ctx: Context) {
+ /** Copy a symbol, overriding selective fields */
+ def copy(
+ owner: Symbol = sym.owner,
+ name: N = sym.name,
+ flags: FlagSet = sym.flags,
+ info: Type = sym.info,
+ privateWithin: Symbol = sym.privateWithin,
+ coord: Coord = sym.coord,
+ associatedFile: AbstractFile = sym.associatedFile): Symbol =
+ if (sym.isClass)
+ ctx.newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord, associatedFile)
+ else
+ ctx.newSymbol(owner, name, flags, info, privateWithin, coord)
+ }
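+
+ // Usage sketch (illustrative): the copier clones a symbol while overriding selected
+ // fields, e.g.
+ //
+ //   val moved = sym.copy(owner = newOwner) // same name, flags, info
+ //   val marked = sym.copy(flags = sym.flags | Synthetic) // same everything but flags
+ //
+ // `newOwner` is a stand-in for whatever owner symbol the caller has at hand.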
+
+ /** Makes all denotation operations available on symbols */
+ implicit def toDenot(sym: Symbol)(implicit ctx: Context): SymDenotation = sym.denot
+
+ /** Makes all class denotations available on class symbols */
+ implicit def toClassDenot(cls: ClassSymbol)(implicit ctx: Context): ClassDenotation = cls.classDenot
+
+ /** The Definitions object */
+ def defn(implicit ctx: Context): Definitions = ctx.definitions
+
+ /** The current class */
+ def currentClass(implicit ctx: Context): ClassSymbol = ctx.owner.enclosingClass.asClass
+
+ @sharable var stubs: List[Symbol] = Nil // diagnostic only
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
new file mode 100644
index 000000000..70819e590
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -0,0 +1,688 @@
+package dotty.tools.dotc
+package core
+
+import Types._
+import Contexts._
+import Symbols._
+import SymDenotations.{LazyType, TypeParamsCompleter}
+import Decorators._
+import util.Stats._
+import util.common._
+import Names._
+import NameOps._
+import Flags._
+import StdNames.tpnme
+import util.Positions.Position
+import config.Printers.core
+import collection.mutable
+import dotty.tools.dotc.config.Config
+import java.util.NoSuchElementException
+
+object TypeApplications {
+
+ /** Assert type is not a TypeBounds instance and return it unchanged */
+ val noBounds = (tp: Type) => tp match {
+ case tp: TypeBounds => throw new AssertionError("no TypeBounds allowed")
+ case _ => tp
+ }
+
+ /** If `tp` is a TypeBounds instance return its lower bound else return `tp` */
+ val boundsToLo = (tp: Type) => tp match {
+ case tp: TypeBounds => tp.lo
+ case _ => tp
+ }
+
+ /** If `tp` is a TypeBounds instance return its upper bound else return `tp` */
+ val boundsToHi = (tp: Type) => tp match {
+ case tp: TypeBounds => tp.hi
+ case _ => tp
+ }
+
+ /** Does variance `v1` conform to variance `v2`?
+ * This is the case if the variances are the same or `v2` is nonvariant.
+ */
+ def varianceConforms(v1: Int, v2: Int): Boolean =
+ v1 == v2 || v2 == 0
+
+ /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`?
+ */
+ def varianceConforms(tparam1: TypeParamInfo, tparam2: TypeParamInfo)(implicit ctx: Context): Boolean =
+ varianceConforms(tparam1.paramVariance, tparam2.paramVariance)
+
+ /** Do the variances of type parameters `tparams1` conform to the variances
+ * of corresponding type parameters `tparams2`?
+ * This is only the case if `tparams1` and `tparams2` have the same length.
+ */
+ def variancesConform(tparams1: List[TypeParamInfo], tparams2: List[TypeParamInfo])(implicit ctx: Context): Boolean =
+ tparams1.corresponds(tparams2)(varianceConforms)
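+
+ // For example (illustrative): varianceConforms(1, 1) and varianceConforms(1, 0) hold,
+ // since a nonvariant parameter accepts any variance, whereas varianceConforms(1, -1)
+ // does not; variancesConform is false for lists of different lengths, since
+ // `corresponds` requires them to be equal.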
+
+ /** Extractor for
+ *
+ * [v1 X1: B1, ..., vn Xn: Bn] -> C[X1, ..., Xn]
+ *
+ * where v1, ..., vn and B1, ..., Bn are the variances and bounds of the type parameters
+ * of the class C.
+ *
+ * @param tycon C
+ */
+ object EtaExpansion {
+ def apply(tycon: Type)(implicit ctx: Context) = {
+ assert(tycon.typeParams.nonEmpty, tycon)
+ tycon.EtaExpand(tycon.typeParamSymbols)
+ }
+
+ def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = tp match {
+ case tp @ PolyType(tparams, AppliedType(fn: TypeRef, args)) if (args == tparams.map(_.toArg)) => Some(fn)
+ case _ => None
+ }
+ }
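+
+ // Example (illustrative): applying `EtaExpansion` to the type constructor of
+ // `class Map[K, +V]` yields the type lambda `[K, +V] -> Map[K, V]`, a PolyType whose
+ // result is the constructor applied to its own parameters; `unapply` recognizes exactly
+ // that shape and recovers `Map`.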
+
+ /** Extractor for type application T[U_1, ..., U_n]. This is the refined type
+ *
+ * T { type p_1 v_1= U_1; ...; type p_n v_n= U_n }
+ *
+ * where v_i, p_i are the variances and names of the type parameters of T.
+ */
+ object AppliedType {
+ def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args)
+
+ def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match {
+ case tp: RefinedType =>
+ var refinements: List[RefinedType] = Nil
+ var tycon = tp.stripTypeVar
+ while (tycon.isInstanceOf[RefinedType]) {
+ val rt = tycon.asInstanceOf[RefinedType]
+ refinements = rt :: refinements
+ tycon = rt.parent.stripTypeVar
+ }
+ def collectArgs(tparams: List[TypeParamInfo],
+ refinements: List[RefinedType],
+ argBuf: mutable.ListBuffer[Type]): Option[(Type, List[Type])] = refinements match {
+ case Nil if tparams.isEmpty && argBuf.nonEmpty =>
+ Some((tycon, argBuf.toList))
+ case RefinedType(_, rname, rinfo) :: refinements1
+ if tparams.nonEmpty && rname == tparams.head.paramName =>
+ collectArgs(tparams.tail, refinements1, argBuf += rinfo.argInfo)
+ case _ =>
+ None
+ }
+ collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type])
+ case HKApply(tycon, args) =>
+ Some((tycon, args))
+ case _ =>
+ None
+ }
+ }
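+
+ // Example (illustrative): the application `Map[Int, String]` is encoded as the refined
+ // type `Map { type K = Int; type V = String }` and is taken apart by `unapply` as
+ // `Some((Map, List(Int, String)))`; an application whose constructor is abstract or a
+ // type lambda, such as `F[Int]`, is instead represented as an HKApply and handled by the
+ // second case.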
+
+ /** Adapt all arguments to possible higher-kinded type parameters using `EtaExpandIfHK`.
+ */
+ def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] =
+ if (tparams.isEmpty) args
+ else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramBoundsOrCompleter))
+
+ /** A type map that tries to reduce (part of) the result type of the type lambda `tycon`
+ * with the given `args` (some of which are wildcard arguments represented by type bounds).
+ * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument
+ * `>: L <: H` is substituted for a type lambda parameter `X` only under certain conditions.
+ *
+ * 1. If Mode.AllowLambdaWildcardApply is set:
+ * The wildcard argument is substituted only if `X` appears in a toplevel refinement of the form
+ *
+ * { type A = X }
+ *
+ * and there are no other occurrences of `X` in the reduced type. In that case
+ * the refinement above is replaced by
+ *
+ * { type A >: L <: H }
+ *
+ * The `allReplaced` field indicates whether all occurrences of type lambda parameters
+ * in the reduced type have been replaced with arguments.
+ *
+ * 2. If Mode.AllowLambdaWildcardApply is not set:
+ * All refinements of the form
+ *
+ * { type A = X }
+ *
+ * are replaced by:
+ *
+ * { type A >: L <: H }
+ *
+ * Any other occurrence of `X` in `tycon` is replaced by `H`, if the
+ * occurrence of `X` in `tycon` is covariant or nonvariant, or by `L`,
+ * if the occurrence is contravariant.
+ *
+ * The idea is that the `AllowLambdaWildcardApply` mode is used to check whether
+ * a type can be soundly reduced, and to give an error or warning if that
+ * is not the case. By contrast, the default mode, with `AllowLambdaWildcardApply`
+ * not set, reduces all applications even if this yields a different type, so
+ * its postcondition is that no type parameters of `tycon` appear in the
+ * result type. Using this mode, we can guarantee that `appliedTo` will never
+ * produce a higher-kinded application with a type lambda as type constructor.
+ */
+ class Reducer(tycon: PolyType, args: List[Type])(implicit ctx: Context) extends TypeMap {
+ private var available = (0 until args.length).toSet
+ var allReplaced = true
+ def hasWildcardArg(p: PolyParam) =
+ p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds]
+ def canReduceWildcard(p: PolyParam) =
+ !ctx.mode.is(Mode.AllowLambdaWildcardApply) || available.contains(p.paramNum)
+ def apply(t: Type) = t match {
+ case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && canReduceWildcard(p) =>
+ available -= p.paramNum
+ args(p.paramNum)
+ case p: PolyParam if p.binder == tycon =>
+ args(p.paramNum) match {
+ case TypeBounds(lo, hi) =>
+ if (ctx.mode.is(Mode.AllowLambdaWildcardApply)) { allReplaced = false; p }
+ else if (variance < 0) lo
+ else hi
+ case arg =>
+ arg
+ }
+ case _: TypeBounds | _: HKApply =>
+ val saved = available
+ available = Set()
+ try mapOver(t)
+ finally available = saved
+ case _ =>
+ mapOver(t)
+ }
+ }
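+
+ // Example of the reduction (illustrative): for `([X] -> C { type A = X })[_ <: Int]` the
+ // alias refinement is rewritten to `C { type A <: Int }`, while a non-wildcard argument
+ // would simply be substituted for `X`. Under Mode.AllowLambdaWildcardApply any further
+ // occurrence of `X` sets `allReplaced` to false, so the caller keeps the application as
+ // an HKApply; in the default mode such occurrences are approximated by the lower or
+ // upper bound of the wildcard, depending on the variance of the occurrence.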
+}
+
+import TypeApplications._
+
+/** A decorator that provides methods for modeling type application */
+class TypeApplications(val self: Type) extends AnyVal {
+
+ /** The type parameters of this type are:
+ * For a ClassInfo type, the type parameters of its class.
+ * For a typeref referring to a class, the type parameters of the class.
+ * For a typeref referring to a Lambda class, the type parameters of
+ * its right hand side or upper bound.
+ * For a refinement type, the type parameters of its parent, dropping
+ * any type parameter that is re-bound by the refinement. "Re-bind" means:
+ * The refinement contains a TypeAlias for the type parameter, or
+ * it introduces bounds for the type parameter, and we are not in the
+ * special case of a type Lambda, where a LambdaTrait gets refined
+ * with the bounds on its hk args. See `LambdaAbstract`, where these
+ * types get introduced, and see `isBoundedLambda` below for the test.
+ */
+ final def typeParams(implicit ctx: Context): List[TypeParamInfo] = /*>|>*/ track("typeParams") /*<|<*/ {
+ self match {
+ case self: ClassInfo =>
+ self.cls.typeParams
+ case self: PolyType =>
+ self.typeParams
+ case self: TypeRef =>
+ val tsym = self.symbol
+ if (tsym.isClass) tsym.typeParams
+ else if (!tsym.isCompleting) tsym.info.typeParams
+ else Nil
+ case self: RefinedType =>
+ self.parent.typeParams.filterNot(_.paramName == self.refinedName)
+ case self: RecType =>
+ self.parent.typeParams
+ case _: SingletonType =>
+ Nil
+ case self: WildcardType =>
+ self.optBounds.typeParams
+ case self: TypeProxy =>
+ self.superType.typeParams
+ case _ =>
+ Nil
+ }
+ }
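+
+ // Example (illustrative): for the type constructor `List` this returns the class's type
+ // parameter symbols; for the applied type `List[Int]`, encoded as a RefinedType that
+ // re-binds the parameter, the list is empty; for a PolyType it is the lambda's own
+ // parameters.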
+
+ /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */
+ final def hkTypeParams(implicit ctx: Context): List[TypeParamInfo] =
+ if (isHK) typeParams else Nil
+
+ /** If `self` is a generic class, its type parameter symbols, otherwise Nil */
+ final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = typeParams match {
+ case (_: Symbol) :: _ =>
+ assert(typeParams.forall(_.isInstanceOf[Symbol]))
+ typeParams.asInstanceOf[List[TypeSymbol]]
+ case _ => Nil
+ }
+
+ /** The named type parameters declared or inherited by this type.
+ * These are all uninstantiated named type parameters of this type or one
+ * of its base types.
+ */
+ final def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = self match {
+ case self: ClassInfo =>
+ self.cls.namedTypeParams
+ case self: RefinedType =>
+ self.parent.namedTypeParams.filterNot(_.name == self.refinedName)
+ case self: SingletonType =>
+ Set()
+ case self: TypeProxy =>
+ self.underlying.namedTypeParams
+ case _ =>
+ Set()
+ }
+
+ /** The smallest supertype of this type that instantiates none of the named type parameters
+ * in `params`. That is, for each named type parameter `p` in `params`, either there is
+ * no type field named `p` in this type, or `p` is a named type parameter of this type.
+ * The first case is important for the recursive case of AndTypes, because some of their operands might
+ * be missing the named parameter altogether, but the AndType as a whole can still
+ * contain it.
+ */
+ final def widenToNamedTypeParams(params: Set[TypeSymbol])(implicit ctx: Context): Type = {
+
+ /** Is widening not needed for `tp`? */
+ def isOK(tp: Type) = {
+ val ownParams = tp.namedTypeParams
+ def isMissingOrOpen(param: TypeSymbol) = {
+ val ownParam = tp.nonPrivateMember(param.name).symbol
+ !ownParam.exists || ownParams.contains(ownParam.asType)
+ }
+ params.forall(isMissingOrOpen)
+ }
+
+ /** Widen type by forming the intersection of its widened parents */
+ def widenToParents(tp: Type) = {
+ val parents = tp.parents.map(p =>
+ tp.baseTypeWithArgs(p.symbol).widenToNamedTypeParams(params))
+ parents.reduceLeft(ctx.typeComparer.andType(_, _))
+ }
+
+ if (isOK(self)) self
+ else self match {
+ case self @ AppliedType(tycon, args) if !isOK(tycon) =>
+ widenToParents(self)
+ case self: TypeRef if self.symbol.isClass =>
+ widenToParents(self)
+ case self: RefinedType =>
+ val parent1 = self.parent.widenToNamedTypeParams(params)
+ if (params.exists(_.name == self.refinedName)) parent1
+ else self.derivedRefinedType(parent1, self.refinedName, self.refinedInfo)
+ case self: TypeProxy =>
+ self.superType.widenToNamedTypeParams(params)
+ case self: AndOrType =>
+ self.derivedAndOrType(
+ self.tp1.widenToNamedTypeParams(params), self.tp2.widenToNamedTypeParams(params))
+ }
+ }
+
+ /** Is self type higher-kinded (i.e. of kind != "*")? */
+ def isHK(implicit ctx: Context): Boolean = self.dealias match {
+ case self: TypeRef => self.info.isHK
+ case self: RefinedType => false
+ case self: PolyType => true
+ case self: SingletonType => false
+ case self: TypeVar =>
+ // Using `origin` instead of `underlying`, as is done for typeParams,
+ // avoids having to set ephemeral in some cases.
+ self.origin.isHK
+ case self: WildcardType => self.optBounds.isHK
+ case self: TypeProxy => self.superType.isHK
+ case _ => false
+ }
+
+ /** Dealias type if it can be done without forcing the TypeRef's info */
+ def safeDealias(implicit ctx: Context): Type = self match {
+ case self: TypeRef if self.denot.exists && self.symbol.isAliasType =>
+ self.superType.stripTypeVar.safeDealias
+ case _ =>
+ self
+ }
+
+ /** Lambda abstract `self` with given type parameters. Examples:
+ *
+ * type T[X] = U becomes type T = [X] -> U
+ * type T[X] >: L <: U becomes type T >: L <: ([X] -> U)
+ *
+ * TODO: Handle parameterized lower bounds
+ */
+ def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = {
+ def expand(tp: Type) =
+ PolyType(
+ tparams.map(_.paramName), tparams.map(_.paramVariance))(
+ tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tl.lifted(tparams, tp))
+ if (tparams.isEmpty) self
+ else self match {
+ case self: TypeAlias =>
+ self.derivedTypeAlias(expand(self.alias))
+ case self @ TypeBounds(lo, hi) =>
+ self.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else expand(lo),
+ expand(hi))
+ case _ => expand(self)
+ }
+ }
+
+ /** Convert a type constructor `TC` which has type parameters `T1, ..., Tn`
+ * in a context where type parameters `U1, ..., Un` are expected to the type lambda
+ *
+ * [v1 X1, ..., vn Xn] -> TC[X1, ..., Xn]
+ *
+ * Here, the variances `v1, ..., vn` are those of
+ * - `U1,...,Un` if the variances of `T1,...,Tn` are pairwise compatible with `U1,...,Un`,
+ * - `T1,...,Tn` otherwise.
+ * v1 is compatible with v2, if v1 = v2 or v2 is non-variant.
+ */
+ def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = {
+ val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParamSymbols
+ self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse)
+ //.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}")
+ }
+
+ /** If self is not higher-kinded, eta expand it. */
+ def ensureHK(implicit ctx: Context): Type =
+ if (isHK) self else EtaExpansion(self)
+
+ /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */
+ def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = {
+ val hkParams = bound.hkTypeParams
+ if (hkParams.isEmpty) self
+ else self match {
+ case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length =>
+ EtaExpansion(self)
+ case _ => self
+ }
+ }
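+
+ // Example (illustrative): given `class Functor[F[_]]`, the argument `List` in
+ // `Functor[List]` is checked against a higher-kinded bound, so it is eta expanded to the
+ // type lambda `[X] -> List[X]`; an argument whose number of type parameters does not
+ // match the bound, or that is not a class reference, is left unchanged.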
+
+ /** If argument A and type parameter P are higher-kinded, adapt the variances
+ * of A to those of P, ensuring that the variances of the type lambda A
+ * agree with the variances of corresponding higher-kinded type parameters of P. Example:
+ *
+ * class GenericCompanion[+CC[X]]
+ * GenericCompanion[List]
+ *
+ * with adaptHkVariances, the argument `List` will expand to
+ *
+ * [X] => List[X]
+ *
+ * instead of
+ *
+ * [+X] => List[X]
+ *
+ * even though `List` is covariant. This adaptation is necessary to ignore conflicting
+ * variances in overriding members that have types of hk-type parameters such as
+ * `GenericCompanion[GenTraversable]` or `GenericCompanion[ListBuffer]`.
+ * When checking overriding, we need to validate the subtype relationship
+ *
+ * GenericCompanion[[X] -> ListBuffer[X]] <: GenericCompanion[[+X] -> GenTraversable[X]]
+ *
+ * Without adaptation, this would be false, and hence an overriding error would
+ * result. But with adaptation, the rhs argument will be adapted to
+ *
+ * [X] -> GenTraversable[X]
+ *
+ * which makes the subtype test succeed. The crucial point here is that, since
+ * GenericCompanion only expects a non-variant CC, the fact that GenTraversable
+ * is covariant is irrelevant, so it can be ignored.
+ */
+ def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = {
+ val hkParams = bound.hkTypeParams
+ if (hkParams.isEmpty) self
+ else {
+ def adaptArg(arg: Type): Type = arg match {
+ case arg @ PolyType(tparams, body) if
+ !tparams.corresponds(hkParams)(_.paramVariance == _.paramVariance) &&
+ tparams.corresponds(hkParams)(varianceConforms) =>
+ PolyType(tparams.map(_.paramName), hkParams.map(_.paramVariance))(
+ tl => arg.paramBounds.map(_.subst(arg, tl).bounds),
+ tl => arg.resultType.subst(arg, tl)
+ )
+ case arg @ TypeAlias(alias) =>
+ arg.derivedTypeAlias(adaptArg(alias))
+ case arg @ TypeBounds(lo, hi) =>
+ arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi))
+ case _ =>
+ arg
+ }
+ adaptArg(self)
+ }
+ }
+
+ /** The type representing
+ *
+ * T[U1, ..., Un]
+ *
+ * where
+ * @param self = `T`
+ * @param args = `U1,...,Un`
+ */
+ final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ {
+ val typParams = self.typeParams
+ def matchParams(t: Type, tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): Type = args match {
+ case arg :: args1 =>
+ try {
+ val tparam :: tparams1 = tparams
+ matchParams(RefinedType(t, tparam.paramName, arg.toBounds(tparam)), tparams1, args1)
+ } catch {
+ case ex: MatchError =>
+ println(s"applied type mismatch: $self with underlying ${self.underlyingIfProxy}, args = $args, typeParams = $typParams") // !!! DEBUG
+ //println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}")
+ throw ex
+ }
+ case nil => t
+ }
+ val stripped = self.stripTypeVar
+ val dealiased = stripped.safeDealias
+ if (args.isEmpty || ctx.erasedTypes) self
+ else dealiased match {
+ case dealiased: PolyType =>
+ def tryReduce =
+ if (!args.exists(_.isInstanceOf[TypeBounds])) {
+ val followAlias = Config.simplifyApplications && {
+ dealiased.resType match {
+ case AppliedType(tyconBody, _) =>
+ variancesConform(typParams, tyconBody.typeParams)
+ // Reducing is safe for type inference, as kind of type constructor does not change
+ case _ => false
+ }
+ }
+ if ((dealiased eq stripped) || followAlias) dealiased.instantiate(args)
+ else HKApply(self, args)
+ }
+ else dealiased.resType match {
+ case AppliedType(tycon, args1) if tycon.safeDealias ne tycon =>
+ // In this case we should always dealias since we cannot handle
+ // higher-kinded applications to wildcard arguments.
+ dealiased
+ .derivedPolyType(resType = tycon.safeDealias.appliedTo(args1))
+ .appliedTo(args)
+ case _ =>
+ val reducer = new Reducer(dealiased, args)
+ val reduced = reducer(dealiased.resType)
+ if (reducer.allReplaced) reduced
+ else HKApply(dealiased, args)
+ }
+ tryReduce
+ case dealiased: AndOrType =>
+ dealiased.derivedAndOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args))
+ case dealiased: TypeAlias =>
+ dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args))
+ case dealiased: TypeBounds =>
+ dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), dealiased.hi.appliedTo(args))
+ case dealiased: LazyRef =>
+ LazyRef(() => dealiased.ref.appliedTo(args))
+ case dealiased: WildcardType =>
+ dealiased
+ case dealiased: TypeRef if dealiased.symbol == defn.NothingClass =>
+ dealiased
+ case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] =>
+ HKApply(self, args)
+ case dealiased =>
+ matchParams(dealiased, typParams, args)
+ }
+ }
+
+ final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil)
+ final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil)
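+
+ // Example (illustrative): `defn.ListType.appliedTo(defn.IntType)` yields the refined
+ // encoding `List { type A = Int }`; applying the type lambda `[X] -> Either[String, X]`
+ // to `Int` reduces to `Either[String, Int]`; and an application whose constructor is an
+ // abstract type or a lambda parameter is kept as an HKApply.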
+
+ final def applyIfParameterized(args: List[Type])(implicit ctx: Context): Type =
+ if (typeParams.nonEmpty) appliedTo(args) else self
+
+ /** A cycle-safe version of `appliedTo` where computing type parameters does not force
+ * the type constructor. Instead, if the type constructor is completing, we make
+ * up hk type parameters matching the arguments. This is needed when unpickling
+ * Scala2 files such as `scala.collection.generic.MapFactory`.
+ */
+ final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = self match {
+ case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting =>
+ HKApply(self, args)
+ case _ =>
+ appliedTo(args)
+ }
+
+ /** Turn this type, which is used as an argument for
+ * type parameter `tparam`, into a TypeBounds RHS
+ */
+ final def toBounds(tparam: TypeParamInfo)(implicit ctx: Context): TypeBounds = self match {
+ case self: TypeBounds => // this can happen for wildcard args
+ self
+ case _ =>
+ val v = tparam.paramVariance
+ /* Not needed.
+ if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self)
+ else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self)
+ else
+ */
+ TypeAlias(self, v)
+ }
+
+ /** The type arguments of this type's base type instance wrt. `base`.
+ * Existential types in arguments are returned as TypeBounds instances.
+ */
+ final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] =
+ if (self derivesFrom base)
+ self.dealias match {
+ case self: TypeRef if !self.symbol.isClass => self.superType.baseArgInfos(base)
+ case self: HKApply => self.superType.baseArgInfos(base)
+ case _ => base.typeParams.map(param => self.member(param.name).info.argInfo)
+ }
+ else
+ Nil
+
+ /** The type arguments of this type's base type instance wrt. `base`.
+ * Existential types in arguments are disallowed.
+ */
+ final def baseArgTypes(base: Symbol)(implicit ctx: Context): List[Type] =
+ baseArgInfos(base) mapConserve noBounds
+
+ /** The type arguments of this type's base type instance wrt. `base`.
+ * Existential types in arguments are approximated by their lower bound.
+ */
+ final def baseArgTypesLo(base: Symbol)(implicit ctx: Context): List[Type] =
+ baseArgInfos(base) mapConserve boundsToLo
+
+ /** The type arguments of this type's base type instance wrt. `base`.
+ * Existential types in arguments are approximated by their upper bound.
+ */
+ final def baseArgTypesHi(base: Symbol)(implicit ctx: Context): List[Type] =
+ baseArgInfos(base) mapConserve boundsToHi
+
+ /** The base type including all type arguments and applicable refinements
+ * of this type. Refinements are applicable if they refine a member of
+ * the parent type which furthermore is not a name-mangled type parameter.
+ * Existential types in arguments are returned as TypeBounds instances.
+ */
+ final def baseTypeWithArgs(base: Symbol)(implicit ctx: Context): Type = ctx.traceIndented(s"btwa ${self.show} wrt $base", core, show = true) {
+ def default = self.baseTypeRef(base).appliedTo(baseArgInfos(base))
+ self match {
+ case tp: TypeRef =>
+ tp.info match {
+ case TypeBounds(_, hi) => hi.baseTypeWithArgs(base)
+ case _ => default
+ }
+ case tp @ RefinedType(parent, name, _) if !tp.member(name).symbol.is(ExpandedTypeParam) =>
+ tp.wrapIfMember(parent.baseTypeWithArgs(base))
+ case tp: TermRef =>
+ tp.underlying.baseTypeWithArgs(base)
+ case tp: HKApply =>
+ tp.superType.baseTypeWithArgs(base)
+ case AndType(tp1, tp2) =>
+ tp1.baseTypeWithArgs(base) & tp2.baseTypeWithArgs(base)
+ case OrType(tp1, tp2) =>
+ tp1.baseTypeWithArgs(base) | tp2.baseTypeWithArgs(base)
+ case _ =>
+ default
+ }
+ }
+
+ /** Translate a type of the form From[T] to To[T], keeping other types as they are.
+ * `from` and `to` must be static classes, both with one type parameter, and the same variance.
+ * Do the same for by-name types => From[T] and => To[T]
+ */
+ def translateParameterized(from: ClassSymbol, to: ClassSymbol)(implicit ctx: Context): Type = self match {
+ case self @ ExprType(tp) =>
+ self.derivedExprType(tp.translateParameterized(from, to))
+ case _ =>
+ if (self.derivesFrom(from))
+ if (ctx.erasedTypes) to.typeRef
+ else RefinedType(to.typeRef, to.typeParams.head.name, self.member(from.typeParams.head.name).info)
+ else self
+ }
+
+ /** If this is a repeated parameter type, its underlying Seq type,
+ * or, if isJava is true, Array type, else the type itself.
+ */
+ def underlyingIfRepeated(isJava: Boolean)(implicit ctx: Context): Type =
+ if (self.isRepeatedParam) {
+ val seqClass = if (isJava) defn.ArrayClass else defn.SeqClass
+ translateParameterized(defn.RepeatedParamClass, seqClass)
+ }
+ else self
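+
+ // Example (illustrative): a repeated parameter `xs: Int*` has type `<repeated>[Int]`;
+ // `underlyingIfRepeated(isJava = false)` translates it to `Seq[Int]` and
+ // `underlyingIfRepeated(isJava = true)` to `Array[Int]`, both via translateParameterized.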
+
+ /** If this is an encoding of a (partially) applied type, return its arguments,
+ * otherwise return Nil.
+ * Existential types in arguments are returned as TypeBounds instances.
+ */
+ final def argInfos(implicit ctx: Context): List[Type] = self match {
+ case AppliedType(tycon, args) => args
+ case _ => Nil
+ }
+
+ /** Argument types where existential types in arguments are disallowed */
+ def argTypes(implicit ctx: Context) = argInfos mapConserve noBounds
+
+ /** Argument types where existential types in arguments are approximated by their lower bound */
+ def argTypesLo(implicit ctx: Context) = argInfos mapConserve boundsToLo
+
+ /** Argument types where existential types in arguments are approximated by their upper bound */
+ def argTypesHi(implicit ctx: Context) = argInfos mapConserve boundsToHi
+
+ /** The core type without any type arguments.
+ * @param `typeArgs` must be the type arguments of this type.
+ */
+ final def withoutArgs(typeArgs: List[Type]): Type = self match {
+ case HKApply(tycon, args) => tycon
+ case _ =>
+ typeArgs match {
+ case _ :: typeArgs1 =>
+ val RefinedType(tycon, _, _) = self
+ tycon.withoutArgs(typeArgs1)
+ case nil =>
+ self
+ }
+ }
+
+ /** If this is the image of a type argument, recover the type argument,
+ * otherwise NoType.
+ */
+ final def argInfo(implicit ctx: Context): Type = self match {
+ case self: TypeAlias => self.alias
+ case self: TypeBounds => self
+ case _ => NoType
+ }
+
+ /** If this is a type alias, its underlying type, otherwise the type itself */
+ def dropAlias(implicit ctx: Context): Type = self match {
+ case TypeAlias(alias) => alias
+ case _ => self
+ }
+
+ /** The element type of a sequence or array */
+ def elemType(implicit ctx: Context): Type = self match {
+ case defn.ArrayOf(elemtp) => elemtp
+ case JavaArrayType(elemtp) => elemtp
+ case _ => baseArgInfos(defn.SeqClass).headOption.getOrElse(NoType)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
new file mode 100644
index 000000000..f78820fff
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -0,0 +1,1502 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._
+import Decorators._
+import StdNames.{nme, tpnme}
+import collection.mutable
+import util.{Stats, DotClass, SimpleMap}
+import config.Config
+import config.Printers.{typr, constr, subtyping, noPrinter}
+import TypeErasure.{erasedLub, erasedGlb}
+import TypeApplications._
+import scala.util.control.NonFatal
+
+/** Provides methods to compare types.
+ */
+class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
+ implicit val ctx: Context = initctx
+
+ val state = ctx.typerState
+ import state.constraint
+
+ private var pendingSubTypes: mutable.Set[(Type, Type)] = null
+ private var recCount = 0
+
+ private var needsGc = false
+
+ /** Is a subtype check in progress? In that case we may not
+ * permanently instantiate type variables, because the corresponding
+ * constraint might still be retracted and the instantiation should
+ * then be reversed.
+ */
+ def subtypeCheckInProgress: Boolean = {
+ val result = recCount > 0
+ if (result) {
+ constr.println("*** needsGC ***")
+ needsGc = true
+ }
+ result
+ }
+
+ /** For statistics: count how many isSubTypes are part of successful comparisons */
+ private var successCount = 0
+ private var totalCount = 0
+
+ private var myAnyClass: ClassSymbol = null
+ private var myNothingClass: ClassSymbol = null
+ private var myNullClass: ClassSymbol = null
+ private var myObjectClass: ClassSymbol = null
+ private var myAnyType: TypeRef = null
+ private var myNothingType: TypeRef = null
+
+ def AnyClass = {
+ if (myAnyClass == null) myAnyClass = defn.AnyClass
+ myAnyClass
+ }
+ def NothingClass = {
+ if (myNothingClass == null) myNothingClass = defn.NothingClass
+ myNothingClass
+ }
+ def NullClass = {
+ if (myNullClass == null) myNullClass = defn.NullClass
+ myNullClass
+ }
+ def ObjectClass = {
+ if (myObjectClass == null) myObjectClass = defn.ObjectClass
+ myObjectClass
+ }
+ def AnyType = {
+ if (myAnyType == null) myAnyType = AnyClass.typeRef
+ myAnyType
+ }
+ def NothingType = {
+ if (myNothingType == null) myNothingType = NothingClass.typeRef
+ myNothingType
+ }
+
+ /** Indicates whether a previous subtype check used GADT bounds */
+ var GADTused = false
+
+ /** Record that GADT bounds of `sym` were used in a subtype check.
+ * But exclude constructor type parameters, as these are aliased
+ * to the corresponding class parameters, which does not constitute
+ * a true usage of a GADT symbol.
+ */
+ private def GADTusage(sym: Symbol) = {
+ if (!sym.owner.isConstructor) GADTused = true
+ true
+ }
+
+ // Subtype testing `<:<`
+
+ def topLevelSubType(tp1: Type, tp2: Type): Boolean = {
+ if (tp2 eq NoType) return false
+ if ((tp2 eq tp1) || (tp2 eq WildcardType)) return true
+ try isSubType(tp1, tp2)
+ finally
+ if (Config.checkConstraintsSatisfiable)
+ assert(isSatisfiable, constraint.show)
+ }
+
+ protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) {
+ if (tp2 eq NoType) false
+ else if (tp1 eq tp2) true
+ else {
+ val saved = constraint
+ val savedSuccessCount = successCount
+ try {
+ recCount = recCount + 1
+ val result =
+ if (recCount < Config.LogPendingSubTypesThreshold) firstTry(tp1, tp2)
+ else monitoredIsSubType(tp1, tp2)
+ recCount = recCount - 1
+ if (!result) constraint = saved
+ else if (recCount == 0 && needsGc) {
+ state.gc()
+ needsGc = false
+ }
+ if (Stats.monitored) recordStatistics(result, savedSuccessCount)
+ result
+ } catch {
+ case NonFatal(ex) =>
+ if (ex.isInstanceOf[AssertionError]) showGoal(tp1, tp2)
+ recCount -= 1
+ constraint = saved
+ successCount = savedSuccessCount
+ throw ex
+ }
+ }
+ }
+
+ private def monitoredIsSubType(tp1: Type, tp2: Type) = {
+ if (pendingSubTypes == null) {
+ pendingSubTypes = new mutable.HashSet[(Type, Type)]
+ ctx.log(s"!!! deep subtype recursion involving ${tp1.show} <:< ${tp2.show}, constraint = ${state.constraint.show}")
+ ctx.log(s"!!! constraint = ${constraint.show}")
+ //if (ctx.settings.YnoDeepSubtypes.value) {
+ // new Error("deep subtype").printStackTrace()
+ //}
+ assert(!ctx.settings.YnoDeepSubtypes.value)
+ if (Config.traceDeepSubTypeRecursions && !this.isInstanceOf[ExplainingTypeComparer])
+ ctx.log(TypeComparer.explained(implicit ctx => ctx.typeComparer.isSubType(tp1, tp2)))
+ }
+ val p = (tp1, tp2)
+ !pendingSubTypes(p) && {
+ try {
+ pendingSubTypes += p
+ firstTry(tp1, tp2)
+ } finally {
+ pendingSubTypes -= p
+ }
+ }
+ }
+
+ private def firstTry(tp1: Type, tp2: Type): Boolean = tp2 match {
+ case tp2: NamedType =>
+ def compareNamed(tp1: Type, tp2: NamedType): Boolean = {
+ implicit val ctx: Context = this.ctx
+ tp2.info match {
+ case info2: TypeAlias => isSubType(tp1, info2.alias)
+ case _ => tp1 match {
+ case tp1: NamedType =>
+ tp1.info match {
+ case info1: TypeAlias =>
+ if (isSubType(info1.alias, tp2)) return true
+ if (tp1.prefix.isStable) return false
+ // If tp1.prefix is stable, the alias does contain all information about the original ref, so
+ // there's no need to try something else. (This is important for performance).
+ // To see why we cannot in general stop here, consider:
+ //
+ // trait C { type A }
+ // trait D { type A = String }
+ // (C & D)#A <: C#A
+ //
+ // Following the alias leads to the judgment `String <: C#A` which is false.
+ // However the original judgment should be true.
+ case _ =>
+ }
+ val sym1 =
+ if (tp1.symbol.is(ModuleClass) && tp2.symbol.is(ModuleVal))
+ // For convenience we want X$ <:< X.type
+ // This is safe because X$ self-type is X.type
+ tp1.symbol.companionModule
+ else
+ tp1.symbol
+ if ((sym1 ne NoSymbol) && (sym1 eq tp2.symbol))
+ ctx.erasedTypes ||
+ sym1.isStaticOwner ||
+ isSubType(tp1.prefix, tp2.prefix) ||
+ thirdTryNamed(tp1, tp2)
+ else
+ ( (tp1.name eq tp2.name)
+ && isSubType(tp1.prefix, tp2.prefix)
+ && tp1.signature == tp2.signature
+ && !tp1.isInstanceOf[WithFixedSym]
+ && !tp2.isInstanceOf[WithFixedSym]
+ ) ||
+ thirdTryNamed(tp1, tp2)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ }
+ }
+ compareNamed(tp1, tp2)
+ case tp2: ProtoType =>
+ isMatchedByProto(tp2, tp1)
+ case tp2: BoundType =>
+ tp2 == tp1 || secondTry(tp1, tp2)
+ case tp2: TypeVar =>
+ isSubType(tp1, tp2.underlying)
+ case tp2: WildcardType =>
+ def compareWild = tp2.optBounds match {
+ case TypeBounds(_, hi) => isSubType(tp1, hi)
+ case NoType => true
+ }
+ compareWild
+ case tp2: LazyRef =>
+ !tp2.evaluating && isSubType(tp1, tp2.ref)
+ case tp2: AnnotatedType =>
+ isSubType(tp1, tp2.tpe) // todo: refine?
+ case tp2: ThisType =>
+ def compareThis = {
+ val cls2 = tp2.cls
+ tp1 match {
+ case tp1: ThisType =>
+ // We treat two prefixes A.this, B.this as equivalent if
+ // A's selftype derives from B and B's selftype derives from A.
+ val cls1 = tp1.cls
+ cls1.classInfo.selfType.derivesFrom(cls2) &&
+ cls2.classInfo.selfType.derivesFrom(cls1)
+ case tp1: NamedType if cls2.is(Module) && cls2.eq(tp1.widen.typeSymbol) =>
+ cls2.isStaticOwner ||
+ isSubType(tp1.prefix, cls2.owner.thisType) ||
+ secondTry(tp1, tp2)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ }
+ compareThis
+ case tp2: SuperType =>
+ def compareSuper = tp1 match {
+ case tp1: SuperType =>
+ isSubType(tp1.thistpe, tp2.thistpe) &&
+ isSameType(tp1.supertpe, tp2.supertpe)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ compareSuper
+ case AndType(tp21, tp22) =>
+ isSubType(tp1, tp21) && isSubType(tp1, tp22)
+ case OrType(tp21, tp22) =>
+ if (tp21.stripTypeVar eq tp22.stripTypeVar) isSubType(tp1, tp21)
+ else secondTry(tp1, tp2)
+ case TypeErasure.ErasedValueType(tycon1, underlying2) =>
+ def compareErasedValueType = tp1 match {
+ case TypeErasure.ErasedValueType(tycon2, underlying1) =>
+ (tycon1.symbol eq tycon2.symbol) && isSameType(underlying1, underlying2)
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+ compareErasedValueType
+ case ErrorType =>
+ true
+ case _ =>
+ secondTry(tp1, tp2)
+ }
+
+ private def secondTry(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: NamedType =>
+ tp1.info match {
+ case info1: TypeAlias =>
+ if (isSubType(info1.alias, tp2)) return true
+ if (tp1.prefix.isStable) return false
+ case _ =>
+ }
+ thirdTry(tp1, tp2)
+ case tp1: PolyParam =>
+ def flagNothingBound = {
+ if (!frozenConstraint && tp2.isRef(defn.NothingClass) && state.isGlobalCommittable) {
+ def msg = s"!!! instantiated to Nothing: $tp1, constraint = ${constraint.show}"
+ if (Config.failOnInstantiationToNothing) assert(false, msg)
+ else ctx.log(msg)
+ }
+ true
+ }
+ def comparePolyParam =
+ ctx.mode.is(Mode.TypevarsMissContext) ||
+ isSubTypeWhenFrozen(bounds(tp1).hi, tp2) || {
+ if (canConstrain(tp1)) addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound
+ else thirdTry(tp1, tp2)
+ }
+ comparePolyParam
+ case tp1: ThisType =>
+ val cls1 = tp1.cls
+ tp2 match {
+ case tp2: TermRef if cls1.is(Module) && cls1.eq(tp2.widen.typeSymbol) =>
+ cls1.isStaticOwner ||
+ isSubType(cls1.owner.thisType, tp2.prefix) ||
+ thirdTry(tp1, tp2)
+ case _ =>
+ thirdTry(tp1, tp2)
+ }
+ case tp1: SkolemType =>
+ tp2 match {
+ case tp2: SkolemType if !ctx.phase.isTyper && tp1.info <:< tp2.info => true
+ case _ => thirdTry(tp1, tp2)
+ }
+ case tp1: TypeVar =>
+ isSubType(tp1.underlying, tp2)
+ case tp1: WildcardType =>
+ def compareWild = tp1.optBounds match {
+ case TypeBounds(lo, _) => isSubType(lo, tp2)
+ case _ => true
+ }
+ compareWild
+ case tp1: LazyRef =>
+ // If `tp1` is in train of being evaluated, don't force it
+ // because that would cause an AssertionError. Return false instead.
+ // See i859.scala for an example where we hit this case.
+ !tp1.evaluating && isSubType(tp1.ref, tp2)
+ case tp1: AnnotatedType =>
+ isSubType(tp1.tpe, tp2)
+ case AndType(tp11, tp12) =>
+ if (tp11.stripTypeVar eq tp12.stripTypeVar) isSubType(tp11, tp2)
+ else thirdTry(tp1, tp2)
+ case tp1 @ OrType(tp11, tp12) =>
+ def joinOK = tp2.dealias match {
+ case tp12: HKApply =>
+ // If we apply the default algorithm for `A[X] | B[Y] <: C[Z]` where `C` is a
+ // type parameter, we will instantiate `C` to `A` and then fail when comparing
+ // with `B[Y]`. To do the right thing, we need to instantiate `C` to the
+ // common superclass of `A` and `B`.
+ isSubType(tp1.join, tp2)
+ case _ =>
+ false
+ }
+ joinOK || isSubType(tp11, tp2) && isSubType(tp12, tp2)
+ case ErrorType =>
+ true
+ case _ =>
+ thirdTry(tp1, tp2)
+ }
+
+ private def thirdTryNamed(tp1: Type, tp2: NamedType): Boolean = tp2.info match {
+ case TypeBounds(lo2, _) =>
+ def compareGADT: Boolean = {
+ val gbounds2 = ctx.gadt.bounds(tp2.symbol)
+ (gbounds2 != null) &&
+ (isSubTypeWhenFrozen(tp1, gbounds2.lo) ||
+ narrowGADTBounds(tp2, tp1, isUpper = false)) &&
+ GADTusage(tp2.symbol)
+ }
+ ((frozenConstraint || !isCappable(tp1)) && isSubType(tp1, lo2) ||
+ compareGADT ||
+ fourthTry(tp1, tp2))
+
+ case _ =>
+ val cls2 = tp2.symbol
+ if (cls2.isClass) {
+ val base = tp1.baseTypeRef(cls2)
+ if (base.exists && (base ne tp1)) return isSubType(base, tp2)
+ if (cls2 == defn.SingletonClass && tp1.isStable) return true
+ }
+ fourthTry(tp1, tp2)
+ }
+
+ private def thirdTry(tp1: Type, tp2: Type): Boolean = tp2 match {
+ case tp2: NamedType =>
+ thirdTryNamed(tp1, tp2)
+ case tp2: PolyParam =>
+ def comparePolyParam =
+ (ctx.mode is Mode.TypevarsMissContext) ||
+ isSubTypeWhenFrozen(tp1, bounds(tp2).lo) || {
+ if (canConstrain(tp2)) addConstraint(tp2, tp1.widenExpr, fromBelow = true)
+ else fourthTry(tp1, tp2)
+ }
+ comparePolyParam
+ case tp2: RefinedType =>
+ def compareRefinedSlow: Boolean = {
+ val name2 = tp2.refinedName
+ isSubType(tp1, tp2.parent) &&
+ (name2 == nme.WILDCARD || hasMatchingMember(name2, tp1, tp2))
+ }
+ def compareRefined: Boolean = {
+ val tp1w = tp1.widen
+ val skipped2 = skipMatching(tp1w, tp2)
+ if ((skipped2 eq tp2) || !Config.fastPathForRefinedSubtype)
+ tp1 match {
+ case tp1: AndType =>
+ // Delay calling `compareRefinedSlow` because looking up a member
+ // of an `AndType` can lead to a cascade of subtyping checks
+ // This twist is needed to make collection/generic/ParFactory.scala compile
+ fourthTry(tp1, tp2) || compareRefinedSlow
+ case _ =>
+ compareRefinedSlow || fourthTry(tp1, tp2)
+ }
+ else // fast path, in particular for refinements resulting from parameterization.
+ isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) &&
+ isSubType(tp1, skipped2)
+ }
+ compareRefined
+ case tp2: RecType =>
+ def compareRec = tp1.safeDealias match {
+ case tp1: RecType =>
+ val rthis1 = RecThis(tp1)
+ isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1))
+ case _ =>
+ val tp1stable = ensureStableSingleton(tp1)
+ isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable))
+ }
+ compareRec
+ case tp2 @ HKApply(tycon2, args2) =>
+ compareHkApply2(tp1, tp2, tycon2, args2)
+ case tp2 @ PolyType(tparams2, body2) =>
+ def compareHkLambda: Boolean = tp1.stripTypeVar match {
+ case tp1 @ PolyType(tparams1, body1) =>
+ /* Don't compare bounds of lambdas under language:Scala2, or t2994 will fail
+ * The issue is that, logically, bounds should compare contravariantly,
+ * but that would invalidate a pattern exploited in t2994:
+ *
+ * [X0 <: Number] -> Number <:< [X0] -> Any
+ *
+ * Under the new scheme, `[X0] -> Any` is NOT a kind that subsumes
+ * all other bounds. You'd have to write `[X0 >: Any <: Nothing] -> Any` instead.
+ * This might look weird, but is the only logically correct way to do it.
+ *
+ * Note: it would be nice if this could trigger a migration warning, but I
+ * am not sure how, since the code is buried so deep in subtyping logic.
+ */
+ def boundsOK =
+ ctx.scala2Mode ||
+ tparams1.corresponds(tparams2)((tparam1, tparam2) =>
+ isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds))
+ val saved = comparedPolyTypes
+ comparedPolyTypes += tp1
+ comparedPolyTypes += tp2
+ try
+ variancesConform(tparams1, tparams2) &&
+ boundsOK &&
+ isSubType(body1, body2.subst(tp2, tp1))
+ finally comparedPolyTypes = saved
+ case _ =>
+ if (!tp1.isHK) {
+ tp2 match {
+ case EtaExpansion(tycon2) if tycon2.symbol.isClass =>
+ return isSubType(tp1, tycon2)
+ case _ =>
+ }
+ }
+ fourthTry(tp1, tp2)
+ }
+ compareHkLambda
+ case OrType(tp21, tp22) =>
+ // Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22)
+ // and analogously for T1 <: T21 | (T221 & T222)
+ // `|' types to the right of <: are problematic, because
+ // we have to choose one constraint set or another, which might cut off
+ // solutions. The rewriting delays the point where we have to choose.
+ tp21 match {
+ case AndType(tp211, tp212) =>
+ return isSubType(tp1, OrType(tp211, tp22)) && isSubType(tp1, OrType(tp212, tp22))
+ case _ =>
+ }
+ tp22 match {
+ case AndType(tp221, tp222) =>
+ return isSubType(tp1, OrType(tp21, tp221)) && isSubType(tp1, OrType(tp21, tp222))
+ case _ =>
+ }
+ either(isSubType(tp1, tp21), isSubType(tp1, tp22)) || fourthTry(tp1, tp2)
+ case tp2 @ MethodType(_, formals2) =>
+ def compareMethod = tp1 match {
+ case tp1 @ MethodType(_, formals1) =>
+ (tp1.signature consistentParams tp2.signature) &&
+ matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit && // needed?
+ isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ false
+ }
+ compareMethod
+ case tp2 @ ExprType(restpe2) =>
+ def compareExpr = tp1 match {
+ // We allow ()T to be a subtype of => T.
+ // We need some subtype relationship between them so that e.g.
+ // def toString and def toString() don't clash when seen
+ // as members of the same type. And it seems most logical to take
+ // ()T <:< => T, since everything one can do with a => T one can
+ // also do with a ()T by automatic () insertion.
+ case tp1 @ MethodType(Nil, _) => isSubType(tp1.resultType, restpe2)
+ case _ => isSubType(tp1.widenExpr, restpe2)
+ }
+ compareExpr
+ case tp2 @ TypeBounds(lo2, hi2) =>
+ def compareTypeBounds = tp1 match {
+ case tp1 @ TypeBounds(lo1, hi1) =>
+ (tp2.variance > 0 && tp1.variance >= 0 || (lo2 eq NothingType) || isSubType(lo2, lo1)) &&
+ (tp2.variance < 0 && tp1.variance <= 0 || (hi2 eq AnyType) || isSubType(hi1, hi2))
+ case tp1: ClassInfo =>
+ tp2 contains tp1
+ case _ =>
+ false
+ }
+ compareTypeBounds
+ case ClassInfo(pre2, cls2, _, _, _) =>
+ def compareClassInfo = tp1 match {
+ case ClassInfo(pre1, cls1, _, _, _) =>
+ (cls1 eq cls2) && isSubType(pre1, pre2)
+ case _ =>
+ false
+ }
+ compareClassInfo
+ case _ =>
+ fourthTry(tp1, tp2)
+ }
+
+ private def fourthTry(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: TypeRef =>
+ tp1.info match {
+ case TypeBounds(_, hi1) =>
+ def compareGADT = {
+ val gbounds1 = ctx.gadt.bounds(tp1.symbol)
+ (gbounds1 != null) &&
+ (isSubTypeWhenFrozen(gbounds1.hi, tp2) ||
+ narrowGADTBounds(tp1, tp2, isUpper = true)) &&
+ GADTusage(tp1.symbol)
+ }
+ isSubType(hi1, tp2) || compareGADT
+ case _ =>
+ def isNullable(tp: Type): Boolean = tp.widenDealias match {
+ case tp: TypeRef => tp.symbol.isNullableClass
+ case tp: RefinedOrRecType => isNullable(tp.parent)
+ case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2)
+ case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2)
+ case _ => false
+ }
+ (tp1.symbol eq NothingClass) && tp2.isValueTypeOrLambda ||
+ (tp1.symbol eq NullClass) && isNullable(tp2)
+ }
+ case tp1: SingletonType =>
+ /** If `tp2 == p.type` and `p: q.type` then try `tp1 <:< q.type` as a last effort. */
+ def comparePaths = tp2 match {
+ case tp2: TermRef =>
+ tp2.info.widenExpr match {
+ case tp2i: SingletonType =>
+ isSubType(tp1, tp2i) // see z1720.scala for a case where this can arise even in typer.
+ case _ => false
+ }
+ case _ =>
+ false
+ }
+ isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths
+ case tp1: RefinedType =>
+ isNewSubType(tp1.parent, tp2)
+ case tp1: RecType =>
+ isNewSubType(tp1.parent, tp2)
+ case tp1 @ HKApply(tycon1, args1) =>
+ compareHkApply1(tp1, tycon1, args1, tp2)
+ case EtaExpansion(tycon1) =>
+ isSubType(tycon1, tp2)
+ case AndType(tp11, tp12) =>
+ // Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 & T12) <: T2
+ // and analogously for T11 & (T121 | T122) <: T2
+ // `&' types to the left of <: are problematic, because
+ // we have to choose one constraint set or another, which might cut off
+ // solutions. The rewriting delays the point where we have to choose.
+ tp11 match {
+ case OrType(tp111, tp112) =>
+ return isSubType(AndType(tp111, tp12), tp2) && isSubType(AndType(tp112, tp12), tp2)
+ case _ =>
+ }
+ tp12 match {
+ case OrType(tp121, tp122) =>
+ return isSubType(AndType(tp11, tp121), tp2) && isSubType(AndType(tp11, tp122), tp2)
+ case _ =>
+ }
+ either(isSubType(tp11, tp2), isSubType(tp12, tp2))
+ case JavaArrayType(elem1) =>
+ def compareJavaArray = tp2 match {
+ case JavaArrayType(elem2) => isSubType(elem1, elem2)
+ case _ => tp2 isRef ObjectClass
+ }
+ compareJavaArray
+ case tp1: ExprType if ctx.phase.id > ctx.gettersPhase.id =>
+ // getters might have converted T to => T, need to compensate.
+ isSubType(tp1.widenExpr, tp2)
+ case _ =>
+ false
+ }
+
+ /** Subtype test for the hk application `tp2 = tycon2[args2]`.
+ */
+ def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ val tparams = tycon2.typeParams
+ if (tparams.isEmpty) return false // can happen for ill-typed programs, e.g. neg/tcpoly_overloaded.scala
+
+ /** True if `tp1` and `tp2` have compatible type constructors and their
+ * corresponding arguments are subtypes relative to their variance (see `isSubArgs`).
+ */
+ def isMatchingApply(tp1: Type): Boolean = tp1 match {
+ case HKApply(tycon1, args1) =>
+ tycon1.dealias match {
+ case tycon1: PolyParam =>
+ (tycon1 == tycon2 ||
+ canConstrain(tycon1) && tryInstantiate(tycon1, tycon2)) &&
+ isSubArgs(args1, args2, tparams)
+ case tycon1: TypeRef =>
+ tycon2.dealias match {
+ case tycon2: TypeRef if tycon1.symbol == tycon2.symbol =>
+ isSubType(tycon1.prefix, tycon2.prefix) &&
+ isSubArgs(args1, args2, tparams)
+ case _ =>
+ false
+ }
+ case tycon1: TypeVar =>
+ isMatchingApply(tycon1.underlying)
+ case tycon1: AnnotatedType =>
+ isMatchingApply(tycon1.underlying)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ /** `param2` can be instantiated to a type application prefix of the LHS
+ * or to a type application prefix of one of the LHS base class instances,
+ * such that the resulting type application is a supertype of `tp1`;
+ * otherwise fall back to fourthTry.
+ */
+ def canInstantiate(tycon2: PolyParam): Boolean = {
+
+ /** Let
+ *
+ * `tparams_0, ..., tparams_{k-1}` be the type parameters of the rhs
+ * `tparams1_0, ..., tparams1_{n-1}` be the type parameters of the constructor of the lhs
+ * `args1_0, ..., args1_{n-1}` be the type arguments of the lhs
+ * `d = n - k`
+ *
+ * Returns `true` iff `d >= 0` and `tycon2` can be instantiated to
+ *
+ * [tparams1_d, ..., tparams1_{n-1}] -> tycon1a[args1_0, ..., args1_{d-1}, tparams1_d, ..., tparams1_{n-1}]
+ *
+ * such that the resulting type application is a supertype of `tp1`.
+ */
+ def tyconOK(tycon1a: Type, args1: List[Type]) = {
+ var tycon1b = tycon1a
+ val tparams1a = tycon1a.typeParams
+ val lengthDiff = tparams1a.length - tparams.length
+ lengthDiff >= 0 && {
+ val tparams1 = tparams1a.drop(lengthDiff)
+ variancesConform(tparams1, tparams) && {
+ if (lengthDiff > 0)
+ tycon1b = PolyType(tparams1.map(_.paramName), tparams1.map(_.paramVariance))(
+ tl => tparams1.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tycon1a.appliedTo(args1.take(lengthDiff) ++
+ tparams1.indices.toList.map(PolyParam(tl, _))))
+ (ctx.mode.is(Mode.TypevarsMissContext) ||
+ tryInstantiate(tycon2, tycon1b.ensureHK)) &&
+ isSubType(tp1, tycon1b.appliedTo(args2))
+ }
+ }
+ }
+
+ tp1.widen match {
+ case tp1w @ HKApply(tycon1, args1) =>
+ tyconOK(tycon1, args1)
+ case tp1w =>
+ tp1w.typeSymbol.isClass && {
+ val classBounds = tycon2.classSymbols
+ def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 =>
+ classBounds.exists(bc.derivesFrom) &&
+ tyconOK(tp1w.baseTypeRef(bc), tp1w.baseArgInfos(bc)) ||
+ liftToBase(bcs1)
+ case _ =>
+ false
+ }
+ liftToBase(tp1w.baseClasses)
+ } ||
+ fourthTry(tp1, tp2)
+ }
+ }
+
+ /** Fall back to comparing either with `fourthTry` or against the lower
+ * approximation of the rhs.
+ * @param tyconLo The type constructor's lower approximation.
+ */
+ def fallback(tyconLo: Type) =
+ either(fourthTry(tp1, tp2), isSubType(tp1, tyconLo.applyIfParameterized(args2)))
+
+ /** Let `tycon2bounds` be the bounds of the RHS type constructor `tycon2`.
+ * Let `app2 = tp2` where the type constructor of `tp2` is replaced by
+ * `tycon2bounds.lo`.
+ * If both bounds are the same, continue with `tp1 <:< app2`;
+ * otherwise continue with either
+ *
+ * tp1 <:< tp2 using fourthTry (this might instantiate params in tp1)
+ * tp1 <:< app2 using isSubType (this might instantiate params in tp2)
+ */
+ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean =
+ if (tycon2bounds.lo eq tycon2bounds.hi)
+ isSubType(tp1,
+ if (tyconIsTypeRef) tp2.superType
+ else tycon2bounds.lo.applyIfParameterized(args2))
+ else
+ fallback(tycon2bounds.lo)
+
+ tycon2 match {
+ case param2: PolyParam =>
+ isMatchingApply(tp1) || {
+ if (canConstrain(param2)) canInstantiate(param2)
+ else compareLower(bounds(param2), tyconIsTypeRef = false)
+ }
+ case tycon2: TypeRef =>
+ isMatchingApply(tp1) ||
+ compareLower(tycon2.info.bounds, tyconIsTypeRef = true)
+ case _: TypeVar | _: AnnotatedType =>
+ isSubType(tp1, tp2.superType)
+ case tycon2: HKApply =>
+ fallback(tycon2.lowerBound)
+ case _ =>
+ false
+ }
+ }
+
+ /** Subtype test for the hk application `tp1 = tycon1[args1]`.
+ */
+ def compareHkApply1(tp1: HKApply, tycon1: Type, args1: List[Type], tp2: Type): Boolean =
+ tycon1 match {
+ case param1: PolyParam =>
+ def canInstantiate = tp2 match {
+ case AppliedType(tycon2, args2) =>
+ tryInstantiate(param1, tycon2.ensureHK) && isSubArgs(args1, args2, tycon2.typeParams)
+ case _ =>
+ false
+ }
+ canConstrain(param1) && canInstantiate ||
+ isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2)
+ case tycon1: TypeProxy =>
+ isSubType(tp1.superType, tp2)
+ case _ =>
+ false
+ }
+
+ /** Subtype test for corresponding arguments in `args1`, `args2` according to
+ * variances in type parameters `tparams`.
+ */
+ def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo]): Boolean =
+ if (args1.isEmpty) args2.isEmpty
+ else args2.nonEmpty && {
+ val v = tparams.head.paramVariance
+ (v > 0 || isSubType(args2.head, args1.head)) &&
+ (v < 0 || isSubType(args1.head, args2.head))
+ } && isSubArgs(args1.tail, args2.tail, tparams)
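+
+ // Example (illustrative): for `trait Fun[-A, +B]`, `Fun[Any, Int] <: Fun[String, AnyVal]`
+ // holds because contravariance of `A` only requires `String <: Any` and covariance of `B`
+ // only requires `Int <: AnyVal`; a nonvariant parameter is checked in both directions.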
+
+ /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where
+ * - `B` derives from one of the class symbols of `tp2`,
+ * - the type parameters of `B` match one-by-one the variances of `tparams`,
+ * - `B` satisfies predicate `p`.
+ */
+ private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = {
+ val classBounds = tp2.classSymbols
+ def recur(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 =>
+ val baseRef = tp1.baseTypeRef(bc)
+ (classBounds.exists(bc.derivesFrom) &&
+ variancesConform(baseRef.typeParams, tparams) &&
+ p(baseRef.appliedTo(tp1.baseArgInfos(bc)))
+ ||
+ recur(bcs1))
+ case nil =>
+ false
+ }
+ recur(tp1.baseClasses)
+ }
+
+ /** Replace any top-level recursive type `{ z => T }` in `tp` with
+ * `[z := anchor]T`.
+ */
+ private def fixRecs(anchor: SingletonType, tp: Type): Type = {
+ def fix(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType => fix(tp.parent).substRecThis(tp, anchor)
+ case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo)
+ case tp: PolyParam => fixOrElse(bounds(tp).hi, tp)
+ case tp: TypeProxy => fixOrElse(tp.underlying, tp)
+ case tp: AndOrType => tp.derivedAndOrType(fix(tp.tp1), fix(tp.tp2))
+ case tp => tp
+ }
+ def fixOrElse(tp: Type, fallback: Type) = {
+ val tp1 = fix(tp)
+ if (tp1 ne tp) tp1 else fallback
+ }
+ fix(tp)
+ }
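+
+ // Example (illustrative): with anchor `x.type`, the recursive type
+ // `{ z => C { type Elem = z.Elem } }` is fixed to `C { type Elem = x.Elem }`; every
+ // reference to the recursive binder is rebased on the concrete singleton `x.type`.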
+
+ /** Returns true iff the result of evaluating either `op1` or `op2` is true,
+ * trying at the same time to keep the constraint as wide as possible.
+ * E.g., if
+ *
+ * tp11 <:< tp12 = true with post-constraint c1
+ * tp21 <:< tp22 = true with post-constraint c2
+ *
+ * and c1 subsumes c2, then c2 is kept as the post-constraint of the result,
+ * otherwise c1 is kept.
+ *
+ * This method is used to approximate a solution in one of the following cases
+ *
+ * T1 & T2 <:< T3
+ * T1 <:< T2 | T3
+ *
+ * In the first case (the second one is analogous), we have a choice whether we
+ * want to establish the subtyping judgement using
+ *
+ * T1 <:< T3 or T2 <:< T3
+ *
+ * as a precondition. Either precondition might constrain type variables.
+ * The purpose of this method is to pick the precondition that constrains less.
+ * The method is not complete, because sometimes there is no best solution. Example:
+ *
+ * A? & B? <: T
+ *
+ * Here, each precondition leads to a different constraint, and neither of
+ * the two post-constraints subsumes the other.
+ */
+ private def either(op1: => Boolean, op2: => Boolean): Boolean = {
+ val preConstraint = constraint
+ op1 && {
+ val leftConstraint = constraint
+ constraint = preConstraint
+ if (!(op2 && subsumes(leftConstraint, constraint, preConstraint))) {
+ if (constr != noPrinter && !subsumes(constraint, leftConstraint, preConstraint))
+ constr.println(i"CUT - prefer $leftConstraint over $constraint")
+ constraint = leftConstraint
+ }
+ true
+ } || op2
+ }
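+
+  // Worked example for `either`, illustrative and not part of the original source:
+  // with an unconstrained type variable `A`, checking
+  //
+  //   List[Int] & Set[A]  <:<  Iterable[Int]
+  //
+  // runs op1 = List[Int] <:< Iterable[Int], which succeeds without touching `A`,
+  // and op2 = Set[A] <:< Iterable[Int], which succeeds only by narrowing `A`.
+  // The first post-constraint does not subsume the second, so the first (wider)
+  // one is kept and `A` stays unconstrained.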
+
+ /** Like tp1 <:< tp2, but returns false immediately if we know that
+ * the case was covered previously during subtyping.
+ */
+ private def isNewSubType(tp1: Type, tp2: Type): Boolean =
+ if (isCovered(tp1) && isCovered(tp2)) {
+ //println(s"useless subtype: $tp1 <:< $tp2")
+ false
+ } else isSubType(tp1, tp2)
+
+ /** Does type `tp1` have a member with name `name` whose normalized type is a subtype of
+ * the normalized type of the refinement `tp2`?
+ * Normalization is as follows: if `tp2` contains a skolem referring to its refinement type,
+ * rebase both itself and the member info of `tp1` on a freshly created skolem type.
+ */
+ protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = {
+ val rinfo2 = tp2.refinedInfo
+ val mbr = tp1.member(name)
+
+ def qualifies(m: SingleDenotation) = isSubType(m.info, rinfo2)
+
+ def memberMatches: Boolean = mbr match { // inlined hasAltWith for performance
+ case mbr: SingleDenotation => qualifies(mbr)
+ case _ => mbr hasAltWith qualifies
+ }
+
+ // special case for situations like:
+ // class C { type T }
+ // val foo: C
+ // foo.type <: C { type T {= , <: , >:} foo.T }
+ def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && {
+ rinfo2 match {
+ case rinfo2: TypeBounds =>
+ val mbr1 = tp1.select(name)
+ !defn.isBottomType(tp1.widen) &&
+ (mbr1 =:= rinfo2.hi || (rinfo2.hi ne rinfo2.lo) && mbr1 =:= rinfo2.lo)
+ case _ => false
+ }
+ }
+
+ /*>|>*/ ctx.traceIndented(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ {
+ memberMatches || selfReferentialMatch
+ }
+ }
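+
+  // Illustrative example, not part of the original source: checking
+  //
+  //   C { type T <: Int }  <:<  C { type T <: Any }
+  //
+  // reaches hasMatchingMember(T, C { type T <: Int }, C { type T <: Any }), where
+  // `mbr.info` is the bounds <: Int and `rinfo2` is the bounds <: Any, so
+  // `qualifies` reduces to a subtype check between the two TypeBounds and succeeds.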
+
+ final def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match {
+ case tp: SingletonType if tp.isStable => tp
+ case tp: ValueType => SkolemType(tp)
+ case tp: TypeProxy => ensureStableSingleton(tp.underlying)
+ }
+
+ /** Skip refinements in `tp2` which match corresponding refinements in `tp1`.
+ * "Match" means:
+ * - they appear in the same order,
+ * - they refine the same names,
+ * - the refinement in `tp1` is an alias type, and
+ * - neither refinement refers back to the refined type via a refined this.
+ * @return The parent type of `tp2` after skipping the matching refinements.
+ */
+ private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match {
+ case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName =>
+ tp2.parent match {
+ case parent2: RefinedType => skipMatching(parent1, parent2)
+ case parent2 => parent2
+ }
+ case _ => tp2
+ }
+
+ /** Are refinements in `tp1` pairwise subtypes of the refinements of `tp2`
+ * up to parent type `limit`?
+ * @pre `tp1` has the necessary number of refinements, they are type aliases,
+ * and their names match the corresponding refinements in `tp2`.
+ * Further, no refinement refers back to the refined type via a refined this.
+ * The precondition is established by `skipMatching`.
+ */
+ private def isSubRefinements(tp1: RefinedType, tp2: RefinedType, limit: Type): Boolean = {
+ def hasSubRefinement(tp1: RefinedType, refine2: Type): Boolean = {
+ isSubType(tp1.refinedInfo, refine2) || {
+ // last effort: try to adapt variances of higher-kinded types if this is sound.
+ val adapted2 = refine2.adaptHkVariances(tp1.parent.member(tp1.refinedName).symbol.info)
+ adapted2.ne(refine2) && hasSubRefinement(tp1, adapted2)
+ }
+ }
+ hasSubRefinement(tp1, tp2.refinedInfo) && (
+ (tp2.parent eq limit) ||
+ isSubRefinements(
+ tp1.parent.asInstanceOf[RefinedType], tp2.parent.asInstanceOf[RefinedType], limit))
+ }
+
+ /** A type has been covered previously in subtype checking if it
+ * is some combination of TypeRefs that point to classes, where the
+ * combiners are RefinedTypes, RecTypes, AndTypes or AnnotatedTypes.
+ * One exception: Refinements referring to basetype args are never considered
+ * to be already covered. This is necessary because such refined types might
+ * still need to be compared via `compareAliasRefined`.
+ */
+ private def isCovered(tp: Type): Boolean = tp.dealias.stripTypeVar match {
+ case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass
+ case tp: ProtoType => false
+ case tp: RefinedOrRecType => isCovered(tp.parent)
+ case tp: AnnotatedType => isCovered(tp.underlying)
+ case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2)
+ case _ => false
+ }
+
+ /** Defer constraining type variables when compared against prototypes */
+ def isMatchedByProto(proto: ProtoType, tp: Type) = tp.stripTypeVar match {
+ case tp: PolyParam if constraint contains tp => true
+ case _ => proto.isMatchedBy(tp)
+ }
+
+ /** Can type `tp` be constrained from above by adding a constraint to
+ * a typevar that it refers to? In that case we have to be careful not
+ * to approximate with the lower bound of a type in `thirdTry`. Instead,
+ * we should first unroll `tp1` until we hit the type variable and bind the
+ * type variable with (the corresponding type in) `tp2` instead.
+ */
+ private def isCappable(tp: Type): Boolean = tp match {
+ case tp: PolyParam => constraint contains tp
+ case tp: TypeProxy => isCappable(tp.underlying)
+ case tp: AndOrType => isCappable(tp.tp1) || isCappable(tp.tp2)
+ case _ => false
+ }
+
+ /** Narrow gadt.bounds for the type parameter referenced by `tr` to include
+ * `bound` as an upper or lower bound (which depends on `isUpper`).
+ * Test that the resulting bounds are still satisfiable.
+ */
+ private def narrowGADTBounds(tr: NamedType, bound: Type, isUpper: Boolean): Boolean =
+ ctx.mode.is(Mode.GADTflexible) && !frozenConstraint && {
+ val tparam = tr.symbol
+ typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
+ if (bound.isRef(tparam)) false
+ else bound match {
+ case bound: TypeRef
+ if bound.symbol.is(BindDefinedType) &&
+ ctx.gadt.bounds.contains(bound.symbol) &&
+ !tr.symbol.is(BindDefinedType) =>
+ // Avoid having pattern-bound types in gadt bounds,
+ // as these might be eliminated once the pattern is typechecked.
+ // Pattern-bound type symbols should be narrowed first, only if that fails
+ // should symbols in the environment be constrained.
+ narrowGADTBounds(bound, tr, !isUpper)
+ case _ =>
+ val oldBounds = ctx.gadt.bounds(tparam)
+ val newBounds =
+ if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
+ else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
+ isSubType(newBounds.lo, newBounds.hi) &&
+ { ctx.gadt.setBounds(tparam, newBounds); true }
+ }
+ }
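+
+  // Illustrative scenario, not part of the original source: with GADT-style
+  // definitions such as
+  //
+  //   sealed trait Expr[T]
+  //   case class IntLit(i: Int) extends Expr[Int]
+  //
+  //   def eval[T](e: Expr[T]): T = e match {
+  //     case IntLit(i) => i
+  //   }
+  //
+  // typing the case body requires relating Expr[Int] and Expr[T] under
+  // Mode.GADTflexible; narrowGADTBounds then tightens the gadt bounds of `T`
+  // with `Int`, which is what allows `i: Int` to be accepted where `T` is expected.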
+
+ // Tests around `matches`
+
+ /** A function implementing `tp1` matches `tp2`. */
+ final def matchesType(tp1: Type, tp2: Type, relaxed: Boolean): Boolean = tp1.widen match {
+ case tp1: MethodType =>
+ tp2.widen match {
+ case tp2: MethodType =>
+ tp1.isImplicit == tp2.isImplicit &&
+ matchingParams(tp1.paramTypes, tp2.paramTypes, tp1.isJava, tp2.isJava) &&
+ matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed)
+ case tp2 =>
+ relaxed && tp1.paramNames.isEmpty &&
+ matchesType(tp1.resultType, tp2, relaxed)
+ }
+ case tp1: PolyType =>
+ tp2.widen match {
+ case tp2: PolyType =>
+ sameLength(tp1.paramNames, tp2.paramNames) &&
+ matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed)
+ case _ =>
+ false
+ }
+ case _ =>
+ tp2.widen match {
+ case _: PolyType =>
+ false
+ case tp2: MethodType =>
+ relaxed && tp2.paramNames.isEmpty &&
+ matchesType(tp1, tp2.resultType, relaxed)
+ case tp2 =>
+ relaxed || isSameType(tp1, tp2)
+ }
+ }
+
+ /** Are `formals1` and `formals2` parameter lists with pairwise equivalent types? */
+ def matchingParams(formals1: List[Type], formals2: List[Type], isJava1: Boolean, isJava2: Boolean): Boolean = formals1 match {
+ case formal1 :: rest1 =>
+ formals2 match {
+ case formal2 :: rest2 =>
+ (isSameTypeWhenFrozen(formal1, formal2)
+ || isJava1 && (formal2 isRef ObjectClass) && (formal1 isRef AnyClass)
+ || isJava2 && (formal1 isRef ObjectClass) && (formal2 isRef AnyClass)) &&
+ matchingParams(rest1, rest2, isJava1, isJava2)
+ case nil =>
+ false
+ }
+ case nil =>
+ formals2.isEmpty
+ }
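+
+  // Illustrative example, not part of the original source: when one side is
+  // Java-defined, Object and Any are treated as equivalent parameter types, so a
+  // Scala override like
+  //
+  //   override def equals(that: Any): Boolean = ...
+  //
+  // has parameters matching those of Java's equals(Object), even though
+  // Any =:= Object does not hold on the Scala side.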
+
+ /** Do generic types `poly1` and `poly2` have type parameters that
+ * have the same bounds (after renaming one set to the other)?
+ */
+ def matchingTypeParams(poly1: PolyType, poly2: PolyType): Boolean =
+ (poly1.paramBounds corresponds poly2.paramBounds)((b1, b2) =>
+ isSameType(b1, b2.subst(poly2, poly1)))
+
+ // Type equality =:=
+
+ /** Two types are the same if they are mutual subtypes of each other */
+ def isSameType(tp1: Type, tp2: Type): Boolean =
+ if (tp1 eq NoType) false
+ else if (tp1 eq tp2) true
+ else isSubType(tp1, tp2) && isSubType(tp2, tp1)
+
+ /** Same as `isSameType` but also can be applied to overloaded TermRefs, where
+ * two overloaded refs are the same if they have pairwise equal alternatives
+ */
+ def isSameRef(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSameRef($tp1, $tp2)") {
+ def isSubRef(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: TermRef if tp1.isOverloaded =>
+ tp1.alternatives forall (isSubRef(_, tp2))
+ case _ =>
+ tp2 match {
+ case tp2: TermRef if tp2.isOverloaded =>
+ tp2.alternatives exists (isSubRef(tp1, _))
+ case _ =>
+ isSubType(tp1, tp2)
+ }
+ }
+ isSubRef(tp1, tp2) && isSubRef(tp2, tp1)
+ }
+
+ /** The greatest lower bound of two types */
+ def glb(tp1: Type, tp2: Type): Type = /*>|>*/ ctx.traceIndented(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ {
+ if (tp1 eq tp2) tp1
+ else if (!tp1.exists) tp2
+ else if (!tp2.exists) tp1
+ else if ((tp1 isRef AnyClass) || (tp2 isRef NothingClass)) tp2
+ else if ((tp2 isRef AnyClass) || (tp1 isRef NothingClass)) tp1
+ else tp2 match { // normalize to disjunctive normal form if possible.
+ case OrType(tp21, tp22) =>
+ tp1 & tp21 | tp1 & tp22
+ case _ =>
+ tp1 match {
+ case OrType(tp11, tp12) =>
+ tp11 & tp2 | tp12 & tp2
+ case _ =>
+ val t1 = mergeIfSub(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = mergeIfSub(tp2, tp1)
+ if (t2.exists) t2
+ else tp1 match {
+ case tp1: ConstantType =>
+ tp2 match {
+ case tp2: ConstantType =>
+ // Make use of the fact that the intersection of two constant
+ // types which are not subtypes of each other is known to be empty.
+ // Note: The same does not apply to singleton types in general.
+ // E.g. we could have a pattern match against `x.type & y.type`
+ // which might succeed if `x` and `y` happen to be the same ref
+ // at run time. It would not work to replace that with `Nothing`.
+ // However, maybe we can still apply the replacement to
+ // types which are not explicitly written.
+ defn.NothingType
+ case _ => andType(tp1, tp2)
+ }
+ case _ => andType(tp1, tp2)
+ }
+ }
+ }
+ }
+ }
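+
+  // Illustrative examples, not part of the original source, following the cases above:
+  //
+  //   glb(Any, String)           == String                             (Any is absorbed)
+  //   glb(Int | String, Product) == Int & Product | String & Product   (pushed into the or)
+  //   glb of two distinct constant types (e.g. those of the literals 1 and 2) == Nothing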
+
+ /** The greatest lower bound of a list of types */
+ final def glb(tps: List[Type]): Type =
+ ((defn.AnyType: Type) /: tps)(glb)
+
+ /** The least upper bound of two types
+ * @note We do not admit singleton types in or-types as lubs.
+ */
+ def lub(tp1: Type, tp2: Type): Type = /*>|>*/ ctx.traceIndented(s"lub(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ {
+ if (tp1 eq tp2) tp1
+ else if (!tp1.exists) tp1
+ else if (!tp2.exists) tp2
+ else if ((tp1 isRef AnyClass) || (tp2 isRef NothingClass)) tp1
+ else if ((tp2 isRef AnyClass) || (tp1 isRef NothingClass)) tp2
+ else {
+ val t1 = mergeIfSuper(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = mergeIfSuper(tp2, tp1)
+ if (t2.exists) t2
+ else {
+ val tp1w = tp1.widen
+ val tp2w = tp2.widen
+ if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w)
+ else orType(tp1w, tp2w) // no need to check subtypes again
+ }
+ }
+ }
+ }
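+
+  // Illustrative examples, not part of the original source:
+  //
+  //   lub(String, Nothing) == String         (Nothing is absorbed)
+  //   lub(Int, String)     == Int | String   (orType of the operands)
+  //   lub of two different singleton types widens both operands first, per the
+  //   note above, so the result is the lub of their underlying types.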
+
+ /** The least upper bound of a list of types */
+ final def lub(tps: List[Type]): Type =
+ ((defn.NothingType: Type) /: tps)(lub)
+
+ /** Merge `tp1` into `tp2` if `tp1` is a subtype of some &-summand of `tp2`.
+ */
+ private def mergeIfSub(tp1: Type, tp2: Type): Type =
+ if (isSubTypeWhenFrozen(tp1, tp2))
+ if (isSubTypeWhenFrozen(tp2, tp1)) tp2 else tp1 // keep existing type if possible
+ else tp2 match {
+ case tp2 @ AndType(tp21, tp22) =>
+ val lower1 = mergeIfSub(tp1, tp21)
+ if (lower1 eq tp21) tp2
+ else if (lower1.exists) lower1 & tp22
+ else {
+ val lower2 = mergeIfSub(tp1, tp22)
+ if (lower2 eq tp22) tp2
+ else if (lower2.exists) tp21 & lower2
+ else NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ /** Merge `tp1` into `tp2` if `tp1` is a supertype of some |-summand of `tp2`.
+ */
+ private def mergeIfSuper(tp1: Type, tp2: Type): Type =
+ if (isSubTypeWhenFrozen(tp2, tp1))
+ if (isSubTypeWhenFrozen(tp1, tp2)) tp2 else tp1 // keep existing type if possible
+ else tp2 match {
+ case tp2 @ OrType(tp21, tp22) =>
+ val higher1 = mergeIfSuper(tp1, tp21)
+ if (higher1 eq tp21) tp2
+ else if (higher1.exists) higher1 | tp22
+ else {
+ val higher2 = mergeIfSuper(tp1, tp22)
+ if (higher2 eq tp22) tp2
+ else if (higher2.exists) tp21 | higher2
+ else NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ /** Form a normalized conjunction of two types.
+ * Note: For certain types, `&` is distributed inside the type. This holds for
+ * all types which are not value types (e.g. TypeBounds, ClassInfo,
+ * ExprType, MethodType, PolyType). Also, when forming an `&`,
+ * instantiated TypeVars are dereferenced and annotations are stripped.
+ * Finally, refined types with the same refined name are
+ * opportunistically merged.
+ *
+ * Sometimes, the conjunction of two types cannot be formed because
+ * the types are in conflict with each other. In particular:
+ *
+ * 1. Two different class types are conflicting.
+ * 2. A class type conflicts with a type bounds that does not include the class reference.
+ * 3. Two method or poly types with different (type) parameters but the same
+ * signature are conflicting
+ *
+ * In these cases, a MergeError is thrown.
+ */
+ final def andType(tp1: Type, tp2: Type, erased: Boolean = ctx.erasedTypes) = ctx.traceIndented(s"andType(${tp1.show}, ${tp2.show})", subtyping, show = true) {
+ val t1 = distributeAnd(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = distributeAnd(tp2, tp1)
+ if (t2.exists) t2
+ else if (erased) erasedGlb(tp1, tp2, isJava = false)
+ else liftIfHK(tp1, tp2, AndType(_, _), _ & _)
+ }
+ }
+
+ /** Form a normalized disjunction of two types.
+ * Note: For certain types, `|` is distributed inside the type. This holds for
+ * all types which are not value types (e.g. TypeBounds, ClassInfo,
+ * ExprType, MethodType, PolyType). Also, when forming an `|`,
+ * instantiated TypeVars are dereferenced and annotations are stripped.
+ *
+ * Sometimes, the disjunction of two types cannot be formed because
+ * the types are in conflict with each other (see `andType` for an enumeration
+ * of these cases). In cases of conflict a `MergeError` is raised.
+ *
+ * @param erased Apply erasure semantics. If erased is true, instead of creating
+ * an OrType, the lub will be computed using TypeErasure#erasedLub.
+ */
+ final def orType(tp1: Type, tp2: Type, erased: Boolean = ctx.erasedTypes) = {
+ val t1 = distributeOr(tp1, tp2)
+ if (t1.exists) t1
+ else {
+ val t2 = distributeOr(tp2, tp1)
+ if (t2.exists) t2
+ else if (erased) erasedLub(tp1, tp2)
+ else liftIfHK(tp1, tp2, OrType(_, _), _ | _)
+ }
+ }
+
+ /** `op(tp1, tp2)` unless `tp1` and `tp2` are type-constructors with at least
+ * some unnamed type parameters.
+ * In the latter case, combine `tp1` and `tp2` under a type lambda like this:
+ *
+ * [X1, ..., Xn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn])
+ *
+ * Note: There is a tension between named and positional parameters here, which
+ * is impossible to resolve completely. Say you have
+ *
+ * C[type T], D[type U]
+ *
+ * Then do you expand `C & D` to `[T] -> C[T] & D[T]` or not? Under the named
+ * type parameter interpretation, this would be wrong whereas under the traditional
+ * higher-kinded interpretation this would be required. The problem arises from
+ * allowing both interpretations. A possible remedy is to be somehow stricter
+ * in where we allow which interpretation.
+ */
+ private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type, original: (Type, Type) => Type) = {
+ val tparams1 = tp1.typeParams
+ val tparams2 = tp2.typeParams
+ if (tparams1.isEmpty)
+ if (tparams2.isEmpty) op(tp1, tp2)
+ else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.paramBoundsAsSeenFrom(tp2))))
+ else if (tparams2.isEmpty)
+ original(tp1.appliedTo(tp1.typeParams.map(_.paramBoundsAsSeenFrom(tp1))), tp2)
+ else
+ PolyType(
+ paramNames = tpnme.syntheticTypeParamNames(tparams1.length),
+ variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ (tparam1.paramVariance + tparam2.paramVariance) / 2))(
+ paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ tl.lifted(tparams1, tparam1.paramBoundsAsSeenFrom(tp1)).bounds &
+ tl.lifted(tparams2, tparam2.paramBoundsAsSeenFrom(tp2)).bounds),
+ resultTypeExp = tl =>
+ original(tl.lifted(tparams1, tp1).appliedTo(tl.paramRefs),
+ tl.lifted(tparams2, tp2).appliedTo(tl.paramRefs)))
+ }
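+
+  // Illustrative example, not part of the original source: given two unapplied
+  // type constructors, say
+  //
+  //   trait F[X]; trait G[X]
+  //
+  // andType(F, G) has no distributed form, so liftIfHK combines them under a type
+  // lambda, roughly [X] -> F[X] & G[X], where the lambda's parameter bounds are the
+  // intersection of the corresponding bounds of F and G.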
+
+ /** Try to distribute `&` inside type, detect and handle conflicts
+ * @pre !(tp1 <:< tp2) && !(tp2 <:< tp1) -- these cases were handled before
+ */
+ private def distributeAnd(tp1: Type, tp2: Type): Type = tp1 match {
+ // opportunistically merge same-named refinements
+ // this does not change anything semantically (i.e. merging or not merging
+ // gives =:= types), but it keeps the type smaller.
+ case tp1: RefinedType =>
+ tp2 match {
+ case tp2: RefinedType if tp1.refinedName == tp2.refinedName =>
+ // Given two refinements `T1 { X = S1 }` and `T2 { X = S2 }`, if `S1 =:= S2`
+ // (possibly by instantiating type parameters), rewrite to `T1 & T2 { X = S1 }`.
+ // Otherwise rewrite to `T1 & T2 { X B }` where `B` is the conjunction of
+ // the bounds of `X` in `T1` and `T2`.
+ // The first rule above is contentious because it cuts the constraint set.
+ // But without it we would replace the two aliases by
+ // `T { X >: S1 | S2 <: S1 & S2 }`, which looks weird and is probably
+ // not what's intended.
+ val rinfo1 = tp1.refinedInfo
+ val rinfo2 = tp2.refinedInfo
+ val parent = tp1.parent & tp2.parent
+ val rinfo =
+ if (rinfo1.isAlias && rinfo2.isAlias && isSameType(rinfo1, rinfo2))
+ rinfo1
+ else
+ rinfo1 & rinfo2
+ tp1.derivedRefinedType(parent, tp1.refinedName, rinfo)
+ case _ =>
+ NoType
+ }
+ case tp1: RecType =>
+ tp1.rebind(distributeAnd(tp1.parent, tp2))
+ case ExprType(rt1) =>
+ tp2 match {
+ case ExprType(rt2) =>
+ ExprType(rt1 & rt2)
+ case _ =>
+ rt1 & tp2
+ }
+ case tp1: TypeVar if tp1.isInstantiated =>
+ tp1.underlying & tp2
+ case tp1: AnnotatedType =>
+ tp1.underlying & tp2
+ case _ =>
+ NoType
+ }
+
+ /** Try to distribute `|` inside type, detect and handle conflicts
+ * Note that, unlike for `&`, a disjunction cannot be pushed into
+ * a refined or applied type. Example:
+ *
+ * List[T] | List[U] is not the same as List[T | U].
+ *
+ * The rhs is a proper supertype of the lhs.
+ */
+ private def distributeOr(tp1: Type, tp2: Type): Type = tp1 match {
+ case ExprType(rt1) =>
+ ExprType(rt1 | tp2.widenExpr)
+ case tp1: TypeVar if tp1.isInstantiated =>
+ tp1.underlying | tp2
+ case tp1: AnnotatedType =>
+ tp1.underlying | tp2
+ case _ =>
+ NoType
+ }
+
+ /** Show a type, handling types of types (e.g. ClassInfo, TypeBounds) better than the default */
+ private def showType(tp: Type)(implicit ctx: Context) = tp match {
+ case ClassInfo(_, cls, _, _, _) => cls.showLocated
+ case bounds: TypeBounds => "type bounds" + bounds.show
+ case _ => tp.show
+ }
+
+ /** A comparison function to pick a winner in case of a merge conflict */
+ private def isAsGood(tp1: Type, tp2: Type): Boolean = tp1 match {
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo =>
+ isSubTypeWhenFrozen(tp1.prefix, tp2.prefix) || (tp1.cls.owner derivesFrom tp2.cls.owner)
+ case _ =>
+ false
+ }
+ case tp1: PolyType =>
+ tp2 match {
+ case tp2: PolyType =>
+ tp1.typeParams.length == tp2.typeParams.length &&
+ isAsGood(tp1.resultType, tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ false
+ }
+ case tp1: MethodType =>
+ tp2 match {
+ case tp2: MethodType =>
+ def asGoodParams(formals1: List[Type], formals2: List[Type]) =
+ (formals2 corresponds formals1)(isSubTypeWhenFrozen)
+ asGoodParams(tp1.paramTypes, tp2.paramTypes) &&
+ (!asGoodParams(tp2.paramTypes, tp1.paramTypes) ||
+ isAsGood(tp1.resultType, tp2.resultType))
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ /** A new type comparer of the same type as this one, using the given context. */
+ def copyIn(ctx: Context) = new TypeComparer(ctx)
+
+ // ----------- Diagnostics --------------------------------------------------
+
+ /** A hook for showing subtype traces. Overridden in ExplainingTypeComparer */
+ def traceIndented[T](str: String)(op: => T): T = op
+
+ private def traceInfo(tp1: Type, tp2: Type) =
+ s"${tp1.show} <:< ${tp2.show}" + {
+ if (ctx.settings.verbose.value || Config.verboseExplainSubtype) {
+ s" ${tp1.getClass}, ${tp2.getClass}" +
+ (if (frozenConstraint) " frozen" else "") +
+ (if (ctx.mode is Mode.TypevarsMissContext) " tvars-miss-ctx" else "")
+ }
+ else ""
+ }
+
+ /** Show subtype goal that led to an assertion failure */
+ def showGoal(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ println(ex"assertion failure for $tp1 <:< $tp2, frozen = $frozenConstraint")
+ def explainPoly(tp: Type) = tp match {
+ case tp: PolyParam => ctx.echo(s"polyparam ${tp.show} found in ${tp.binder.show}")
+ case tp: TypeRef if tp.symbol.exists => ctx.echo(s"typeref ${tp.show} found in ${tp.symbol.owner.show}")
+ case tp: TypeVar => ctx.echo(s"typevar ${tp.show}, origin = ${tp.origin}")
+ case _ => ctx.echo(s"${tp.show} is a ${tp.getClass}")
+ }
+ explainPoly(tp1)
+ explainPoly(tp2)
+ }
+
+ /** Record statistics about the total number of subtype checks
+ * and the number of "successful" subtype checks, i.e. checks
+ * that form part of a subtype derivation tree that's ultimately successful.
+ */
+ def recordStatistics(result: Boolean, prevSuccessCount: Int) = {
+ // Stats.record(s"isSubType ${tp1.show} <:< ${tp2.show}")
+ totalCount += 1
+ if (result) successCount += 1 else successCount = prevSuccessCount
+ if (recCount == 0) {
+ Stats.record("successful subType", successCount)
+ Stats.record("total subType", totalCount)
+ successCount = 0
+ totalCount = 0
+ }
+ }
+}
+
+object TypeComparer {
+
+ /** Show trace of comparison operations when performing `op` as result string */
+ def explained[T](op: Context => T)(implicit ctx: Context): String = {
+ val nestedCtx = ctx.fresh.setTypeComparerFn(new ExplainingTypeComparer(_))
+ op(nestedCtx)
+ nestedCtx.typeComparer.toString
+ }
+}
+
+/** A type comparer that can record traces of subtype operations */
+class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) {
+ private var indent = 0
+ private val b = new StringBuilder
+
+ private var skipped = false
+
+ override def traceIndented[T](str: String)(op: => T): T =
+ if (skipped) op
+ else {
+ indent += 2
+ b append "\n" append (" " * indent) append "==> " append str
+ val res = op
+ b append "\n" append (" " * indent) append "<== " append str append " = " append show(res)
+ indent -= 2
+ res
+ }
+
+ private def show(res: Any) = res match {
+ case res: printing.Showable if !ctx.settings.Yexplainlowlevel.value => res.show
+ case _ => String.valueOf(res)
+ }
+
+ override def isSubType(tp1: Type, tp2: Type) =
+ traceIndented(s"${show(tp1)} <:< ${show(tp2)}${if (Config.verboseExplainSubtype) s" ${tp1.getClass} ${tp2.getClass}" else ""}${if (frozenConstraint) " frozen" else ""}") {
+ super.isSubType(tp1, tp2)
+ }
+
+ override def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean =
+ traceIndented(s"hasMatchingMember(${show(tp1)} . $name, ${show(tp2.refinedInfo)}), member = ${show(tp1.member(name).info)}") {
+ super.hasMatchingMember(name, tp1, tp2)
+ }
+
+ override def lub(tp1: Type, tp2: Type) =
+ traceIndented(s"lub(${show(tp1)}, ${show(tp2)})") {
+ super.lub(tp1, tp2)
+ }
+
+ override def glb(tp1: Type, tp2: Type) =
+ traceIndented(s"glb(${show(tp1)}, ${show(tp2)})") {
+ super.glb(tp1, tp2)
+ }
+
+ override def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean =
+ traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint, constraint = ${ctx.typerState.constraint}") {
+ super.addConstraint(param, bound, fromBelow)
+ }
+
+ override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx)
+
+ override def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ def addendum = ""
+ traceIndented(i"compareHkApply $tp1, $tp2$addendum") {
+ super.compareHkApply2(tp1, tp2, tycon2, args2)
+ }
+ }
+
+ override def toString = "Subtype trace:" + { try b.toString finally b.clear() }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
new file mode 100644
index 000000000..abbacee49
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -0,0 +1,514 @@
+package dotty.tools
+package dotc
+package core
+
+import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Decorators._, Flags.JavaDefined
+import Uniques.unique
+import dotc.transform.ExplicitOuter._
+import dotc.transform.ValueClasses._
+import util.DotClass
+
+/** Erased types are:
+ *
+ * ErasedValueType
+ * TypeRef(prefix is ignored, denot is ClassDenotation)
+ * TermRef(prefix is ignored, denot is SymDenotation)
+ * JavaArrayType
+ * AnnotatedType
+ * MethodType
+ * ThisType
+ * SuperType
+ * ClassInfo (NoPrefix, ...)
+ * NoType
+ * NoPrefix
+ * WildcardType
+ * ErrorType
+ *
+ * only for isInstanceOf, asInstanceOf: PolyType, PolyParam, TypeBounds
+ *
+ */
+object TypeErasure {
+
+ /** A predicate that tests whether a type is a legal erased type. Only asInstanceOf and
+ * isInstanceOf may have types that do not satisfy the predicate.
+ * ErasedValueType is considered an erased type because it is valid after Erasure (it is
+ * eliminated by ElimErasedValueType).
+ */
+ def isErasedType(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case _: ErasedValueType =>
+ true
+ case tp: TypeRef =>
+ tp.symbol.isClass && tp.symbol != defn.AnyClass && tp.symbol != defn.ArrayClass
+ case _: TermRef =>
+ true
+ case JavaArrayType(elem) =>
+ isErasedType(elem)
+ case AnnotatedType(tp, _) =>
+ isErasedType(tp)
+ case ThisType(tref) =>
+ isErasedType(tref)
+ case tp: MethodType =>
+ tp.paramTypes.forall(isErasedType) && isErasedType(tp.resultType)
+ case tp @ ClassInfo(pre, _, parents, decls, _) =>
+ isErasedType(pre) && parents.forall(isErasedType) //&& decls.forall(sym => isErasedType(sym.info)) && isErasedType(tp.selfType)
+ case NoType | NoPrefix | WildcardType | ErrorType | SuperType(_, _) =>
+ true
+ case _ =>
+ false
+ }
+
+ /** A type representing the semi-erasure of a derived value class, see SIP-15
+ * where it's called "C$unboxed" for a class C.
+ * Derived value classes are erased to this type during Erasure (when
+ * semiEraseVCs = true) and subsequently erased to their underlying type
+ * during ElimErasedValueType. This type is outside the normal Scala class
+ * hierarchy: it is a subtype of no other type and is a supertype only of
+ * Nothing. This is because this type is only useful for type adaptation (see
+ * [[Erasure.Boxing#adaptToType]]).
+ *
+ * @param tycon A TypeRef referring to the value class symbol
+ * @param erasedUnderlying The erased type of the single field of the value class
+ */
+ abstract case class ErasedValueType(tycon: TypeRef, erasedUnderlying: Type)
+ extends CachedGroundType with ValueType {
+ override def computeHash = doHash(tycon, erasedUnderlying)
+ }
+
+ final class CachedErasedValueType(tycon: TypeRef, erasedUnderlying: Type)
+ extends ErasedValueType(tycon, erasedUnderlying)
+
+ object ErasedValueType {
+ def apply(tycon: TypeRef, erasedUnderlying: Type)(implicit ctx: Context) = {
+ unique(new CachedErasedValueType(tycon, erasedUnderlying))
+ }
+ }
+
+ private def erasureIdx(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean, wildcardOK: Boolean) =
+ (if (isJava) 1 else 0) +
+ (if (semiEraseVCs) 2 else 0) +
+ (if (isConstructor) 4 else 0) +
+ (if (wildcardOK) 8 else 0)
+
+ private val erasures = new Array[TypeErasure](16)
+
+ for {
+ isJava <- List(false, true)
+ semiEraseVCs <- List(false, true)
+ isConstructor <- List(false, true)
+ wildcardOK <- List(false, true)
+ } erasures(erasureIdx(isJava, semiEraseVCs, isConstructor, wildcardOK)) =
+ new TypeErasure(isJava, semiEraseVCs, isConstructor, wildcardOK)
+
+ /** Produces an erasure function. See the documentation of the class [[TypeErasure]]
+ * for a description of each parameter.
+ */
+ private def erasureFn(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean, wildcardOK: Boolean): TypeErasure =
+ erasures(erasureIdx(isJava, semiEraseVCs, isConstructor, wildcardOK))
+
+ /** The current context with a phase no later than erasure */
+ private def erasureCtx(implicit ctx: Context) =
+ if (ctx.erasedTypes) ctx.withPhase(ctx.erasurePhase) else ctx
+
+ /** The standard erasure of a Scala type. Value classes are erased as normal classes.
+ *
+ * @param tp The type to erase.
+ */
+ def erasure(tp: Type)(implicit ctx: Context): Type =
+ erasureFn(isJava = false, semiEraseVCs = false, isConstructor = false, wildcardOK = false)(tp)(erasureCtx)
+
+ /** The value class erasure of a Scala type, where value classes are semi-erased to
+ * ErasedValueType (they will be fully erased in [[ElimErasedValueType]]).
+ *
+ * @param tp The type to erase.
+ */
+ def valueErasure(tp: Type)(implicit ctx: Context): Type =
+ erasureFn(isJava = false, semiEraseVCs = true, isConstructor = false, wildcardOK = false)(tp)(erasureCtx)
+
+ def sigName(tp: Type, isJava: Boolean)(implicit ctx: Context): TypeName = {
+ val normTp =
+ if (tp.isRepeatedParam) {
+ val seqClass = if (isJava) defn.ArrayClass else defn.SeqClass
+ tp.translateParameterized(defn.RepeatedParamClass, seqClass)
+ }
+ else tp
+ val erase = erasureFn(isJava, semiEraseVCs = false, isConstructor = false, wildcardOK = true)
+ erase.sigName(normTp)(erasureCtx)
+ }
+
+ /** The erasure of a top-level reference. Differs from normal erasure in that
+ * TermRefs are kept instead of being widened away.
+ */
+ def erasedRef(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: TermRef =>
+ assert(tp.symbol.exists, tp)
+ val tp1 = ctx.makePackageObjPrefixExplicit(tp)
+ if (tp1 ne tp) erasedRef(tp1)
+ else TermRef(erasedRef(tp.prefix), tp.symbol.asTerm)
+ case tp: ThisType =>
+ tp
+ case tp =>
+ valueErasure(tp)
+ }
+
+ /** The symbol's erased info. This is the type's erasure, except for the following symbols:
+ *
+ * - For $asInstanceOf : [T]T
+ * - For $isInstanceOf : [T]Boolean
+ * - For all abstract types : = ?
+ * - For companion methods : the erasure of their type with semiEraseVCs = false.
+ * The signature of these methods are used to keep a
+ * link between companions and should not be semi-erased.
+ * - For Java-defined symbols: : the erasure of their type with isJava = true,
+ * semiEraseVCs = false. Semi-erasure never happens in Java.
+ * - For all other symbols : the semi-erasure of their types, with
+ * isJava, isConstructor set according to symbol.
+ */
+ def transformInfo(sym: Symbol, tp: Type)(implicit ctx: Context): Type = {
+ val isJava = sym is JavaDefined
+ val semiEraseVCs = !isJava && !sym.isCompanionMethod
+ val erase = erasureFn(isJava, semiEraseVCs, sym.isConstructor, wildcardOK = false)
+
+ def eraseParamBounds(tp: PolyType): Type =
+ tp.derivedPolyType(
+ tp.paramNames, tp.paramNames map (Function.const(TypeBounds.upper(defn.ObjectType))), tp.resultType)
+
+ if (defn.isPolymorphicAfterErasure(sym)) eraseParamBounds(sym.info.asInstanceOf[PolyType])
+ else if (sym.isAbstractType) TypeAlias(WildcardType)
+ else if (sym.isConstructor) outer.addParam(sym.owner.asClass, erase(tp)(erasureCtx))
+ else erase.eraseInfo(tp, sym)(erasureCtx) match {
+ case einfo: MethodType if sym.isGetter && einfo.resultType.isRef(defn.UnitClass) =>
+ MethodType(Nil, defn.BoxedUnitType)
+ case einfo =>
+ einfo
+ }
+ }
+
+ /** Is `tp` an abstract type or polymorphic type parameter that has `Any`, `AnyVal`,
+ * or a universal trait as upper bound and that is not Java defined? Arrays of such types are
+ * erased to `Object` instead of `Object[]`.
+ */
+ def isUnboundedGeneric(tp: Type)(implicit ctx: Context): Boolean = tp.dealias match {
+ case tp: TypeRef =>
+ !tp.symbol.isClass &&
+ !tp.derivesFrom(defn.ObjectClass) &&
+ !tp.symbol.is(JavaDefined)
+ case tp: PolyParam =>
+ !tp.derivesFrom(defn.ObjectClass) &&
+ !tp.binder.resultType.isInstanceOf[JavaMethodType]
+ case tp: TypeAlias => isUnboundedGeneric(tp.alias)
+ case tp: TypeBounds => !tp.hi.derivesFrom(defn.ObjectClass)
+ case tp: TypeProxy => isUnboundedGeneric(tp.underlying)
+ case tp: AndType => isUnboundedGeneric(tp.tp1) || isUnboundedGeneric(tp.tp2)
+ case tp: OrType => isUnboundedGeneric(tp.tp1) && isUnboundedGeneric(tp.tp2)
+ case _ => false
+ }
+
+ /** The erased least upper bound is computed as follows
+ * - if both arguments are arrays of objects, an array of the lub of the element types
+ * - if both arguments are arrays of same primitives, an array of this primitive
+ * - if one argument is array of primitives and the other is array of objects, Object
+ * - if one argument is an array, Object
+ * - otherwise a common superclass or trait S of the argument classes, with the
+ * following two properties:
+ * S is minimal: no other common superclass or trait derives from S
+ * S is last : in the linearization of the first argument type `tp1`
+ * there are no minimal common superclasses or traits that
+ * come after S.
+ * (the reason to pick last is that we prefer classes over traits that way).
+ */
+ def erasedLub(tp1: Type, tp2: Type)(implicit ctx: Context): Type = tp1 match {
+ case JavaArrayType(elem1) =>
+ import dotty.tools.dotc.transform.TypeUtils._
+ tp2 match {
+ case JavaArrayType(elem2) =>
+ if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) {
+ if (elem1.classSymbol eq elem2.classSymbol) // same primitive
+ JavaArrayType(elem1)
+ else defn.ObjectType
+ } else JavaArrayType(erasedLub(elem1, elem2))
+ case _ => defn.ObjectType
+ }
+ case _ =>
+ tp2 match {
+ case JavaArrayType(_) => defn.ObjectType
+ case _ =>
+ val cls2 = tp2.classSymbol
+ def loop(bcs: List[ClassSymbol], bestSoFar: ClassSymbol): ClassSymbol = bcs match {
+ case bc :: bcs1 =>
+ if (cls2.derivesFrom(bc))
+ if (!bc.is(Trait) && bc != defn.AnyClass) bc
+ else loop(bcs1, if (bestSoFar.derivesFrom(bc)) bestSoFar else bc)
+ else
+ loop(bcs1, bestSoFar)
+ case nil =>
+ bestSoFar
+ }
+ val t = loop(tp1.baseClasses, defn.ObjectClass)
+ if (t eq defn.AnyValClass)
+ // while AnyVal is a valid common super class for primitives it does not exist after erasure
+ defn.ObjectType
+ else t.typeRef
+ }
+ }
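+
+  // Illustrative examples, not part of the original source, following the rules above:
+  //
+  //   erasedLub(Array[Int],    Array[Int])    == Array[Int]   (same primitive element)
+  //   erasedLub(Array[Int],    Array[Long])   == Object       (different primitives)
+  //   erasedLub(Array[String], Array[Object]) == Array[erasedLub(String, Object)]
+  //   erasedLub(Array[Int],    String)        == Object       (array vs. non-array)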
+
+ /** The erased greatest lower bound picks one of the two argument types. It prefers, in this order:
+ * - arrays over non-arrays
+ * - subtypes over supertypes, unless isJava is set
+ * - real classes over traits
+ */
+ def erasedGlb(tp1: Type, tp2: Type, isJava: Boolean)(implicit ctx: Context): Type = tp1 match {
+ case JavaArrayType(elem1) =>
+ tp2 match {
+ case JavaArrayType(elem2) => JavaArrayType(erasedGlb(elem1, elem2, isJava))
+ case _ => tp1
+ }
+ case _ =>
+ tp2 match {
+ case JavaArrayType(_) => tp2
+ case _ =>
+ val tsym1 = tp1.typeSymbol
+ val tsym2 = tp2.typeSymbol
+ if (!tsym2.exists) tp1
+ else if (!tsym1.exists) tp2
+ else if (!isJava && tsym1.derivesFrom(tsym2)) tp1
+ else if (!isJava && tsym2.derivesFrom(tsym1)) tp2
+ else if (tp1.typeSymbol.isRealClass) tp1
+ else if (tp2.typeSymbol.isRealClass) tp2
+ else tp1
+ }
+ }
+
+ /** Does the (possibly generic) type `tp` have the same erasure in all its
+ * possible instantiations?
+ */
+ def hasStableErasure(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp: TypeRef =>
+ tp.info match {
+ case TypeAlias(alias) => hasStableErasure(alias)
+ case _: ClassInfo => true
+ case _ => false
+ }
+ case tp: PolyParam => false
+ case tp: TypeProxy => hasStableErasure(tp.superType)
+ case tp: AndOrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2)
+ case _ => false
+ }
+}
+import TypeErasure._
+
+/**
+ * @param isJava Arguments should be treated the way Java does it
+ * @param semiEraseVCs If true, value classes are semi-erased to ErasedValueType
+ * (they will be fully erased in [[ElimErasedValueType]]).
+ * If false, they are erased like normal classes.
+ * @param isConstructor Argument forms part of the type of a constructor
+ * @param wildcardOK Wildcards are acceptable (true when using the erasure
+ * for computing a signature name).
+ */
+class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean, wildcardOK: Boolean) extends DotClass {
+
+ /** The erasure |T| of a type T. This is:
+ *
+ * - For a refined type scala.Array+[T]:
+ * - if T is Nothing or Null, []Object
+ * - otherwise, if T <: Object, []|T|
+ * - otherwise, if T is a type parameter coming from Java, []Object
+ * - otherwise, Object
+ * - For a term ref p.x, the type <noprefix> # x.
+ * - For a typeref scala.Any, scala.AnyVal or scala.Singleton: |java.lang.Object|
+ * - For a typeref scala.Unit, |scala.runtime.BoxedUnit|.
+ * - For a typeref P.C where C refers to a class, <noprefix> # C.
+ * - For a typeref P.C where C refers to an alias type, the erasure of C's alias.
+ * - For a typeref P.C where C refers to an abstract type, the erasure of C's upper bound.
+ * - For a this-type C.this, the type itself.
+ * - For all other type proxies: The erasure of the underlying type.
+ * - For T1 & T2, the erased glb of |T1| and |T2| (see erasedGlb)
+ * - For T1 | T2, the first base class in the linearization of T1 which is also a base class of T2
+ * - For => T, ()T
+ * - For a method type (Fs)scala.Unit, (|Fs|)scala.Unit.
+ * - For any other uncurried method type (Fs)T, (|Fs|)|T|.
+ * - For a curried method type (Fs1)(Fs2)T, (|Fs1|,Es2)ET where (Es2)ET = |(Fs2)T|.
+ * - For a polymorphic type [Ts](Ps)T, |(Ps)T|
+ * - For a polymorphic type [Ts]T where T is not a method type, ()|T|
+ * - For the class info type of java.lang.Object, the same type without any parents.
+ * - For a class info type of a value class, the same type without any parents.
+ * - For any other class info type with parents Ps, the same type with
+ * parents |Ps|, but with duplicate references of Object removed.
+ * - For NoType or NoPrefix, the type itself.
+ * - For any other type, exception.
+ */
+ private def apply(tp: Type)(implicit ctx: Context): Type = tp match {
+ case _: ErasedValueType =>
+ tp
+ case tp: TypeRef =>
+ val sym = tp.symbol
+ if (!sym.isClass) this(tp.info)
+ else if (semiEraseVCs && isDerivedValueClass(sym)) eraseDerivedValueClassRef(tp)
+ else if (sym == defn.ArrayClass) apply(tp.appliedTo(TypeBounds.empty)) // i966 shows that we can hit a raw Array type.
+ else eraseNormalClassRef(tp)
+ case tp: RefinedType =>
+ val parent = tp.parent
+ if (parent isRef defn.ArrayClass) eraseArray(tp)
+ else this(parent)
+ case _: TermRef | _: ThisType =>
+ this(tp.widen)
+ case SuperType(thistpe, supertpe) =>
+ SuperType(this(thistpe), this(supertpe))
+ case ExprType(rt) =>
+ defn.FunctionClass(0).typeRef
+ case AndType(tp1, tp2) =>
+ erasedGlb(this(tp1), this(tp2), isJava)
+ case OrType(tp1, tp2) =>
+ ctx.typeComparer.orType(this(tp1), this(tp2), erased = true)
+ case tp: MethodType =>
+ def paramErasure(tpToErase: Type) =
+ erasureFn(tp.isJava, semiEraseVCs, isConstructor, wildcardOK)(tpToErase)
+ val formals = tp.paramTypes.mapConserve(paramErasure)
+ eraseResult(tp.resultType) match {
+ case rt: MethodType =>
+ tp.derivedMethodType(tp.paramNames ++ rt.paramNames, formals ++ rt.paramTypes, rt.resultType)
+ case rt =>
+ tp.derivedMethodType(tp.paramNames, formals, rt)
+ }
+ case tp @ ClassInfo(pre, cls, classParents, decls, _) =>
+ if (cls is Package) tp
+ else {
+ def eraseTypeRef(p: TypeRef) = this(p).asInstanceOf[TypeRef]
+ val parents: List[TypeRef] =
+ if ((cls eq defn.ObjectClass) || cls.isPrimitiveValueClass) Nil
+ else classParents.mapConserve(eraseTypeRef) match {
+ case tr :: trs1 =>
+ assert(!tr.classSymbol.is(Trait), cls)
+ val tr1 = if (cls is Trait) defn.ObjectType else tr
+ tr1 :: trs1.filterNot(_ isRef defn.ObjectClass)
+ case nil => nil
+ }
+ val erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass)
+ tp.derivedClassInfo(NoPrefix, parents, erasedDecls, erasedRef(tp.selfType))
+ // can't replace selftype by NoType because this would lose the sourceModule link
+ }
+ case NoType | NoPrefix | ErrorType | JavaArrayType(_) =>
+ tp
+ case tp: WildcardType if wildcardOK =>
+ tp
+ case tp: TypeProxy =>
+ this(tp.underlying)
+ }
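+
+  // Illustrative example, not part of the original source: under the rules above,
+  // a curried Scala method type such as
+  //
+  //   (List[Int])(String)Option[Int]    // e.g. def f(xs: List[Int])(s: String): Option[Int]
+  //
+  // erases to the single, unparameterized method type (List, String)Option:
+  // type arguments are dropped, and the second parameter list is merged into the first.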
+
+ private def eraseArray(tp: RefinedType)(implicit ctx: Context) = {
+ val defn.ArrayOf(elemtp) = tp
+ def arrayErasure(tpToErase: Type) =
+ erasureFn(isJava, semiEraseVCs = false, isConstructor, wildcardOK)(tpToErase)
+ if (elemtp derivesFrom defn.NullClass) JavaArrayType(defn.ObjectType)
+ else if (isUnboundedGeneric(elemtp) && !isJava) defn.ObjectType
+ else JavaArrayType(arrayErasure(elemtp))
+ }
+
+ /** The erasure of a symbol's info. This is different from `apply` in the way `ExprType`s and
+ * `PolyType`s are treated. `eraseInfo` maps them to method types, whereas `apply` maps them
+ * to the underlying type.
+ */
+ def eraseInfo(tp: Type, sym: Symbol)(implicit ctx: Context) = tp match {
+ case ExprType(rt) =>
+ if (sym is Param) apply(tp)
+ // Note that params with ExprTypes are eliminated by ElimByName,
+ // but potentially re-introduced by ResolveSuper, when we add
+ // forwarders to mixin methods.
+ // See doc comment for ElimByName for speculation how we could improve this.
+ else MethodType(Nil, Nil, eraseResult(rt))
+ case tp: PolyType =>
+ eraseResult(tp.resultType) match {
+ case rt: MethodType => rt
+ case rt => MethodType(Nil, Nil, rt)
+ }
+ case tp => this(tp)
+ }
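+
+  // Illustrative examples, not part of the original source: for a parameterless
+  // method def size: Int, the info is ExprType(Int) and, since `size` is not a
+  // parameter, eraseInfo yields the nullary method type ()Int; for def id[T](x: T): T,
+  // the PolyType is dropped and the result is the method type (Object)Object.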
+
+ private def eraseDerivedValueClassRef(tref: TypeRef)(implicit ctx: Context): Type = {
+ val cls = tref.symbol.asClass
+ val underlying = underlyingOfValueClass(cls)
+ if (underlying.exists) ErasedValueType(tref, valueErasure(underlying))
+ else NoType
+ }
+
+ private def eraseNormalClassRef(tref: TypeRef)(implicit ctx: Context): Type = {
+ val cls = tref.symbol.asClass
+ (if (cls.owner is Package) normalizeClass(cls) else cls).typeRef
+ }
+
+ /** The erasure of a function result type. */
+ private def eraseResult(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: TypeRef =>
+ val sym = tp.typeSymbol
+ if (sym eq defn.UnitClass) sym.typeRef
+ // For a value class V, "new V(x)" should have type V for type adaptation to work
+ // correctly (see SIP-15 and [[Erasure.Boxing.adaptToType]]), so the return type of a
+ // constructor method should not be semi-erased.
+ else if (isConstructor && isDerivedValueClass(sym)) eraseNormalClassRef(tp)
+ else this(tp)
+ case RefinedType(parent, _, _) if !(parent isRef defn.ArrayClass) =>
+ eraseResult(parent)
+ case _ =>
+ this(tp)
+ }
+
+ private def normalizeClass(cls: ClassSymbol)(implicit ctx: Context): ClassSymbol = {
+ if (cls.owner == defn.ScalaPackageClass) {
+ if (cls == defn.AnyClass || cls == defn.AnyValClass || cls == defn.SingletonClass)
+ return defn.ObjectClass
+ if (cls == defn.UnitClass)
+ return defn.BoxedUnitClass
+ }
+ cls
+ }
+
+ /** The name of the type as it is used in `Signature`s.
+ * Need to ensure correspondence with erasure!
+ */
+ private def sigName(tp: Type)(implicit ctx: Context): TypeName = try {
+ tp match {
+ case ErasedValueType(_, underlying) =>
+ sigName(underlying)
+ case tp: TypeRef =>
+ if (!tp.denot.exists) throw new MissingType(tp.prefix, tp.name)
+ val sym = tp.symbol
+ if (!sym.isClass) {
+ val info = tp.info
+ if (!info.exists) assert(false, s"undefined: $tp with symbol $sym")
+ return sigName(info)
+ }
+ if (isDerivedValueClass(sym)) {
+ val erasedVCRef = eraseDerivedValueClassRef(tp)
+ if (erasedVCRef.exists) return sigName(erasedVCRef)
+ }
+ normalizeClass(sym.asClass).fullName.asTypeName
+ case defn.ArrayOf(elem) =>
+ sigName(this(tp))
+ case JavaArrayType(elem) =>
+ sigName(elem) ++ "[]"
+ case tp: TermRef =>
+ sigName(tp.widen)
+ case ExprType(rt) =>
+ sigName(defn.FunctionOf(Nil, rt))
+ case tp: TypeVar =>
+ val inst = tp.instanceOpt
+ if (inst.exists) sigName(inst) else tpnme.Uninstantiated
+ case tp: TypeProxy =>
+ sigName(tp.underlying)
+ case ErrorType | WildcardType =>
+ tpnme.WILDCARD
+ case tp: WildcardType =>
+ sigName(tp.optBounds)
+ case _ =>
+ val erased = this(tp)
+ assert(erased ne tp, tp)
+ sigName(erased)
+ }
+ } catch {
+ case ex: AssertionError =>
+ println(s"no sig for $tp")
+ throw ex
+ }
+
+
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
new file mode 100644
index 000000000..92e5f9d57
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala
@@ -0,0 +1,554 @@
+package dotty.tools
+package dotc
+package core
+
+import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
+import SymDenotations._, Denotations.SingleDenotation
+import config.Printers.typr
+import util.Positions._
+import NameOps._
+import Decorators._
+import StdNames._
+import Annotations._
+import util.SimpleMap
+import collection.mutable
+import ast.tpd._
+
+trait TypeOps { this: Context => // TODO: Make standalone object.
+
+ /** The type `tp` as seen from prefix `pre` and owner `cls`. See the spec
+ * for what this means. Called very often, so the code is optimized heavily.
+ *
+ * A tricky aspect is what to do with unstable prefixes. E.g. say we have a class
+ *
+ * class C { type T; def f(x: T): T }
+ *
+ * and an expression `e` of type `C`. Then computing the type of `e.f` leads
+ * to the query asSeenFrom(`C`, `(x: T)T`). What should its result be? The
+ * naive answer `(x: C#T)C#T` is incorrect given that we treat `C#T` as the existential
+ * `exists(c: C)c.T`. What we need to do instead is to skolemize the existential. So
+ * the answer would be `(x: c.T)c.T` for some (unknown) value `c` of type `C`.
+ * `c.T` is expressed in the compiler as a skolem type `Skolem(C)`.
+ *
+ * Now, skolemization is messy and expensive, so we want to do it only if we absolutely
+ * must. Also, skolemizing immediately would mean that asSeenFrom was no longer
+ * idempotent - each call would return a type with a different skolem.
+ * Instead we produce an annotated type that marks the prefix as unsafe:
+ *
+ * (x: (C @ UnsafeNonvariant)#T)C#T
+ *
+ * We also set a global state flag `unsafeNonvariant` to the current run.
+ * When typing a Select node, typer will check that flag, and if it
+ * points to the current run will scan the result type of the select for
+ * @UnsafeNonvariant annotations. If it finds any, it will introduce a skolem
+ * constant for the prefix and try again.
+ *
+ * The scheme is efficient in particular because we expect that unsafe situations are rare;
+ * most compilation runs would contain none, so no scanning would be necessary.
+ */
+ final def asSeenFrom(tp: Type, pre: Type, cls: Symbol): Type =
+ asSeenFrom(tp, pre, cls, null)
+
+ /** Helper method, taking a map argument which is instantiated only for more
+ * complicated cases of asSeenFrom.
+ */
+ private def asSeenFrom(tp: Type, pre: Type, cls: Symbol, theMap: AsSeenFromMap): Type = {
+
+ /** Map a `C.this` type to the right prefix. If the prefix is unstable and
+ * the `C.this` occurs in nonvariant or contravariant position, mark the map
+ * to be unstable.
+ */
+ def toPrefix(pre: Type, cls: Symbol, thiscls: ClassSymbol): Type = /*>|>*/ ctx.conditionalTraceIndented(TypeOps.track, s"toPrefix($pre, $cls, $thiscls)") /*<|<*/ {
+ if ((pre eq NoType) || (pre eq NoPrefix) || (cls is PackageClass))
+ tp
+ else pre match {
+ case pre: SuperType => toPrefix(pre.thistpe, cls, thiscls)
+ case _ =>
+ if (thiscls.derivesFrom(cls) && pre.baseTypeRef(thiscls).exists) {
+ if (theMap != null && theMap.currentVariance <= 0 && !isLegalPrefix(pre)) {
+ ctx.base.unsafeNonvariant = ctx.runId
+ AnnotatedType(pre, Annotation(defn.UnsafeNonvariantAnnot, Nil))
+ }
+ else pre
+ }
+ else if ((pre.termSymbol is Package) && !(thiscls is Package))
+ toPrefix(pre.select(nme.PACKAGE), cls, thiscls)
+ else
+ toPrefix(pre.baseTypeRef(cls).normalizedPrefix, cls.owner, thiscls)
+ }
+ }
+
+ /*>|>*/ ctx.conditionalTraceIndented(TypeOps.track, s"asSeen ${tp.show} from (${pre.show}, ${cls.show})", show = true) /*<|<*/ { // !!! DEBUG
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (sym.isStatic) tp
+ else {
+ val pre1 = asSeenFrom(tp.prefix, pre, cls, theMap)
+ if (pre1.isUnsafeNonvariant)
+ pre1.member(tp.name).info match {
+ case TypeAlias(alias) =>
+ // Try to follow aliases: this avoids skolemization.
+ return alias
+ case _ =>
+ }
+ tp.derivedSelect(pre1)
+ }
+ case tp: ThisType =>
+ toPrefix(pre, cls, tp.cls)
+ case _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(
+ asSeenFrom(tp.parent, pre, cls, theMap),
+ tp.refinedName,
+ asSeenFrom(tp.refinedInfo, pre, cls, theMap))
+ case tp: TypeAlias if tp.variance == 1 => // if variance != 1, need to do the variance calculation
+ tp.derivedTypeAlias(asSeenFrom(tp.alias, pre, cls, theMap))
+ case _ =>
+ (if (theMap != null) theMap else new AsSeenFromMap(pre, cls))
+ .mapOver(tp)
+ }
+ }
+ }
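+
+  // Illustrative example, not part of the original source: with
+  //
+  //   class C { type T; def f(x: T): T }
+  //   val c: C { type T = Int } = ...
+  //
+  // asSeenFrom(C.this.T, c.type, C) maps the ThisType prefix to the stable prefix
+  // `c`, giving c.T; because the prefix is stable, no @UnsafeNonvariant marking
+  // (and hence no later skolemization) is needed.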
+
+ private def isLegalPrefix(pre: Type)(implicit ctx: Context) =
+ pre.isStable || !ctx.phase.isTyper
+
+ /** The TypeMap handling the asSeenFrom in more complicated cases */
+ class AsSeenFromMap(pre: Type, cls: Symbol) extends TypeMap {
+ def apply(tp: Type) = asSeenFrom(tp, pre, cls, this)
+
+ /** A method to export the current variance of the map */
+ def currentVariance = variance
+ }
+
+ /** Approximate a type `tp` with a type that does not contain skolem types. */
+ object deskolemize extends ApproximatingTypeMap {
+ private var seen: Set[SkolemType] = Set()
+ def apply(tp: Type) = tp match {
+ case tp: SkolemType =>
+ if (seen contains tp) NoType
+ else {
+ val saved = seen
+ seen += tp
+ try approx(hi = tp.info)
+ finally seen = saved
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ /** Implementation of Types#simplified */
+ final def simplify(tp: Type, theMap: SimplifyMap): Type = tp match {
+ case tp: NamedType =>
+ if (tp.symbol.isStatic) tp
+ else tp.derivedSelect(simplify(tp.prefix, theMap)) match {
+ case tp1: NamedType if tp1.denotationIsCurrent =>
+ val tp2 = tp1.reduceProjection
+ //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2")
+ tp2
+ case tp1 => tp1
+ }
+ case tp: PolyParam =>
+ typerState.constraint.typeVarOfParam(tp) orElse tp
+ case _: ThisType | _: BoundType | NoPrefix =>
+ tp
+ case tp: RefinedType =>
+ tp.derivedRefinedType(simplify(tp.parent, theMap), tp.refinedName, simplify(tp.refinedInfo, theMap))
+ case tp: TypeAlias =>
+ tp.derivedTypeAlias(simplify(tp.alias, theMap))
+ case AndType(l, r) =>
+ simplify(l, theMap) & simplify(r, theMap)
+ case OrType(l, r) =>
+ simplify(l, theMap) | simplify(r, theMap)
+ case _ =>
+ (if (theMap != null) theMap else new SimplifyMap).mapOver(tp)
+ }
+
+ class SimplifyMap extends TypeMap {
+ def apply(tp: Type) = simplify(tp, this)
+ }
+
+ /** Approximate union type by intersection of its dominators.
+ * That is, replace a union type T1 | ... | Tn
+ * by the smallest intersection type of base-class instances of T1,...,Tn.
+ * Example: Given
+ *
+ * trait C[+T]
+ * trait D
+ * trait E
+ * class A extends C[A] with D
+ * class B extends C[B] with D with E
+ *
+ * we approximate `A | B` by `C[A | B] with D`
+ */
+ def orDominator(tp: Type): Type = {
+
+ /** a faster version of cs1 intersect cs2 */
+ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = {
+ val cs2AsSet = new util.HashSet[ClassSymbol](100)
+ cs2.foreach(cs2AsSet.addEntry)
+ cs1.filter(cs2AsSet.contains)
+ }
+
+ /** The minimal set of classes in `cs` which derive all other classes in `cs` */
+ def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match {
+ case c :: rest =>
+ val accu1 = if (accu exists (_ derivesFrom c)) accu else c :: accu
+ if (cs == c.baseClasses) accu1 else dominators(rest, accu1)
+ }
+
+ def mergeRefined(tp1: Type, tp2: Type): Type = {
+ def fail = throw new AssertionError(i"Failure to join alternatives $tp1 and $tp2")
+ tp1 match {
+ case tp1 @ RefinedType(parent1, name1, rinfo1) =>
+ tp2 match {
+ case RefinedType(parent2, `name1`, rinfo2) =>
+ tp1.derivedRefinedType(
+ mergeRefined(parent1, parent2), name1, rinfo1 | rinfo2)
+ case _ => fail
+ }
+ case tp1 @ TypeRef(pre1, name1) =>
+ tp2 match {
+ case tp2 @ TypeRef(pre2, `name1`) =>
+ tp1.derivedSelect(pre1 | pre2)
+ case _ => fail
+ }
+ case _ => fail
+ }
+ }
+
+ def approximateOr(tp1: Type, tp2: Type): Type = {
+ def isClassRef(tp: Type): Boolean = tp match {
+ case tp: TypeRef => tp.symbol.isClass
+ case tp: RefinedType => isClassRef(tp.parent)
+ case _ => false
+ }
+
+ tp1 match {
+ case tp1: RecType =>
+ tp1.rebind(approximateOr(tp1.parent, tp2))
+ case tp1: TypeProxy if !isClassRef(tp1) =>
+ orDominator(tp1.superType | tp2)
+ case _ =>
+ tp2 match {
+ case tp2: RecType =>
+ tp2.rebind(approximateOr(tp1, tp2.parent))
+ case tp2: TypeProxy if !isClassRef(tp2) =>
+ orDominator(tp1 | tp2.superType)
+ case _ =>
+ val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect)
+ val doms = dominators(commonBaseClasses, Nil)
+ def baseTp(cls: ClassSymbol): Type = {
+ val base =
+ if (tp1.typeParams.nonEmpty) tp.baseTypeRef(cls)
+ else tp.baseTypeWithArgs(cls)
+ base.mapReduceOr(identity)(mergeRefined)
+ }
+ doms.map(baseTp).reduceLeft(AndType.apply)
+ }
+ }
+ }
+
+ tp match {
+ case tp: OrType =>
+ approximateOr(tp.tp1, tp.tp2)
+ case _ =>
+ tp
+ }
+ }
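+
+  // Working through the example in the comment above (illustrative, not part of
+  // the original source): the common base classes of A and B include C and D;
+  // `dominators` keeps only the most derived ones, {C, D}; `baseTp` then computes
+  // the base type of A | B for each of them, C[A | B] and D, and the results are
+  // intersected, giving C[A | B] & D.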
+
+ /** Given a disjunction T1 | ... | Tn of types with potentially embedded
+ * type variables, constrain type variables further if this eliminates
+ * some of the branches of the disjunction. Do this also for disjunctions
+ * embedded in intersections, as parents in refinements, and in recursive types.
+ *
+ * For instance, if `A` is an unconstrained type variable, then
+ *
+ * ArrayBuffer[Int] | ArrayBuffer[A]
+ *
+ * is approximated by constraining `A` to be =:= to `Int` and returning `ArrayBuffer[Int]`
+ * instead of `ArrayBuffer[_ >: Int | A <: Int & A]`
+ */
+ def harmonizeUnion(tp: Type): Type = tp match {
+ case tp: OrType =>
+ joinIfScala2(typeComparer.fluidly(tp.tp1 | tp.tp2))
+ case tp @ AndType(tp1, tp2) =>
+ tp derived_& (harmonizeUnion(tp1), harmonizeUnion(tp2))
+ case tp: RefinedType =>
+ tp.derivedRefinedType(harmonizeUnion(tp.parent), tp.refinedName, tp.refinedInfo)
+ case tp: RecType =>
+ tp.rebind(harmonizeUnion(tp.parent))
+ case _ =>
+ tp
+ }
+
+ /** Under -language:Scala2: Replace or-types with their joins */
+ private def joinIfScala2(tp: Type) = tp match {
+ case tp: OrType if scala2Mode => tp.join
+ case _ => tp
+ }
+
+ /** Not currently needed:
+ *
+ def liftToRec(f: (Type, Type) => Type)(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ def f2(tp1: Type, tp2: Type): Type = tp2 match {
+ case tp2: RecType => tp2.rebind(f(tp1, tp2.parent))
+ case _ => f(tp1, tp2)
+ }
+ tp1 match {
+ case tp1: RecType => tp1.rebind(f2(tp1.parent, tp2))
+ case _ => f2(tp1, tp2)
+ }
+ }
+ */
+
+ private def enterArgBinding(formal: Symbol, info: Type, cls: ClassSymbol, decls: Scope) = {
+ val lazyInfo = new LazyType { // needed so we do not force `formal`.
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ denot setFlag formal.flags & RetainedTypeArgFlags
+ denot.info = info
+ }
+ }
+ val sym = ctx.newSymbol(
+ cls, formal.name,
+ formal.flagsUNSAFE & RetainedTypeArgFlags | BaseTypeArg | Override,
+ lazyInfo,
+ coord = cls.coord)
+ cls.enter(sym, decls)
+ }
+
+ /** If `tpe` is of the form `p.x` where `p` refers to a package
+ * but `x` is not owned by a package, expand it to
+ *
+ * p.package.x
+ */
+ def makePackageObjPrefixExplicit(tpe: NamedType): Type = {
+ def tryInsert(pkgClass: SymDenotation): Type = pkgClass match {
+ case pkgCls: PackageClassDenotation if !(tpe.symbol.maybeOwner is Package) =>
+ tpe.derivedSelect(pkgCls.packageObj.valRef)
+ case _ =>
+ tpe
+ }
+ tpe.prefix match {
+ case pre: ThisType if pre.cls is Package => tryInsert(pre.cls)
+ case pre: TermRef if pre.symbol is Package => tryInsert(pre.symbol.moduleClass)
+ case _ => tpe
+ }
+ }
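As a source-level illustration of the rewrite described above (package and member names are hypothetical): `X` below is owned by the package object of `p`, not by the package itself, so a reference typed as `p.X` has its prefix expanded to the package object.

  package object p { type X = Int }

  object Use {
    val x: p.X = 1   // internally resolved through p.`package`.X
  }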
+
+ /** If we have member definitions
+ *
+ * type argSym v= from
+ * type from v= to
+ *
+   *  where the variances of both aliases are the same, then enter a new definition
+ *
+ * type argSym v= to
+ *
+ * unless a definition for `argSym` already exists in the current scope.
+ */
+ def forwardRef(argSym: Symbol, from: Symbol, to: TypeBounds, cls: ClassSymbol, decls: Scope) =
+ argSym.info match {
+ case info @ TypeBounds(lo2 @ TypeRef(_: ThisType, name), hi2) =>
+ if (name == from.name &&
+ (lo2 eq hi2) &&
+ info.variance == to.variance &&
+ !decls.lookup(argSym.name).exists) {
+ // println(s"short-circuit ${argSym.name} was: ${argSym.info}, now: $to")
+ enterArgBinding(argSym, to, cls, decls)
+ }
+ case _ =>
+ }
+
+
+ /** Normalize a list of parent types of class `cls` that may contain refinements
+ * to a list of typerefs referring to classes, by converting all refinements to member
+ * definitions in scope `decls`. Can add members to `decls` as a side-effect.
+ */
+ def normalizeToClassRefs(parents: List[Type], cls: ClassSymbol, decls: Scope): List[TypeRef] = {
+
+ /** If we just entered the type argument binding
+ *
+ * type From = To
+ *
+ * and there is a type argument binding in a parent in `prefs` of the form
+ *
+ * type X = From
+ *
+ * then also add the binding
+ *
+ * type X = To
+ *
+ * to the current scope, provided (1) variances of both aliases are the same, and
+ * (2) X is not yet defined in current scope. This "short-circuiting" prevents
+ * long chains of aliases which would have to be traversed in type comparers.
+ *
+ * Note: Test i1401.scala shows that `forwardRefs` is also necessary
+ * for typechecking in the case where self types refer to type parameters
+ * that are upper-bounded by subclass instances.
+ */
+ def forwardRefs(from: Symbol, to: Type, prefs: List[TypeRef]) = to match {
+ case to @ TypeBounds(lo1, hi1) if lo1 eq hi1 =>
+ for (pref <- prefs) {
+ def forward(): Unit =
+ for (argSym <- pref.decls)
+ if (argSym is BaseTypeArg)
+ forwardRef(argSym, from, to, cls, decls)
+ pref.info match {
+ case info: TempClassInfo => info.addSuspension(forward)
+ case _ => forward()
+ }
+ }
+ case _ =>
+ }
+
+ // println(s"normalizing $parents of $cls in ${cls.owner}") // !!! DEBUG
+
+ // A map consolidating all refinements arising from parent type parameters
+ var refinements: SimpleMap[TypeName, Type] = SimpleMap.Empty
+
+ // A map of all formal type parameters of base classes that get refined
+    var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty
+
+ // Strip all refinements from parent type, populating `refinements` and `formals` maps.
+ def normalizeToRef(tp: Type): TypeRef = {
+ def fail = throw new TypeError(s"unexpected parent type: $tp")
+ tp.dealias match {
+ case tp: TypeRef =>
+ tp
+ case tp @ RefinedType(tp1, name: TypeName, rinfo) =>
+ rinfo match {
+ case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) =>
+ // Don't record refinements of the form X = this.X (These can arise using named parameters).
+ typr.println(s"dropping refinement $tp")
+ case _ =>
+ val prevInfo = refinements(name)
+ refinements = refinements.updated(name,
+ if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo)
+ formals = formals.updated(name, tp1.typeParamNamed(name))
+ }
+ normalizeToRef(tp1)
+ case ErrorType =>
+ defn.AnyType
+ case AnnotatedType(tpe, _) =>
+ normalizeToRef(tpe)
+ case HKApply(tycon: TypeRef, args) =>
+ tycon.info match {
+ case TypeAlias(alias) => normalizeToRef(alias.appliedTo(args))
+ case _ => fail
+ }
+ case _ =>
+ fail
+ }
+ }
+
+ val parentRefs = parents map normalizeToRef
+
+ // Enter all refinements into current scope.
+ refinements foreachBinding { (name, refinedInfo) =>
+ assert(decls.lookup(name) == NoSymbol, // DEBUG
+ s"redefinition of ${decls.lookup(name).debugString} in ${cls.showLocated}")
+ enterArgBinding(formals(name), refinedInfo, cls, decls)
+ }
+ // Forward definitions in super classes that have one of the refined parameters
+ // as aliases directly to the refined info.
+ // Note that this cannot be fused with the previous loop because we now
+ // assume that all arguments have been entered in `decls`.
+ refinements foreachBinding { (name, refinedInfo) =>
+ forwardRefs(formals(name), refinedInfo, parentRefs)
+ }
+ parentRefs
+ }
+
+ /** An argument bounds violation is a triple consisting of
+ * - the argument tree
+ * - a string "upper" or "lower" indicating which bound is violated
+ * - the violated bound
+ */
+ type BoundsViolation = (Tree, String, Type)
+
+ /** The list of violations where arguments are not within bounds.
+ * @param args The arguments
+ * @param boundss The list of type bounds
+ * @param instantiate A function that maps a bound type and the list of argument types to a resulting type.
+ * Needed to handle bounds that refer to other bounds.
+ */
+ def boundsViolations(args: List[Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context): List[BoundsViolation] = {
+ val argTypes = args.tpes
+ val violations = new mutable.ListBuffer[BoundsViolation]
+ for ((arg, bounds) <- args zip boundss) {
+ def checkOverlapsBounds(lo: Type, hi: Type): Unit = {
+ //println(i"instantiating ${bounds.hi} with $argTypes")
+ //println(i" = ${instantiate(bounds.hi, argTypes)}")
+ val hiBound = instantiate(bounds.hi, argTypes.mapConserve(_.bounds.hi))
+ val loBound = instantiate(bounds.lo, argTypes.mapConserve(_.bounds.lo))
+ // Note that argTypes can contain a TypeBounds type for arguments that are
+ // not fully determined. In that case we need to check against the hi bound of the argument.
+ if (!(lo <:< hiBound)) violations += ((arg, "upper", hiBound))
+ if (!(loBound <:< hi)) violations += ((arg, "lower", bounds.lo))
+ }
+ arg.tpe match {
+ case TypeBounds(lo, hi) => checkOverlapsBounds(lo, hi)
+ case tp => checkOverlapsBounds(tp, tp)
+ }
+ }
+ violations.toList
+ }
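A self-contained toy model of the check performed above (not the compiler API; "types" are plain Ints and "subtyping" is `<=`): each argument must lie between its instantiated lower and upper bound, and each failure is recorded as an (argument, "upper"/"lower", bound) triple.

  object BoundsCheckSketch {
    final case class Bounds(lo: Int, hi: Int)

    def boundsViolations(args: List[Int], boundss: List[Bounds]): List[(Int, String, Int)] =
      args.zip(boundss).flatMap { case (arg, Bounds(lo, hi)) =>
        val upper = if (arg <= hi) Nil else List((arg, "upper", hi))
        val lower = if (lo <= arg) Nil else List((arg, "lower", lo))
        upper ::: lower
      }
  }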
+
+ /** Is `feature` enabled in class `owner`?
+ * This is the case if one of the following two alternatives holds:
+ *
+ * 1. The feature is imported by a named import
+ *
+ * import owner.feature
+ *
+ * (the feature may be bunched with others, or renamed, but wildcard imports
+ * don't count).
+ *
+ * 2. The feature is enabled by a compiler option
+ *
+ * - language:<prefix>feature
+ *
+ * where <prefix> is the full name of the owner followed by a "." minus
+ * the prefix "dotty.language.".
+ */
+ def featureEnabled(owner: ClassSymbol, feature: TermName): Boolean = {
+ def toPrefix(sym: Symbol): String =
+ if (!sym.exists || (sym eq defn.LanguageModuleClass)) ""
+ else toPrefix(sym.owner) + sym.name + "."
+ def featureName = toPrefix(owner) + feature
+ def hasImport(implicit ctx: Context): Boolean = {
+ if (ctx.importInfo == null || (ctx.importInfo.site.widen.typeSymbol ne owner)) false
+ else if (ctx.importInfo.excluded.contains(feature)) false
+ else if (ctx.importInfo.originals.contains(feature)) true
+ else {
+ var c = ctx.outer
+ while (c.importInfo eq ctx.importInfo) c = c.outer
+ hasImport(c)
+ }
+ }
+ def hasOption = ctx.base.settings.language.value exists (s => s == featureName || s == "_")
+ hasImport(ctx.withPhase(ctx.typerPhase)) || hasOption
+ }
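Concretely, the two alternatives described above correspond to user-level code roughly like the following (the feature name is one of those referenced further down; a wildcard import would not count):

  // Alternative 1: a named import of the feature member.
  import dotty.language.noAutoTupling

  // Alternative 2: no import, but the compiler is invoked with
  //   -language:noAutoTupling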
+
+ /** Is auto-tupling enabled? */
+ def canAutoTuple =
+ !featureEnabled(defn.LanguageModuleClass, nme.noAutoTupling)
+
+ def scala2Mode =
+ featureEnabled(defn.LanguageModuleClass, nme.Scala2)
+
+ def dynamicsEnabled =
+ featureEnabled(defn.LanguageModuleClass, nme.dynamics)
+
+ def testScala2Mode(msg: String, pos: Position) = {
+ if (scala2Mode) migrationWarning(msg, pos)
+ scala2Mode
+ }
+}
+
+object TypeOps {
+ @sharable var track = false // !!!DEBUG
+}
diff --git a/compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala b/compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala
new file mode 100644
index 000000000..647c895db
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TypeParamInfo.scala
@@ -0,0 +1,40 @@
+package dotty.tools.dotc.core
+
+import Names.TypeName
+import Contexts.Context
+import Types.{Type, TypeBounds}
+
+/** A common super trait of Symbol and LambdaParam.
+ * Used to capture the attributes of type parameters which can be implemented as either.
+ */
+trait TypeParamInfo {
+
+ /** Is this the info of a type parameter? Will return `false` for symbols
+ * that are not type parameters.
+ */
+ def isTypeParam(implicit ctx: Context): Boolean
+
+ /** The name of the type parameter */
+ def paramName(implicit ctx: Context): TypeName
+
+ /** The info of the type parameter */
+ def paramBounds(implicit ctx: Context): TypeBounds
+
+ /** The info of the type parameter as seen from a prefix type.
+ * For type parameter symbols, this is the `memberInfo` as seen from `prefix`.
+ * For type lambda parameters, it's the same as `paramBounds` as
+ * `asSeenFrom` has already been applied to the whole type lambda.
+ */
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds
+
+ /** The parameter bounds, or the completer if the type parameter
+ * is an as-yet uncompleted symbol.
+ */
+ def paramBoundsOrCompleter(implicit ctx: Context): Type
+
+ /** The variance of the type parameter */
+ def paramVariance(implicit ctx: Context): Int
+
+ /** A type that refers to the parameter */
+ def paramRef(implicit ctx: Context): Type
+} \ No newline at end of file
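Because symbol-based and lambda-based type parameters share this interface, client code can treat them uniformly. A minimal sketch of such a client (hypothetical helper; the imports are a best guess for this code base):

import dotty.tools.dotc.core.TypeParamInfo
import dotty.tools.dotc.core.Contexts.Context

object TypeParamInfoSketch {
  // Render a type parameter's name, variance and upper bound, regardless of
  // whether it is backed by a symbol or by a type lambda parameter.
  def describe(tparam: TypeParamInfo)(implicit ctx: Context): String =
    s"${tparam.paramName} (variance ${tparam.paramVariance}) <: ${tparam.paramBounds.hi}"
}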
diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala
new file mode 100644
index 000000000..5c476c1cb
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala
@@ -0,0 +1,210 @@
+package dotty.tools
+package dotc
+package core
+
+import Types._
+import Flags._
+import Contexts._
+import util.{SimpleMap, DotClass}
+import reporting._
+import printing.{Showable, Printer}
+import printing.Texts._
+import config.Config
+import collection.mutable
+
+class TyperState(r: Reporter) extends DotClass with Showable {
+
+ /** The current reporter */
+ def reporter = r
+
+ /** The current constraint set */
+ def constraint: Constraint =
+ new OrderingConstraint(SimpleMap.Empty, SimpleMap.Empty, SimpleMap.Empty)
+ def constraint_=(c: Constraint)(implicit ctx: Context): Unit = {}
+
+ /** The uninstantiated variables */
+ def uninstVars = constraint.uninstVars
+
+ /** The ephemeral flag is set as a side effect if an operation accesses
+ * the underlying type of a type variable. The reason we need this flag is
+ * that any such operation is not referentially transparent; it might logically change
+ * its value at the moment the type variable is instantiated. Caching code needs to
+   *  check the ephemeral flag; if the flag is set during an operation, the result
+ * of that operation should not be cached.
+ */
+ def ephemeral: Boolean = false
+ def ephemeral_=(x: Boolean): Unit = ()
+
+  /** For each instantiated type var that does not yet have its `inst` field
+   *  set, gives the instance value stored in the constraint. Storing instances in constraints
+ * is done only in a temporary way for contexts that may be retracted
+ * without also retracting the type var as a whole.
+ */
+ def instType(tvar: TypeVar)(implicit ctx: Context): Type = constraint.entry(tvar.origin) match {
+ case _: TypeBounds => NoType
+ case tp: PolyParam =>
+ var tvar1 = constraint.typeVarOfParam(tp)
+ if (tvar1.exists) tvar1 else tp
+ case tp => tp
+ }
+
+ /** A fresh typer state with the same constraint as this one.
+ * @param isCommittable The constraint can be committed to an enclosing context.
+ */
+ def fresh(isCommittable: Boolean): TyperState = this
+
+  /** A fresh typer state with the same constraint as this one and the given reporter */
+ def withReporter(reporter: Reporter) = new TyperState(reporter)
+
+ /** Commit state so that it gets propagated to enclosing context */
+ def commit()(implicit ctx: Context): Unit = unsupported("commit")
+
+ /** The closest ancestor of this typer state (including possibly this typer state itself)
+ * which is not yet committed, or which does not have a parent.
+ */
+ def uncommittedAncestor: TyperState = this
+
+ /** Make type variable instances permanent by assigning to `inst` field if
+ * type variable instantiation cannot be retracted anymore. Then, remove
+ * no-longer needed constraint entries.
+ */
+ def gc()(implicit ctx: Context): Unit = ()
+
+ /** Is it allowed to commit this state? */
+ def isCommittable: Boolean = false
+
+ /** Can this state be transitively committed until the top-level? */
+ def isGlobalCommittable: Boolean = false
+
+ def tryWithFallback[T](op: => T)(fallback: => T)(implicit ctx: Context): T = unsupported("tryWithFallBack")
+
+ override def toText(printer: Printer): Text = "ImmutableTyperState"
+}
+
+class MutableTyperState(previous: TyperState, r: Reporter, override val isCommittable: Boolean)
+extends TyperState(r) {
+
+ private var myReporter = r
+
+ override def reporter = myReporter
+
+ private val previousConstraint = previous.constraint
+ private var myConstraint: Constraint = previousConstraint
+
+ override def constraint = myConstraint
+ override def constraint_=(c: Constraint)(implicit ctx: Context) = {
+ if (Config.debugCheckConstraintsClosed && isGlobalCommittable) c.checkClosed()
+ myConstraint = c
+ }
+
+ private var myEphemeral: Boolean = previous.ephemeral
+
+ override def ephemeral = myEphemeral
+ override def ephemeral_=(x: Boolean): Unit = { myEphemeral = x }
+
+ override def fresh(isCommittable: Boolean): TyperState =
+ new MutableTyperState(this, new StoreReporter(reporter), isCommittable)
+
+ override def withReporter(reporter: Reporter) =
+ new MutableTyperState(this, reporter, isCommittable)
+
+ override val isGlobalCommittable =
+ isCommittable &&
+ (!previous.isInstanceOf[MutableTyperState] || previous.isGlobalCommittable)
+
+ private var isCommitted = false
+
+ override def uncommittedAncestor: TyperState =
+ if (isCommitted) previous.uncommittedAncestor else this
+
+  /** Commit typer state so that its information is copied into the current typer state.
+   *  In addition, (1) the owning state of undetermined or temporarily instantiated
+ * type variables changes from this typer state to the current one. (2) Variables
+ * that were temporarily instantiated in the current typer state are permanently
+ * instantiated instead.
+ *
+ * A note on merging: An interesting test case is isApplicableSafe.scala. It turns out that this
+   *  requires a context merge using the new `&` operator. Sequence of actions:
+ * 1) Typecheck argument in typerstate 1.
+ * 2) Cache argument.
+ * 3) Evolve same typer state (to typecheck other arguments, say)
+ * leading to a different constraint.
+ * 4) Take typechecked argument in same state.
+ *
+ * It turns out that the merge is needed not just for
+   *  isApplicableSafe but also for other tests (e.g. erased-lubs.scala) as well as
+ * many parts of dotty itself.
+ */
+ override def commit()(implicit ctx: Context) = {
+ val targetState = ctx.typerState
+ assert(isCommittable)
+ targetState.constraint =
+ if (targetState.constraint eq previousConstraint) constraint
+ else targetState.constraint & constraint
+ constraint foreachTypeVar { tvar =>
+ if (tvar.owningState eq this)
+ tvar.owningState = targetState
+ }
+ targetState.ephemeral |= ephemeral
+ targetState.gc()
+ reporter.flush()
+ isCommitted = true
+ }
+
+ override def gc()(implicit ctx: Context): Unit = {
+ val toCollect = new mutable.ListBuffer[PolyType]
+ constraint foreachTypeVar { tvar =>
+ if (!tvar.inst.exists) {
+ val inst = instType(tvar)
+ if (inst.exists && (tvar.owningState eq this)) {
+ tvar.inst = inst
+ val poly = tvar.origin.binder
+ if (constraint.isRemovable(poly)) toCollect += poly
+ }
+ }
+ }
+ for (poly <- toCollect)
+ constraint = constraint.remove(poly)
+ }
+
+ /** Try operation `op`; if it produces errors, execute `fallback` with constraint and
+ * reporter as they were before `op` was executed. This is similar to `typer/tryEither`,
+ * but with one important difference: Any type variable instantiations produced by `op`
+ * are persisted even if `op` fails. This is normally not what one wants and therefore
+ * it is recommended to use
+ *
+ * tryEither { implicit ctx => op } { (_, _) => fallBack }
+ *
+ * instead of
+ *
+ * ctx.tryWithFallback(op)(fallBack)
+ *
+ * `tryWithFallback` is only used when an implicit parameter search fails
+ * and the whole expression is subsequently retype-checked with a Wildcard
+ * expected type (so as to allow an implicit conversion on the result and
+ * avoid over-constraining the implicit parameter search). In this case,
+ * the only type variables that might be falsely instantiated by `op` but
+ * not by `fallBack` are type variables in the typed expression itself, and
+ * these will be thrown away and new ones will be created on re-typing.
+ * So `tryWithFallback` is safe. It is also necessary because without it
+ * we do not propagate enough instantiation information into the implicit search
+ * and this might lead to a missing parameter type error. This is exhibited
+ * at several places in the test suite (for instance in `pos_typers`).
+ * Overall, this is rather ugly, but despite trying for 2 days I have not
+ * found a better solution.
+ */
+ override def tryWithFallback[T](op: => T)(fallback: => T)(implicit ctx: Context): T = {
+ val storeReporter = new StoreReporter(myReporter)
+ val savedReporter = myReporter
+ myReporter = storeReporter
+ val savedConstraint = myConstraint
+ val result = try op finally myReporter = savedReporter
+ if (!storeReporter.hasErrors) result
+ else {
+ myConstraint = savedConstraint
+ fallback
+ }
+ }
+
+ override def toText(printer: Printer): Text = constraint.toText(printer)
+}
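The fresh/commit pair above is what enables speculative typing: work happens against a fresh, committable state and is propagated to the enclosing state only when it succeeds. A rough sketch of that driver pattern (hypothetical helper; `setTyperState` is assumed to be the corresponding `FreshContext` setter):

import dotty.tools.dotc.core.Contexts.Context

object SpeculativeSketch {
  def speculative[T](op: Context => T)(implicit ctx: Context): Option[T] = {
    val nestedState = ctx.typerState.fresh(isCommittable = true)
    val nestedCtx   = ctx.fresh.setTyperState(nestedState)  // assumed FreshContext setter
    val result      = op(nestedCtx)
    if (nestedState.reporter.hasErrors) None       // errors stay buffered in the fresh state and are dropped
    else { nestedState.commit(); Some(result) }    // propagate constraints into ctx.typerState
  }
}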
diff --git a/compiler/src/dotty/tools/dotc/core/Types.overflow b/compiler/src/dotty/tools/dotc/core/Types.overflow
new file mode 100644
index 000000000..77f1f6fc1
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Types.overflow
@@ -0,0 +1,66 @@
+object Types {
+ class Type {
+
+ /** The non-private symbol with given name in the given class that matches this type.
+ * @param inClass The class containing the symbol's definition
+ * @param name The name of the symbol we are looking for
+ * @param site The base type from which member types are computed
+ def matchingTermSymbol(inClass: Symbol, name: Name, site: Type)(implicit ctx: Context): Symbol = {
+ var denot = inClass.info.nonPrivateDecl(name)
+ if (denot.isTerm) { // types of the same name always match
+ if (denot.isOverloaded)
+ denot = denot.atSignature(this.signature) // seems we need two kinds of signatures here
+ if (!(site.memberInfo(denot.symbol) matches this))
+ denot = NoDenotation
+ }
+ denot.symbol
+ }
+
+ final def firstParamTypes: List[Type] = this match {
+ case mt: MethodType => mt.paramTypes
+ case pt: PolyType => pt.firstParamTypes
+ case _ => Nil
+ }
+
+ /** `tp` is either a type variable or poly param. Returns
+ * Covariant if all occurrences of `tp` in this type are covariant
+ * Contravariant if all occurrences of `tp` in this type are contravariant
+ * Covariant | Contravariant if there are no occurrences of `tp` in this type
+     *  EmptyFlags if `tp` occurs non-variantly in this type
+ */
+ def varianceOf(tp: Type): FlagSet = ???
+
+
+ }
+
+ class AndType extends Type {
+
+ def derived_& (tp1: Type, tp2: Type)(implicit ctx: Context) =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else tp1 & tp2
+
+ }
+
+ class OrType extends Type {
+
+ def derived_| (tp1: Type, tp2: Type)(implicit ctx: Context) =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else tp1 | tp2
+
+ }
+
+ class MethodType {
+ /* probably won't be needed
+ private var _isVarArgs: Boolean = _
+ private var knownVarArgs: Boolean = false
+
+ def isVarArgs(implicit ctx: Context) = {
+ if (!knownVarArgs) {
+ _isVarArgs = paramTypes.nonEmpty && paramTypes.last.isRepeatedParam
+ knownVarArgs = true
+ }
+ _isVarArgs
+ }
+ */
+ }
+} \ No newline at end of file
diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala
new file mode 100644
index 000000000..89bc21929
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Types.scala
@@ -0,0 +1,3865 @@
+package dotty.tools
+package dotc
+package core
+
+import util.common._
+import Symbols._
+import Flags._
+import Names._
+import StdNames._, NameOps._
+import Scopes._
+import Constants._
+import Contexts._
+import Annotations._
+import SymDenotations._
+import Decorators._
+import Denotations._
+import Periods._
+import util.Positions.Position
+import util.Stats._
+import util.{DotClass, SimpleMap}
+import ast.tpd._
+import ast.TreeTypeMap
+import printing.Texts._
+import ast.untpd
+import dotty.tools.dotc.transform.Erasure
+import printing.Printer
+import Hashable._
+import Uniques._
+import collection.{mutable, Seq, breakOut}
+import config.Config
+import annotation.tailrec
+import Flags.FlagSet
+import language.implicitConversions
+import scala.util.hashing.{ MurmurHash3 => hashing }
+import config.Printers.{core, typr, cyclicErrors}
+
+object Types {
+
+ @sharable private var nextId = 0
+
+ implicit def eqType: Eq[Type, Type] = Eq
+
+ /** The class of types.
+ * The principal subclasses and sub-objects are as follows:
+ *
+ * Type -+- ProxyType --+- NamedType ----+--- TypeRef
+ * | | \
+ * | +- SingletonType-+-+- TermRef
+ * | | |
+ * | | +--- ThisType
+ * | | +--- SuperType
+ * | | +--- ConstantType
+ * | | +--- MethodParam
+ * | | +----RecThis
+ * | | +--- SkolemType
+ * | +- PolyParam
+ * | +- RefinedOrRecType -+-- RefinedType
+ * | | -+-- RecType
+ * | +- HKApply
+ * | +- TypeBounds
+ * | +- ExprType
+ * | +- AnnotatedType
+ * | +- TypeVar
+ * | +- PolyType
+ * |
+ * +- GroundType -+- AndType
+ * +- OrType
+ * +- MethodType -----+- ImplicitMethodType
+ * | +- JavaMethodType
+ * +- ClassInfo
+ * |
+ * +- NoType
+ * +- NoPrefix
+ * +- ErrorType
+ * +- WildcardType
+ *
+ * Note: please keep in sync with copy in `docs/docs/internals/type-system.md`.
+ */
+ abstract class Type extends DotClass with Hashable with printing.Showable {
+
+// ----- Tests -----------------------------------------------------
+
+ // debug only: a unique identifier for a type
+ val uniqId = {
+ nextId = nextId + 1
+// if (nextId == 19555)
+// println("foo")
+ nextId
+ }
+
+ /** Is this type different from NoType? */
+ def exists: Boolean = true
+
+ /** This type, if it exists, otherwise `that` type */
+ def orElse(that: => Type) = if (exists) this else that
+
+ /** Is this type a value type? */
+ final def isValueType: Boolean = this.isInstanceOf[ValueType]
+
+    /** Is this a value type or a type lambda? */
+ final def isValueTypeOrLambda: Boolean = isValueType || this.isInstanceOf[PolyType]
+
+ /** Does this type denote a stable reference (i.e. singleton type)? */
+ final def isStable(implicit ctx: Context): Boolean = stripTypeVar match {
+ case tp: TermRef => tp.termSymbol.isStable && tp.prefix.isStable
+ case _: SingletonType | NoPrefix => true
+ case tp: RefinedOrRecType => tp.parent.isStable
+ case _ => false
+ }
+
+ /** Is this type a (possibly refined or applied or aliased) type reference
+ * to the given type symbol?
+     *  @param sym  The symbol to compare to. It must be a class symbol or an abstract type.
+ * It makes no sense for it to be an alias type because isRef would always
+ * return false in that case.
+ */
+ def isRef(sym: Symbol)(implicit ctx: Context): Boolean = stripAnnots.stripTypeVar match {
+ case this1: TypeRef =>
+ this1.info match { // see comment in Namer#typeDefSig
+ case TypeAlias(tp) => tp.isRef(sym)
+ case _ => this1.symbol eq sym
+ }
+ case this1: RefinedOrRecType => this1.parent.isRef(sym)
+ case this1: HKApply => this1.superType.isRef(sym)
+ case _ => false
+ }
+
+ /** Is this type a (neither aliased nor applied) reference to class `sym`? */
+ def isDirectRef(sym: Symbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+ case this1: TypeRef =>
+ this1.name == sym.name && // avoid forcing info if names differ
+ (this1.symbol eq sym)
+ case _ =>
+ false
+ }
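The difference between the two tests above shows up for aliased or applied types: `isRef` dealiases and follows refinements and applications, while `isDirectRef` accepts only an unmediated reference to the class. A tiny illustration (hypothetical helper; the imports are a best guess):

import dotty.tools.dotc.core.Types.Type
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Symbols.defn

object IsRefSketch {
  // For an alias such as `type Strings = List[String]`, the alias's TypeRef
  // satisfies isRef(defn.ListClass) but not isDirectRef(defn.ListClass).
  def refersToList(tp: Type)(implicit ctx: Context): (Boolean, Boolean) =
    (tp.isRef(defn.ListClass), tp.isDirectRef(defn.ListClass))
}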
+
+ /** Does this type refer exactly to class symbol `sym`, instead of to a subclass of `sym`?
+ * Implemented like `isRef`, but follows more types: all type proxies as well as and- and or-types
+ */
+ private[Types] def isTightPrefix(sym: Symbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+ case tp: NamedType => tp.info.isTightPrefix(sym)
+ case tp: ClassInfo => tp.cls eq sym
+ case tp: Types.ThisType => tp.cls eq sym
+ case tp: TypeProxy => tp.underlying.isTightPrefix(sym)
+ case tp: AndType => tp.tp1.isTightPrefix(sym) && tp.tp2.isTightPrefix(sym)
+ case tp: OrType => tp.tp1.isTightPrefix(sym) || tp.tp2.isTightPrefix(sym)
+ case _ => false
+ }
+
+ /** Is this type an instance of a non-bottom subclass of the given class `cls`? */
+ final def derivesFrom(cls: Symbol)(implicit ctx: Context): Boolean = {
+ def loop(tp: Type) = tp match {
+ case tp: TypeRef =>
+ val sym = tp.symbol
+ if (sym.isClass) sym.derivesFrom(cls) else tp.superType.derivesFrom(cls)
+ case tp: TypeProxy =>
+ tp.underlying.derivesFrom(cls)
+ case tp: AndType =>
+ tp.tp1.derivesFrom(cls) || tp.tp2.derivesFrom(cls)
+ case tp: OrType =>
+ tp.tp1.derivesFrom(cls) && tp.tp2.derivesFrom(cls)
+ case tp: JavaArrayType =>
+ cls == defn.ObjectClass
+ case _ =>
+ false
+ }
+ cls == defn.AnyClass || loop(this)
+ }
+
+ /** Is this type guaranteed not to have `null` as a value?
+ * For the moment this is only true for modules, but it could
+ * be refined later.
+ */
+ final def isNotNull(implicit ctx: Context): Boolean =
+ classSymbol is ModuleClass
+
+ /** Is this type produced as a repair for an error? */
+ final def isError(implicit ctx: Context): Boolean = stripTypeVar match {
+ case ErrorType => true
+ case tp => (tp.typeSymbol is Erroneous) || (tp.termSymbol is Erroneous)
+ }
+
+ /** Is some part of this type produced as a repair for an error? */
+ final def isErroneous(implicit ctx: Context): Boolean = existsPart(_.isError, forceLazy = false)
+
+ /** Does the type carry an annotation that is an instance of `cls`? */
+ final def hasAnnotation(cls: ClassSymbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+ case AnnotatedType(tp, annot) => (annot matches cls) || (tp hasAnnotation cls)
+ case _ => false
+ }
+
+ /** Does this type occur as a part of type `that`? */
+ final def occursIn(that: Type)(implicit ctx: Context): Boolean =
+ that existsPart (this == _)
+
+ /** Is this a type of a repeated parameter? */
+ def isRepeatedParam(implicit ctx: Context): Boolean =
+ typeSymbol eq defn.RepeatedParamClass
+
+ /** Does this type carry an UnsafeNonvariant annotation? */
+ final def isUnsafeNonvariant(implicit ctx: Context): Boolean = this match {
+ case AnnotatedType(_, annot) => annot.symbol == defn.UnsafeNonvariantAnnot
+ case _ => false
+ }
+
+ /** Does this type have an UnsafeNonvariant annotation on one of its parts? */
+ final def hasUnsafeNonvariant(implicit ctx: Context): Boolean =
+ new HasUnsafeNonAccumulator().apply(false, this)
+
+ /** Is this the type of a method that has a repeated parameter type as
+ * last parameter type?
+ */
+ def isVarArgsMethod(implicit ctx: Context): Boolean = this match {
+ case tp: PolyType => tp.resultType.isVarArgsMethod
+ case MethodType(_, paramTypes) => paramTypes.nonEmpty && paramTypes.last.isRepeatedParam
+ case _ => false
+ }
+
+ /** Is this an alias TypeBounds? */
+ def isAlias: Boolean = this.isInstanceOf[TypeAlias]
+
+// ----- Higher-order combinators -----------------------------------
+
+ /** Returns true if there is a part of this type that satisfies predicate `p`.
+ */
+ final def existsPart(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context): Boolean =
+ new ExistsAccumulator(p, forceLazy).apply(false, this)
+
+ /** Returns true if all parts of this type satisfy predicate `p`.
+ */
+ final def forallParts(p: Type => Boolean)(implicit ctx: Context): Boolean =
+ !existsPart(!p(_))
+
+ /** Performs operation on all parts of this type */
+ final def foreachPart(p: Type => Unit, stopAtStatic: Boolean = false)(implicit ctx: Context): Unit =
+ new ForeachAccumulator(p, stopAtStatic).apply((), this)
+
+ /** The parts of this type which are type or term refs */
+ final def namedParts(implicit ctx: Context): collection.Set[NamedType] =
+ namedPartsWith(alwaysTrue)
+
+ /** The parts of this type which are type or term refs and which
+ * satisfy predicate `p`.
+ *
+ * @param p The predicate to satisfy
+ * @param excludeLowerBounds If set to true, the lower bounds of abstract
+ * types will be ignored.
+ */
+ def namedPartsWith(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context): collection.Set[NamedType] =
+ new NamedPartsAccumulator(p, excludeLowerBounds).apply(mutable.LinkedHashSet(), this)
+
+ /** Map function `f` over elements of an AndType, rebuilding with function `g` */
+ def mapReduceAnd[T](f: Type => T)(g: (T, T) => T)(implicit ctx: Context): T = stripTypeVar match {
+ case AndType(tp1, tp2) => g(tp1.mapReduceAnd(f)(g), tp2.mapReduceAnd(f)(g))
+ case _ => f(this)
+ }
+
+ /** Map function `f` over elements of an OrType, rebuilding with function `g` */
+ final def mapReduceOr[T](f: Type => T)(g: (T, T) => T)(implicit ctx: Context): T = stripTypeVar match {
+ case OrType(tp1, tp2) => g(tp1.mapReduceOr(f)(g), tp2.mapReduceOr(f)(g))
+ case _ => f(this)
+ }
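These two combinators let a computation be folded over all branches of an intersection or union. For instance, the common base classes of all alternatives of a union, as used by the or-dominator approximation in TypeOps, can be written as a one-liner (hypothetical helper, assuming this file's imports):

  def commonBaseClasses(tp: Type)(implicit ctx: Context): List[ClassSymbol] =
    tp.mapReduceOr(_.baseClasses)((l, r) => l intersect r)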
+
+// ----- Associated symbols ----------------------------------------------
+
+ /** The type symbol associated with the type */
+ final def typeSymbol(implicit ctx: Context): Symbol = this match {
+ case tp: TypeRef => tp.symbol
+ case tp: ClassInfo => tp.cls
+// case ThisType(cls) => cls // needed?
+ case tp: SingletonType => NoSymbol
+ case tp: TypeProxy => tp.underlying.typeSymbol
+ case _ => NoSymbol
+ }
+
+ /** The least class or trait of which this type is a subtype or parameterized
+ * instance, or NoSymbol if none exists (either because this type is not a
+ * value type, or because superclasses are ambiguous).
+ */
+ final def classSymbol(implicit ctx: Context): Symbol = this match {
+ case ConstantType(constant) =>
+ constant.tpe.classSymbol
+ case tp: TypeRef =>
+ val sym = tp.symbol
+ if (sym.isClass) sym else tp.superType.classSymbol
+ case tp: ClassInfo =>
+ tp.cls
+ case tp: SingletonType =>
+ NoSymbol
+ case tp: TypeProxy =>
+ tp.underlying.classSymbol
+ case AndType(l, r) =>
+ val lsym = l.classSymbol
+ val rsym = r.classSymbol
+ if (lsym isSubClass rsym) lsym
+ else if (rsym isSubClass lsym) rsym
+ else NoSymbol
+ case OrType(l, r) => // TODO does not conform to spec
+ val lsym = l.classSymbol
+ val rsym = r.classSymbol
+ if (lsym isSubClass rsym) rsym
+ else if (rsym isSubClass lsym) lsym
+ else NoSymbol
+ case _ =>
+ NoSymbol
+ }
+
+ /** The least (wrt <:<) set of class symbols of which this type is a subtype
+ */
+ final def classSymbols(implicit ctx: Context): List[ClassSymbol] = this match {
+ case tp: ClassInfo =>
+ tp.cls :: Nil
+ case tp: TypeRef =>
+ val sym = tp.symbol
+ if (sym.isClass) sym.asClass :: Nil else tp.superType.classSymbols
+ case tp: TypeProxy =>
+ tp.underlying.classSymbols
+ case AndType(l, r) =>
+ l.classSymbols union r.classSymbols
+ case OrType(l, r) =>
+ l.classSymbols intersect r.classSymbols // TODO does not conform to spec
+ case _ =>
+ Nil
+ }
+
+ /** The term symbol associated with the type */
+ final def termSymbol(implicit ctx: Context): Symbol = this match {
+ case tp: TermRef => tp.symbol
+ case tp: TypeProxy => tp.underlying.termSymbol
+ case _ => NoSymbol
+ }
+
+ /** The base classes of this type as determined by ClassDenotation
+ * in linearization order, with the class itself as first element.
+ * For AndTypes/OrTypes, the union/intersection of the operands' baseclasses.
+ * Inherited by all type proxies. `Nil` for all other types.
+ */
+ final def baseClasses(implicit ctx: Context): List[ClassSymbol] = track("baseClasses") {
+ this match {
+ case tp: TypeProxy =>
+ tp.underlying.baseClasses
+ case tp: ClassInfo =>
+ tp.cls.baseClasses
+ case AndType(tp1, tp2) =>
+ tp1.baseClasses union tp2.baseClasses
+ case OrType(tp1, tp2) =>
+ tp1.baseClasses intersect tp2.baseClasses
+ case _ => Nil
+ }
+ }
+
+// ----- Member access -------------------------------------------------
+
+ /** The scope of all declarations of this type.
+ * Defined by ClassInfo, inherited by type proxies.
+ * Empty scope for all other types.
+ */
+ final def decls(implicit ctx: Context): Scope = this match {
+ case tp: ClassInfo =>
+ tp.decls
+ case tp: TypeProxy =>
+ tp.underlying.decls
+ case _ =>
+ EmptyScope
+ }
+
+ /** A denotation containing the declaration(s) in this type with the given name.
+ * The result is either a SymDenotation or a MultiDenotation of SymDenotations.
+ * The info(s) are the original symbol infos, no translation takes place.
+ */
+ final def decl(name: Name)(implicit ctx: Context): Denotation = track("decl") {
+ findDecl(name, EmptyFlags)
+ }
+
+ /** A denotation containing the non-private declaration(s) in this type with the given name */
+ final def nonPrivateDecl(name: Name)(implicit ctx: Context): Denotation = track("nonPrivateDecl") {
+ findDecl(name, Private)
+ }
+
+ /** A denotation containing the declaration(s) in this type with the given
+     *  name. Declarations that have a flag
+ * in `excluded` are omitted.
+ */
+ final def findDecl(name: Name, excluded: FlagSet)(implicit ctx: Context): Denotation = this match {
+ case tp: ClassInfo =>
+ tp.decls.denotsNamed(name).filterExcluded(excluded).toDenot(NoPrefix)
+ case tp: TypeProxy =>
+ tp.underlying.findDecl(name, excluded)
+ case ErrorType =>
+ ctx.newErrorSymbol(classSymbol orElse defn.RootClass, name)
+ case _ =>
+ NoDenotation
+ }
+
+ /** The member of this type with the given name */
+ final def member(name: Name)(implicit ctx: Context): Denotation = /*>|>*/ track("member") /*<|<*/ {
+ memberExcluding(name, EmptyFlags)
+ }
+
+ /** The non-private member of this type with the given name. */
+ final def nonPrivateMember(name: Name)(implicit ctx: Context): Denotation = track("nonPrivateMember") {
+ memberExcluding(name, Flags.Private)
+ }
+
+ final def memberExcluding(name: Name, excluding: FlagSet)(implicit ctx: Context): Denotation = {
+ // We need a valid prefix for `asSeenFrom`
+ val pre = this match {
+ case tp: ClassInfo =>
+ tp.typeRef
+ case _ =>
+ widenIfUnstable
+ }
+ findMember(name, pre, excluding)
+ }
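A typical use of the lookup API above goes from a type and a member name to the member's info as seen from that type. A small sketch (hypothetical helper, assuming this file's imports and an implicit `Context`):

  // Resolve a term member and strip a by-name wrapper from its info;
  // the result is NoType when no such member exists.
  def memberResultType(site: Type, name: TermName)(implicit ctx: Context): Type =
    site.member(name).info.widenExpr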
+
+ /** Find member of this type with given name and
+ * produce a denotation that contains the type of the member
+ * as seen from given prefix `pre`. Exclude all members that have
+ * flags in `excluded` from consideration.
+ */
+ final def findMember(name: Name, pre: Type, excluded: FlagSet)(implicit ctx: Context): Denotation = {
+ @tailrec def go(tp: Type): Denotation = tp match {
+ case tp: RefinedType =>
+ if (name eq tp.refinedName) goRefined(tp) else go(tp.parent)
+ case tp: ThisType =>
+ goThis(tp)
+ case tp: TypeRef =>
+ tp.denot.findMember(name, pre, excluded)
+ case tp: TermRef =>
+ go (tp.underlying match {
+ case mt: MethodType
+ if mt.paramTypes.isEmpty && (tp.symbol is Stable) => mt.resultType
+ case tp1 => tp1
+ })
+ case tp: PolyParam =>
+ goParam(tp)
+ case tp: RecType =>
+ goRec(tp)
+ case tp: HKApply =>
+ goApply(tp)
+ case tp: TypeProxy =>
+ go(tp.underlying)
+ case tp: ClassInfo =>
+ tp.cls.findMember(name, pre, excluded)
+ case AndType(l, r) =>
+ goAnd(l, r)
+ case tp: OrType =>
+ // we need to keep the invariant that `pre <: tp`. Branch `union-types-narrow-prefix`
+ // achieved that by narrowing `pre` to each alternative, but it led to merge errors in
+        // lots of places. The present strategy is instead to widen `tp` using `join` to be a
+ // supertype of `pre`.
+ go(tp.join)
+ case tp: JavaArrayType =>
+ defn.ObjectType.findMember(name, pre, excluded)
+ case ErrorType =>
+ ctx.newErrorSymbol(pre.classSymbol orElse defn.RootClass, name)
+ case _ =>
+ NoDenotation
+ }
+ def goRec(tp: RecType) =
+ if (tp.parent == null) NoDenotation
+ else {
+ //println(s"find member $pre . $name in $tp")
+
+ // We have to be careful because we might open the same (wrt eq) recursive type
+ // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)`
+ // call below. To avoid this problem we do a defensive copy of the recursive
+ // type first. But if we do this always we risk being inefficient and we ran into
+ // stackoverflows when compiling pos/hk.scala under the refinement encoding
+ // of hk-types. So we only do a copy if the type
+ // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`.
+ // Furthermore, if this happens we mark the original recursive type with `openedTwice`
+ // which means that we always defensively copy the type in the future. This second
+ // measure is necessary because findMember calls might be cached, so do not
+ // necessarily appear in nested order.
+ // Without the defensive copy, Typer.scala fails to compile at the line
+ //
+ // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType)
+ //
+ // because the subtype check
+ //
+ // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed]
+ //
+ // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.)
+ //
+ // Without the `openedTwice` trick, Typer.scala fails to Ycheck
+ // at phase resolveSuper.
+ val rt =
+ if (tp.opened) { // defensive copy
+ tp.openedTwice = true
+ RecType(rt => tp.parent.substRecThis(tp, RecThis(rt)))
+ } else tp
+ rt.opened = true
+ try go(rt.parent).mapInfo(_.substRecThis(rt, pre))
+ finally {
+ if (!rt.openedTwice) rt.opened = false
+ }
+ }
+
+ def goRefined(tp: RefinedType) = {
+ val pdenot = go(tp.parent)
+ val rinfo = tp.refinedInfo
+ if (name.isTypeName) { // simplified case that runs more efficiently
+ val jointInfo =
+ if (rinfo.isAlias) rinfo
+ else if (pdenot.info.isAlias) pdenot.info
+ else if (ctx.pendingMemberSearches.contains(name)) pdenot.info safe_& rinfo
+ else
+ try pdenot.info & rinfo
+ catch {
+ case ex: CyclicReference =>
+ // happens for tests/pos/sets.scala. findMember is called from baseTypeRef.
+ // The & causes a subtype check which calls baseTypeRef again with the same
+ // superclass. In the observed case, the superclass was Any, and
+ // the special shortcut for Any in derivesFrom was as yet absent. To reproduce,
+ // remove the special treatment of Any in derivesFrom and compile
+ // sets.scala.
+ pdenot.info safe_& rinfo
+ }
+ pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo)
+ } else {
+ pdenot & (
+ new JointRefDenotation(NoSymbol, rinfo, Period.allInRun(ctx.runId)),
+ pre,
+ safeIntersection = ctx.pendingMemberSearches.contains(name))
+ }
+ }
+
+ def goApply(tp: HKApply) = tp.tycon match {
+ case tl: PolyType =>
+ go(tl.resType).mapInfo(info =>
+ tl.derivedLambdaAbstraction(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args))
+ case _ =>
+ go(tp.superType)
+ }
+
+ def goThis(tp: ThisType) = {
+ val d = go(tp.underlying)
+ if (d.exists)
+ if ((pre eq tp) && d.symbol.is(NamedTypeParam) && (d.symbol.owner eq tp.cls))
+ // If we look for a named type parameter `P` in `C.this.P`, looking up
+ // the fully applied self type of `C` will give as an info the alias type
+ // `P = this.P`. We need to return a denotation with the underlying bounds instead.
+ d.symbol.denot
+ else d
+ else
+ // There is a special case to handle:
+ // trait Super { this: Sub => private class Inner {} println(this.Inner) }
+ // class Sub extends Super
+ // When resolving Super.this.Inner, the normal logic goes to the self type and
+ // looks for Inner from there. But this fails because Inner is private.
+ // We fix the problem by having the following fallback case, which links up the
+ // member in Super instead of Sub.
+ // As an example of this in the wild, see
+ // loadClassWithPrivateInnerAndSubSelf in ShowClassTests
+ go(tp.cls.typeRef) orElse d
+ }
+ def goParam(tp: PolyParam) = {
+ val next = tp.underlying
+ ctx.typerState.constraint.entry(tp) match {
+ case bounds: TypeBounds if bounds ne next =>
+ ctx.typerState.ephemeral = true
+ go(bounds.hi)
+ case _ =>
+ go(next)
+ }
+ }
+ def goAnd(l: Type, r: Type) = {
+ go(l) & (go(r), pre, safeIntersection = ctx.pendingMemberSearches.contains(name))
+ }
+
+ { val recCount = ctx.findMemberCount + 1
+ ctx.findMemberCount = recCount
+ if (recCount >= Config.LogPendingFindMemberThreshold)
+ ctx.pendingMemberSearches = name :: ctx.pendingMemberSearches
+ }
+
+ //assert(ctx.findMemberCount < 20)
+ try go(this)
+ catch {
+ case ex: Throwable =>
+ core.println(i"findMember exception for $this member $name, pre = $pre")
+ throw ex // DEBUG
+ }
+ finally {
+ val recCount = ctx.findMemberCount
+ if (recCount >= Config.LogPendingFindMemberThreshold)
+ ctx.pendingMemberSearches = ctx.pendingMemberSearches.tail
+ ctx.findMemberCount = recCount - 1
+ }
+ }
+
+ /** The set of names of members of this type that pass the given name filter
+ * when seen as members of `pre`. More precisely, these are all
+     *  member names `name` such that `keepOnly(pre, name)` is `true`.
+ * @note: OK to use a Set[Name] here because Name hashcodes are replayable,
+ * hence the Set will always give the same names in the same order.
+ */
+ final def memberNames(keepOnly: NameFilter, pre: Type = this)(implicit ctx: Context): Set[Name] = this match {
+ case tp: ClassInfo =>
+ tp.cls.memberNames(keepOnly) filter (keepOnly(pre, _))
+ case tp: RefinedType =>
+ val ns = tp.parent.memberNames(keepOnly, pre)
+ if (keepOnly(pre, tp.refinedName)) ns + tp.refinedName else ns
+ case tp: TypeProxy =>
+ tp.underlying.memberNames(keepOnly, pre)
+ case tp: AndType =>
+ tp.tp1.memberNames(keepOnly, pre) | tp.tp2.memberNames(keepOnly, pre)
+ case tp: OrType =>
+ tp.tp1.memberNames(keepOnly, pre) & tp.tp2.memberNames(keepOnly, pre)
+ case _ =>
+ Set()
+ }
+
+ def memberDenots(keepOnly: NameFilter, f: (Name, mutable.Buffer[SingleDenotation]) => Unit)(implicit ctx: Context): Seq[SingleDenotation] = {
+ val buf = mutable.ArrayBuffer[SingleDenotation]()
+ for (name <- memberNames(keepOnly)) f(name, buf)
+ buf
+ }
+
+ /** The set of abstract term members of this type. */
+ final def abstractTermMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTermMembers") {
+ memberDenots(abstractTermNameFilter,
+ (name, buf) => buf ++= nonPrivateMember(name).altsWith(_ is Deferred))
+ }
+
+ /** The set of abstract type members of this type. */
+ final def abstractTypeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("abstractTypeMembers") {
+ memberDenots(abstractTypeNameFilter,
+ (name, buf) => buf += nonPrivateMember(name).asSingleDenotation)
+ }
+
+    /** The set of non-class type members of this type. */
+ final def nonClassTypeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("nonClassTypeMembers") {
+ memberDenots(nonClassTypeNameFilter,
+ (name, buf) => buf += member(name).asSingleDenotation)
+ }
+
+ /** The set of type members of this type */
+ final def typeMembers(implicit ctx: Context): Seq[SingleDenotation] = track("typeMembers") {
+ memberDenots(typeNameFilter,
+ (name, buf) => buf += member(name).asSingleDenotation)
+ }
+
+ /** The set of implicit members of this type */
+ final def implicitMembers(implicit ctx: Context): List[TermRef] = track("implicitMembers") {
+ memberDenots(implicitFilter,
+ (name, buf) => buf ++= member(name).altsWith(_ is Implicit))
+ .toList.map(d => TermRef.withSig(this, d.symbol.asTerm))
+ }
+
+ /** The set of member classes of this type */
+    final def memberClasses(implicit ctx: Context): Seq[SingleDenotation] = track("memberClasses") {
+ memberDenots(typeNameFilter,
+ (name, buf) => buf ++= member(name).altsWith(x => x.isClass))
+ }
+
+ final def fields(implicit ctx: Context): Seq[SingleDenotation] = track("fields") {
+ memberDenots(fieldFilter,
+ (name, buf) => buf ++= member(name).altsWith(x => !x.is(Method)))
+ }
+
+ /** The set of members of this type having at least one of `requiredFlags` but none of `excludedFlags` set */
+    final def membersBasedOnFlags(requiredFlags: FlagSet, excludedFlags: FlagSet)(implicit ctx: Context): Seq[SingleDenotation] = track("membersBasedOnFlags") {
+ memberDenots(takeAllFilter,
+ (name, buf) => buf ++= memberExcluding(name, excludedFlags).altsWith(x => x.is(requiredFlags)))
+ }
+
+ /** The info of `sym`, seen as a member of this type. */
+ final def memberInfo(sym: Symbol)(implicit ctx: Context): Type =
+ sym.info.asSeenFrom(this, sym.owner)
+
+ /** This type seen as if it were the type of a member of prefix type `pre`
+ * declared in class `cls`.
+ */
+ final def asSeenFrom(pre: Type, cls: Symbol)(implicit ctx: Context): Type = track("asSeenFrom") {
+ if (!cls.membersNeedAsSeenFrom(pre)) this
+ else ctx.asSeenFrom(this, pre, cls)
+ }
+
+// ----- Subtype-related --------------------------------------------
+
+ /** Is this type a subtype of that type? */
+ final def <:<(that: Type)(implicit ctx: Context): Boolean = track("<:<") {
+ ctx.typeComparer.topLevelSubType(this, that)
+ }
+
+ /** Is this type a subtype of that type? */
+ final def frozen_<:<(that: Type)(implicit ctx: Context): Boolean = track("frozen_<:<") {
+ ctx.typeComparer.isSubTypeWhenFrozen(this, that)
+ }
+
+ /** Is this type the same as that type?
+ * This is the case iff `this <:< that` and `that <:< this`.
+ */
+ final def =:=(that: Type)(implicit ctx: Context): Boolean = track("=:=") {
+ ctx.typeComparer.isSameType(this, that)
+ }
+
+ /** Is this type a primitive value type which can be widened to the primitive value type `that`? */
+ def isValueSubType(that: Type)(implicit ctx: Context) = widen match {
+ case self: TypeRef if self.symbol.isPrimitiveValueClass =>
+ that.widenExpr match {
+ case that: TypeRef if that.symbol.isPrimitiveValueClass =>
+ defn.isValueSubClass(self.symbol, that.symbol)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ def relaxed_<:<(that: Type)(implicit ctx: Context) =
+ (this <:< that) || (this isValueSubType that)
+
+ /** Is this type a legal type for a member that overrides another
+ * member of type `that`? This is the same as `<:<`, except that
+ * the types ()T and => T are identified, and T is seen as overriding
+ * either type.
+ */
+ final def overrides(that: Type)(implicit ctx: Context) = {
+ def result(tp: Type): Type = tp match {
+ case ExprType(_) | MethodType(Nil, _) => tp.resultType
+ case _ => tp
+ }
+ (this frozen_<:< that) || {
+ val rthat = result(that)
+ (rthat ne that) && (result(this) frozen_<:< rthat)
+ }
+ }
+
+ /** Is this type close enough to that type so that members
+ * with the two types would override each other?
+ * This means:
+ * - Either both types are polytypes with the same number of
+ * type parameters and their result types match after renaming
+ * corresponding type parameters
+ * - Or both types are method types with =:=-equivalent(*) parameter types
+ * and matching result types after renaming corresponding parameter types
+ * if the method types are dependent.
+ * - Or both types are =:=-equivalent
+ * - Or phase.erasedTypes is false, and neither type takes
+ * term or type parameters.
+ *
+ * (*) when matching with a Java method, we also regard Any and Object as equivalent
+ * parameter types.
+ */
+ def matches(that: Type)(implicit ctx: Context): Boolean = track("matches") {
+ ctx.typeComparer.matchesType(this, that, relaxed = !ctx.phase.erasedTypes)
+ }
+
+ /** This is the same as `matches` except that it also matches => T with T and
+ * vice versa.
+ */
+ def matchesLoosely(that: Type)(implicit ctx: Context): Boolean =
+ (this matches that) || {
+ val thisResult = this.widenExpr
+ val thatResult = that.widenExpr
+ (this eq thisResult) != (that eq thatResult) && (thisResult matchesLoosely thatResult)
+ }
+
+ /** The basetype TypeRef of this type with given class symbol,
+ * but without including any type arguments
+ */
+ final def baseTypeRef(base: Symbol)(implicit ctx: Context): Type = /*ctx.traceIndented(s"$this baseTypeRef $base")*/ /*>|>*/ track("baseTypeRef") /*<|<*/ {
+ base.denot match {
+ case classd: ClassDenotation => classd.baseTypeRefOf(this)
+ case _ => NoType
+ }
+ }
+
+ def & (that: Type)(implicit ctx: Context): Type = track("&") {
+ ctx.typeComparer.glb(this, that)
+ }
+
+ /** Safer version of `&`.
+ *
+ * This version does not simplify the upper bound of the intersection of
+ * two TypeBounds. The simplification done by `&` requires subtyping checks
+ * which may end up calling `&` again, in most cases this should be safe
+ * but because of F-bounded types, this can result in an infinite loop
+ * (which will be masked unless `-Yno-deep-subtypes` is enabled).
+ */
+ def safe_& (that: Type)(implicit ctx: Context): Type = (this, that) match {
+ case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => TypeBounds(lo1 | lo2, AndType(hi1, hi2))
+ case _ => this & that
+ }
+
+ def | (that: Type)(implicit ctx: Context): Type = track("|") {
+ ctx.typeComparer.lub(this, that)
+ }
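Taken together, `<:<`, `&` and `|` behave like a lattice: the greatest lower bound is below both operands and the least upper bound is above both. A quick property-style illustration (hypothetical helper, assuming this file's imports and an implicit `Context`):

  def latticeChecks(a: Type, b: Type)(implicit ctx: Context): Unit = {
    val glb = a & b
    val lub = a | b
    assert((glb <:< a) && (glb <:< b), "glb must be a subtype of both operands")
    assert((a <:< lub) && (b <:< lub), "lub must be a supertype of both operands")
  }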
+
+// ----- Unwrapping types -----------------------------------------------
+
+ /** Map a TypeVar to either its instance if it is instantiated, or its origin,
+ * if not, until the result is no longer a TypeVar. Identity on all other types.
+ */
+ def stripTypeVar(implicit ctx: Context): Type = this
+
+ /** Remove all AnnotatedTypes wrapping this type.
+ */
+ def stripAnnots(implicit ctx: Context): Type = this
+
+ /** Widen from singleton type to its underlying non-singleton
+     *  base type by applying one or more `underlying` dereferences.
+ * Also go from => T to T.
+ * Identity for all other types. Example:
+ *
+ * class Outer { class C ; val x: C }
+ * def o: Outer
+ * <o.x.type>.widen = o.C
+ */
+ final def widen(implicit ctx: Context): Type = widenSingleton match {
+ case tp: ExprType => tp.resultType.widen
+ case tp => tp
+ }
+
+ /** Widen from singleton type to its underlying non-singleton
+ * base type by applying one or more `underlying` dereferences.
+ */
+ final def widenSingleton(implicit ctx: Context): Type = stripTypeVar match {
+ case tp: SingletonType if !tp.isOverloaded => tp.underlying.widenSingleton
+ case _ => this
+ }
+
+ /** Widen from TermRef to its underlying non-termref
+ * base type, while also skipping Expr types.
+ */
+ final def widenTermRefExpr(implicit ctx: Context): Type = stripTypeVar match {
+ case tp: TermRef if !tp.isOverloaded => tp.underlying.widenExpr.widenTermRefExpr
+ case _ => this
+ }
+
+    /** Widen from ExprType to its result type.
+ * (Note: no stripTypeVar needed because TypeVar's can't refer to ExprTypes.)
+ */
+ final def widenExpr: Type = this match {
+ case tp: ExprType => tp.resType
+ case _ => this
+ }
+
+    /** Widen type if it is unstable (i.e. an ExprType, or a TermRef to an unstable symbol) */
+ final def widenIfUnstable(implicit ctx: Context): Type = stripTypeVar match {
+ case tp: ExprType => tp.resultType.widenIfUnstable
+ case tp: TermRef if !tp.symbol.isStable => tp.underlying.widenIfUnstable
+ case _ => this
+ }
+
+ /** If this is a skolem, its underlying type, otherwise the type itself */
+ final def widenSkolem(implicit ctx: Context): Type = this match {
+ case tp: SkolemType => tp.underlying
+ case _ => this
+ }
+
+ /** Eliminate anonymous classes */
+ final def deAnonymize(implicit ctx: Context): Type = this match {
+      case tp: TypeRef if tp.symbol.isAnonymousClass =>
+ tp.symbol.asClass.typeRef.asSeenFrom(tp.prefix, tp.symbol.owner)
+ case tp => tp
+ }
+
+ private def dealias(keepAnnots: Boolean)(implicit ctx: Context): Type = this match {
+ case tp: TypeRef =>
+ if (tp.symbol.isClass) tp
+ else tp.info match {
+ case TypeAlias(tp) => tp.dealias(keepAnnots)
+ case _ => tp
+ }
+ case tp: TypeVar =>
+ val tp1 = tp.instanceOpt
+ if (tp1.exists) tp1.dealias(keepAnnots) else tp
+ case tp: AnnotatedType =>
+ val tp1 = tp.tpe.dealias(keepAnnots)
+ if (keepAnnots) tp.derivedAnnotatedType(tp1, tp.annot) else tp1
+ case tp: LazyRef =>
+ tp.ref.dealias(keepAnnots)
+ case app @ HKApply(tycon, args) =>
+ val tycon1 = tycon.dealias(keepAnnots)
+ if (tycon1 ne tycon) app.superType.dealias(keepAnnots)
+ else this
+ case _ => this
+ }
+
+ /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type
+ * is no longer alias type, LazyRef, or instantiated type variable.
+ * Goes through annotated types and rewraps annotations on the result.
+ */
+ final def dealiasKeepAnnots(implicit ctx: Context): Type =
+ dealias(keepAnnots = true)
+
+ /** Follow aliases and dereferences LazyRefs, annotated types and instantiated
+ * TypeVars until type is no longer alias type, annotated type, LazyRef,
+ * or instantiated type variable.
+ */
+ final def dealias(implicit ctx: Context): Type =
+ dealias(keepAnnots = false)
+
+ /** Perform successive widenings and dealiasings until none can be applied anymore */
+ final def widenDealias(implicit ctx: Context): Type = {
+ val res = this.widen.dealias
+ if (res eq this) res else res.widenDealias
+ }
+
+ /** Widen from constant type to its underlying non-constant
+ * base type.
+ */
+ final def deconst(implicit ctx: Context): Type = stripTypeVar match {
+ case tp: ConstantType => tp.value.tpe
+ case _ => this
+ }
+
+ /** If this is a (possibly aliased, annotated, and/or parameterized) reference to
+ * a class, the class type ref, otherwise NoType.
+ * @param refinementOK If `true` we also skip non-parameter refinements.
+ */
+ def underlyingClassRef(refinementOK: Boolean)(implicit ctx: Context): Type = dealias match {
+ case tp: TypeRef =>
+ if (tp.symbol.isClass) tp
+ else if (tp.symbol.isAliasType) tp.underlying.underlyingClassRef(refinementOK)
+ else NoType
+ case tp: AnnotatedType =>
+ tp.underlying.underlyingClassRef(refinementOK)
+ case tp: RefinedType =>
+ def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName)
+ if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK)
+ else NoType
+ case tp: RecType =>
+ tp.underlying.underlyingClassRef(refinementOK)
+ case _ =>
+ NoType
+ }
+
+ /** The iterator of underlying types as long as type is a TypeProxy.
+ * Useful for diagnostics
+ */
+ def underlyingIterator(implicit ctx: Context): Iterator[Type] = new Iterator[Type] {
+ var current = Type.this
+ var hasNext = true
+ def next = {
+ val res = current
+ hasNext = current.isInstanceOf[TypeProxy]
+ if (hasNext) current = current.asInstanceOf[TypeProxy].underlying
+ res
+ }
+ }
+
+ /** A prefix-less refined this or a termRef to a new skolem symbol
+ * that has the given type as info.
+ */
+ def narrow(implicit ctx: Context): TermRef =
+ TermRef(NoPrefix, ctx.newSkolem(this))
+
+ /** Useful for diagnostics: The underlying type if this type is a type proxy,
+ * otherwise NoType
+ */
+ def underlyingIfProxy(implicit ctx: Context) = this match {
+ case this1: TypeProxy => this1.underlying
+ case _ => NoType
+ }
+
+ /** If this is a FunProto or PolyProto, WildcardType, otherwise this. */
+ def notApplied: Type = this
+
+ // ----- Normalizing typerefs over refined types ----------------------------
+
+ /** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed
+ * by other refinements), and the refined info is a type alias, return the alias,
+ * otherwise return NoType. Used to reduce types of the form
+ *
+ * P { ... type T = / += / -= U ... } # T
+ *
+ * to just U. Does not perform the reduction if the resulting type would contain
+ * a reference to the "this" of the current refined type, except in the following situation
+ *
+ * (1) The "this" reference can be avoided by following an alias. Example:
+ *
+ * P { type T = String, type R = P{...}.T } # R --> String
+ *
+ * (*) normalizes means: follow instantiated typevars and aliases.
+ */
+ def lookupRefined(name: Name)(implicit ctx: Context): Type = {
+ def loop(pre: Type): Type = pre.stripTypeVar match {
+ case pre: RefinedType =>
+ pre.refinedInfo match {
+ case TypeAlias(alias) =>
+ if (pre.refinedName ne name) loop(pre.parent) else alias
+ case _ => loop(pre.parent)
+ }
+ case pre: RecType =>
+ val candidate = loop(pre.parent)
+ if (candidate.exists && !pre.isReferredToBy(candidate)) {
+ //println(s"lookupRefined ${this.toString} . $name, pre: $pre ---> $candidate / ${candidate.toString}")
+ candidate
+ }
+ else NoType
+ case SkolemType(tp) =>
+ tp.lookupRefined(name)
+ case pre: WildcardType =>
+ WildcardType
+ case pre: TypeRef =>
+ pre.info match {
+ case TypeAlias(alias) => loop(alias)
+ case _ => NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ loop(this)
+ }
+
+ /** The type <this . name>, reduced if possible */
+ def select(name: Name)(implicit ctx: Context): Type = name match {
+ case name: TermName => TermRef.all(this, name)
+ case name: TypeName => TypeRef(this, name).reduceProjection
+ }
+
+ /** The type <this . name>, reduced if possible, with given denotation if unreduced */
+ def select(name: Name, denot: Denotation)(implicit ctx: Context): Type = name match {
+ case name: TermName => TermRef(this, name, denot)
+ case name: TypeName => TypeRef(this, name, denot).reduceProjection
+ }
+
+ /** The type <this . name> with given symbol, reduced if possible */
+ def select(sym: Symbol)(implicit ctx: Context): Type =
+ if (sym.isTerm) TermRef(this, sym.asTerm)
+ else TypeRef(this, sym.asType).reduceProjection
+
+// ----- Access to parts --------------------------------------------
+
+ /** The normalized prefix of this type is:
+ * for an alias type, the normalized prefix of its alias;
+ * for all other named types and class infos, the prefix.
+ * Inherited by all other type proxies.
+ * `NoType` for all other types.
+ */
+ final def normalizedPrefix(implicit ctx: Context): Type = this match {
+ case tp: NamedType =>
+ if (tp.symbol.info.isAlias) tp.info.normalizedPrefix else tp.prefix
+ case tp: ClassInfo =>
+ tp.prefix
+ case tp: TypeProxy =>
+ tp.underlying.normalizedPrefix
+ case _ =>
+ NoType
+ }
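+ // For example (sketch): for a reference `p.C` to a class `C`, the normalized prefix
+ // is `p`'s type; for a reference to an alias `type A = p.C`, it is the normalized
+ // prefix of the aliased type, again `p`'s type.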
+
+ /** For a ClassInfo type, its parents.
+ * Inherited by all type proxies. Empty for all other types.
+ * Overridden in ClassInfo, where parents is cached.
+ */
+ def parents(implicit ctx: Context): List[TypeRef] = this match {
+ case tp: TypeProxy => tp.underlying.parents
+ case _ => List()
+ }
+
+ /** The full parent types, including all type arguments */
+ def parentsWithArgs(implicit ctx: Context): List[Type] = this match {
+ case tp: TypeProxy => tp.superType.parentsWithArgs
+ case _ => List()
+ }
+
+ /** The first parent of this type, `Any` if the list of parents is empty */
+ def firstParent(implicit ctx: Context): TypeRef = parents match {
+ case p :: _ => p
+ case _ => defn.AnyType
+ }
+
+ /** The self type of the underlying class type */
+ def givenSelfType(implicit ctx: Context): Type = this match {
+ case tp: RefinedType => tp.wrapIfMember(tp.parent.givenSelfType)
+ case tp: ThisType => tp.tref.givenSelfType
+ case tp: TypeProxy => tp.superType.givenSelfType
+ case _ => NoType
+ }
+
+ /** The parameter types of a PolyType or MethodType, empty list for others */
+ final def paramTypess(implicit ctx: Context): List[List[Type]] = this match {
+ case mt: MethodType => mt.paramTypes :: mt.resultType.paramTypess
+ case pt: PolyType => pt.resultType.paramTypess
+ case _ => Nil
+ }
+
+ /** The parameter names of a PolyType or MethodType, empty list for others */
+ final def paramNamess(implicit ctx: Context): List[List[TermName]] = this match {
+ case mt: MethodType => mt.paramNames :: mt.resultType.paramNamess
+ case pt: PolyType => pt.resultType.paramNamess
+ case _ => Nil
+ }
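+ // For a curried method such as `def f(x: Int)(y: String): Boolean` (sketch),
+ // `paramNamess` is `List(List(x), List(y))` and `paramTypess` is
+ // `List(List(Int), List(String))`; for non-methodic types both are `Nil`.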
+
+
+ /** The parameter types in the first parameter section of a generic type or MethodType, empty list for others */
+ final def firstParamTypes(implicit ctx: Context): List[Type] = this match {
+ case mt: MethodType => mt.paramTypes
+ case pt: PolyType => pt.resultType.firstParamTypes
+ case _ => Nil
+ }
+
+ /** Is this either not a method at all, or a parameterless method? */
+ final def isParameterless(implicit ctx: Context): Boolean = this match {
+ case mt: MethodType => false
+ case pt: PolyType => pt.resultType.isParameterless
+ case _ => true
+ }
+
+ /** The resultType of a PolyType, MethodType, or ExprType, the type itself for others */
+ def resultType(implicit ctx: Context): Type = this
+
+ /** The final result type of a PolyType, MethodType, or ExprType, after skipping
+ * all parameter sections, the type itself for all others.
+ */
+ def finalResultType(implicit ctx: Context): Type = resultType match {
+ case mt: MethodType => mt.resultType.finalResultType
+ case pt: PolyType => pt.resultType.finalResultType
+ case _ => resultType
+ }
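+ // Continuing the sketch above: for `def f(x: Int)(y: String): Boolean`, the
+ // `resultType` of the outer MethodType is the inner MethodType `(y: String)Boolean`,
+ // whereas `finalResultType` skips all parameter sections and yields `Boolean`.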
+
+ /** This type seen as a TypeBounds */
+ final def bounds(implicit ctx: Context): TypeBounds = this match {
+ case tp: TypeBounds => tp
+ case ci: ClassInfo => TypeAlias(ci.typeRef)
+ case wc: WildcardType =>
+ wc.optBounds match {
+ case bounds: TypeBounds => bounds
+ case NoType => TypeBounds.empty
+ }
+ case _ => TypeAlias(this)
+ }
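+ // E.g. (sketch): viewing `Int` as bounds yields `TypeAlias(Int)`, i.e. equal lower
+ // and upper bounds, while a type that already is a `TypeBounds` is returned as is.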
+
+ /** The type parameter with given `name`. This tries first `decls`
+ * in order not to provoke a cycle by forcing the info. If that yields
+ * no symbol it tries `member` as an alternative.
+ */
+ def typeParamNamed(name: TypeName)(implicit ctx: Context): Symbol =
+ classSymbol.unforcedDecls.lookup(name) orElse member(name).symbol
+
+ /** If this is a prototype with some ignored component, reveal one more
+ * layer of it. Otherwise the type itself.
+ */
+ def deepenProto(implicit ctx: Context): Type = this
+
+// ----- Substitutions -----------------------------------------------------
+
+ /** Substitute all types that refer in their symbol attribute to
+ * one of the symbols in `from` by the corresponding types in `to`.
+ */
+ final def subst(from: List[Symbol], to: List[Type])(implicit ctx: Context): Type =
+ if (from.isEmpty) this
+ else {
+ val from1 = from.tail
+ if (from1.isEmpty) ctx.subst1(this, from.head, to.head, null)
+ else {
+ val from2 = from1.tail
+ if (from2.isEmpty) ctx.subst2(this, from.head, to.head, from1.head, to.tail.head, null)
+ else ctx.subst(this, from, to, null)
+ }
+ }
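+ // A common use (sketch): mapping a member's info from a class's own type parameters
+ // to concrete arguments, roughly `info.subst(cls.typeParams, argTypes)`.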
+
+ /** Same as `subst` but follows aliases as a fallback. When faced with a reference
+ * to an alias type, where normal substitution does not yield a new type, the
+ * substitution is instead applied to the alias. If that yields a new type,
+ * this type is returned, otherwise the original type (not the alias) is returned.
+ * A use case for this method is if one wants to substitute the type parameters
+ * of a class and also wants to substitute any parameter accessors that alias
+ * the type parameters.
+ */
+ final def substDealias(from: List[Symbol], to: List[Type])(implicit ctx: Context): Type =
+ ctx.substDealias(this, from, to, null)
+
+ /** Substitute all types of the form `PolyParam(from, N)` by
+ * `PolyParam(to, N)`.
+ */
+ final def subst(from: BindingType, to: BindingType)(implicit ctx: Context): Type =
+ ctx.subst(this, from, to, null)
+
+ /** Substitute all occurrences of `This(cls)` by `tp` */
+ final def substThis(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type =
+ ctx.substThis(this, cls, tp, null)
+
+ /** As substThis, but only if `cls` is a static owner (i.e. a globally accessible object) */
+ final def substThisUnlessStatic(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type =
+ if (cls.isStaticOwner) this else ctx.substThis(this, cls, tp, null)
+
+ /** Substitute all occurrences of `RecThis(binder)` by `tp` */
+ final def substRecThis(binder: RecType, tp: Type)(implicit ctx: Context): Type =
+ ctx.substRecThis(this, binder, tp, null)
+
+ /** Substitute a bound type by some other type */
+ final def substParam(from: ParamType, to: Type)(implicit ctx: Context): Type =
+ ctx.substParam(this, from, to, null)
+
+ /** Substitute bound types by some other types */
+ final def substParams(from: BindingType, to: List[Type])(implicit ctx: Context): Type =
+ ctx.substParams(this, from, to, null)
+
+ /** Substitute all occurrences of symbols in `from` by references to corresponding symbols in `to`
+ */
+ final def substSym(from: List[Symbol], to: List[Symbol])(implicit ctx: Context): Type =
+ ctx.substSym(this, from, to, null)
+
+// ----- misc -----------------------------------------------------------
+
+ /** Turn type into a function type.
+ * @pre this is a non-dependent method type.
+ * @param dropLast The number of trailing parameters that should be dropped
+ * when forming the function type.
+ */
+ def toFunctionType(dropLast: Int = 0)(implicit ctx: Context): Type = this match {
+ case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) =>
+ val formals1 = if (dropLast == 0) formals else formals dropRight dropLast
+ defn.FunctionOf(
+ formals1 mapConserve (_.underlyingIfRepeated(mt.isJava)), mt.resultType)
+ }
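+ // E.g. (sketch): a non-dependent method type `(x: Int)String` becomes
+ // `defn.FunctionOf(List(Int), String)`, that is, `Function1[Int, String]`.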
+
+ /** The signature of this type. This is by default NotAMethod,
+ * but is overridden for PolyTypes, MethodTypes, and TermRefWithSignature types.
+ * (The reason we deviate from the "final-method-with-pattern-match-in-base-class"
+ * pattern is that method signatures use caching, so encapsulation
+ * is improved by using an OO scheme.)
+ */
+ def signature(implicit ctx: Context): Signature = Signature.NotAMethod
+
+ /** Convert to text */
+ def toText(printer: Printer): Text = printer.toText(this)
+
+ /** Utility method to show the underlying type of a TypeProxy chain together
+ * with the proxy type itself.
+ */
+ def showWithUnderlying(n: Int = 1)(implicit ctx: Context): String = this match {
+ case tp: TypeProxy if n > 0 => s"$show with underlying ${tp.underlying.showWithUnderlying(n - 1)}"
+ case _ => show
+ }
+
+ /** A simplified version of this type which is equivalent wrt =:= to this type.
+ * This applies a typemap to the type which (like all typemaps) follows type
+ * variable instances and reduces typerefs over refined types. It also
+ * re-evaluates all occurrences of And/OrType with &/| because
+ * what was a union or intersection of type variables might be a simpler type
+ * after the type variables are instantiated. Finally, it
+ * maps poly params in the current constraint set back to their type vars.
+ */
+ def simplified(implicit ctx: Context) = ctx.simplify(this, null)
+
+ /** Customized hash code of this type.
+ * NotCached for uncached types. Cached types
+ * compute hash and use it as the type's hashCode.
+ */
+ def hash: Int
+ } // end Type
+
+// ----- Type categories ----------------------------------------------
+
+ /** A marker trait for cached types */
+ trait CachedType extends Type
+
+ /** A marker trait for type proxies.
+ * Each implementation is expected to redefine the `underlying` method.
+ */
+ abstract class TypeProxy extends Type {
+
+ /** The type to which this proxy forwards operations. */
+ def underlying(implicit ctx: Context): Type
+
+ /** The closest supertype of this type. This is the same as `underlying`,
+ * except for TypeRefs where the upper bound is returned, and HKApplys,
+ * where the upper bound of the constructor is re-applied to the arguments.
+ */
+ def superType(implicit ctx: Context): Type = underlying
+ }
+
+ // Every type has to inherit one of the following four abstract classes,
+ // which determine whether the type is cached, and whether
+ // it is a proxy of some other type. The duplication in their methods
+ // is for efficiency.
+
+ /** Instances of this class are cached and are not proxies. */
+ abstract class CachedGroundType extends Type with CachedType {
+ private[this] var myHash = HashUnknown
+ final def hash = {
+ if (myHash == HashUnknown) {
+ myHash = computeHash
+ assert(myHash != HashUnknown)
+ }
+ myHash
+ }
+ override final def hashCode =
+ if (hash == NotCached) System.identityHashCode(this) else hash
+ def computeHash: Int
+ }
+
+ /** Instances of this class are cached and are proxies. */
+ abstract class CachedProxyType extends TypeProxy with CachedType {
+ protected[this] var myHash = HashUnknown
+ final def hash = {
+ if (myHash == HashUnknown) {
+ myHash = computeHash
+ assert(myHash != HashUnknown)
+ }
+ myHash
+ }
+ override final def hashCode =
+ if (hash == NotCached) System.identityHashCode(this) else hash
+ def computeHash: Int
+ }
+
+ /** Instances of this class are uncached and are not proxies. */
+ abstract class UncachedGroundType extends Type {
+ final def hash = NotCached
+ if (monitored) {
+ record(s"uncachable")
+ record(s"uncachable: $getClass")
+ }
+ }
+
+ /** Instances of this class are uncached and are proxies. */
+ abstract class UncachedProxyType extends TypeProxy {
+ final def hash = NotCached
+ if (monitored) {
+ record(s"uncachable")
+ record(s"uncachable: $getClass")
+ }
+ }
+
+ /** A marker trait for types that apply only to type symbols */
+ trait TypeType extends Type
+
+ /** A marker trait for types that apply only to term symbols or that
+ * represent higher-kinded types.
+ */
+ trait TermType extends Type
+
+ /** A marker trait for types that can be types of values or prototypes of value types */
+ trait ValueTypeOrProto extends TermType
+
+ /** A marker trait for types that can be types of values or that are higher-kinded */
+ trait ValueType extends ValueTypeOrProto
+
+ /** A marker trait for types that are guaranteed to contain only a
+ * single non-null value (they might contain null in addition).
+ */
+ trait SingletonType extends TypeProxy with ValueType {
+ def isOverloaded(implicit ctx: Context) = false
+ }
+
+ /** A marker trait for types that bind other types that refer to them.
+ * Instances are: PolyType, MethodType, RecType.
+ */
+ trait BindingType extends Type
+
+ /** A trait for proto-types, used as expected types in typer */
+ trait ProtoType extends Type {
+ def isMatchedBy(tp: Type)(implicit ctx: Context): Boolean
+ def fold[T](x: T, ta: TypeAccumulator[T])(implicit ctx: Context): T
+ def map(tm: TypeMap)(implicit ctx: Context): ProtoType
+ }
+
+ /** Implementations of this trait cache the results of `narrow`. */
+ trait NarrowCached extends Type {
+ private var myNarrow: TermRef = null
+ override def narrow(implicit ctx: Context): TermRef = {
+ if (myNarrow eq null) myNarrow = super.narrow
+ myNarrow
+ }
+ }
+
+// --- NamedTypes ------------------------------------------------------------------
+
+ /** A NamedType of the form Prefix # name */
+ abstract class NamedType extends CachedProxyType with ValueType {
+
+ val prefix: Type
+ val name: Name
+
+ type ThisType >: this.type <: NamedType
+
+ assert(prefix.isValueType || (prefix eq NoPrefix), s"invalid prefix $prefix")
+
+ private[this] var lastDenotation: Denotation = _
+ private[this] var lastSymbol: Symbol = _
+ private[this] var checkedPeriod = Nowhere
+
+ // Invariants:
+ // (1) checkedPeriod != Nowhere => lastDenotation != null
+ // (2) lastDenotation != null => lastSymbol != null
+
+ /** There is a denotation computed which is valid (somewhere in) the
+ * current run.
+ */
+ def denotationIsCurrent(implicit ctx: Context) =
+ lastDenotation != null && lastDenotation.validFor.runId == ctx.runId
+
+ /** If the denotation is current, its symbol, otherwise NoSymbol.
+ *
+ * Note: This operation does not force the denotation, and is therefore
+ * timing dependent. It should only be used if the outcome of the
+ * essential computation does not depend on the symbol being present or not.
+ * It's currently used to take an optimized path in substituters and
+ * type accumulators, as well as to be safe in diagnostic printing.
+ * Normally, it's better to use `symbol`, not `currentSymbol`.
+ */
+ def currentSymbol(implicit ctx: Context) =
+ if (denotationIsCurrent) symbol else NoSymbol
+
+ /** The denotation currently denoted by this type */
+ final def denot(implicit ctx: Context): Denotation = {
+ val now = ctx.period
+ if (checkedPeriod == now) lastDenotation else denotAt(now)
+ }
+
+ /** A first fallback to do a somewhat more expensive calculation in case the first
+ * attempt in `denot` does not yield a denotation.
+ */
+ private def denotAt(now: Period)(implicit ctx: Context): Denotation = {
+ val d = lastDenotation
+ if (d != null && (d.validFor contains now)) {
+ checkedPeriod = now
+ d
+ }
+ else computeDenot
+ }
+
+ /** Hook for adding debug check code when denotations are assigned */
+ final def checkDenot()(implicit ctx: Context) = {}
+
+ /** A second fallback to recompute the denotation if necessary */
+ private def computeDenot(implicit ctx: Context): Denotation = {
+ val savedEphemeral = ctx.typerState.ephemeral
+ ctx.typerState.ephemeral = false
+ try {
+ val d = lastDenotation match {
+ case null =>
+ val sym = lastSymbol
+ if (sym == null) loadDenot else denotOfSym(sym)
+ case d: SymDenotation =>
+ if (this.isInstanceOf[WithFixedSym]) d.current
+ else if (d.validFor.runId == ctx.runId || ctx.stillValid(d))
+ if (d.exists && prefix.isTightPrefix(d.owner) || d.isConstructor) d.current
+ else recomputeMember(d) // symbol could have been overridden, recompute membership
+ else {
+ val newd = loadDenot
+ if (newd.exists) newd else d.staleSymbolError
+ }
+ case d =>
+ if (d.validFor.runId != ctx.period.runId) loadDenot
+ else d.current
+ }
+ if (ctx.typerState.ephemeral) record("ephemeral cache miss: loadDenot")
+ else if (d.exists) {
+ // Avoid storing NoDenotations in the cache - we will not be able to recover from
+ // them. The situation might arise that a type has NoDenotation in some later
+ // phase but a defined denotation earlier (e.g. a TypeRef to an abstract type
+ // is undefined after erasure.) We need to be able to do time travel back and
+ // forth also in these cases.
+
+ // Don't use setDenot here; double binding checks can give spurious failures after erasure
+ lastDenotation = d
+ checkDenot()
+ lastSymbol = d.symbol
+ checkedPeriod = ctx.period
+ }
+ d
+ }
+ finally ctx.typerState.ephemeral |= savedEphemeral
+ }
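+ // Denotation lookup thus proceeds in three steps of increasing cost: `denot` hits
+ // the period-checked cache, `denotAt` revalidates the cached denotation against the
+ // current period, and `computeDenot` recomputes or reloads it, caching the result
+ // unless the computation was flagged as ephemeral.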
+
+ /** A member of `prefix` (disambiguated by `d.signature`) or, if none was found, `d.current`. */
+ private def recomputeMember(d: SymDenotation)(implicit ctx: Context): Denotation =
+ asMemberOf(prefix) match {
+ case NoDenotation => d.current
+ case newd: SingleDenotation => newd
+ case newd =>
+ newd.atSignature(d.signature) match {
+ case newd1: SingleDenotation if newd1.exists => newd1
+ case _ => d.current
+ }
+ }
+
+ private def denotOfSym(sym: Symbol)(implicit ctx: Context): Denotation = {
+ val d = sym.denot
+ val owner = d.owner
+ if (owner.isTerm) d else d.asSeenFrom(prefix)
+ }
+
+ private def checkSymAssign(sym: Symbol)(implicit ctx: Context) = {
+ def selfTypeOf(sym: Symbol) = sym.owner.info match {
+ case info: ClassInfo => info.givenSelfType
+ case _ => NoType
+ }
+ assert(
+ (lastSymbol eq sym) ||
+ (lastSymbol eq null) || {
+ val lastDefRunId = lastDenotation match {
+ case d: SymDenotation => d.validFor.runId
+ case _ => lastSymbol.defRunId
+ }
+ (lastDefRunId != sym.defRunId) ||
+ (lastDefRunId == NoRunId)
+ } ||
+ (lastSymbol.infoOrCompleter == ErrorType ||
+ sym.owner != lastSymbol.owner &&
+ (sym.owner.derivesFrom(lastSymbol.owner) ||
+ selfTypeOf(sym).derivesFrom(lastSymbol.owner) ||
+ selfTypeOf(lastSymbol).derivesFrom(sym.owner))),
+ i"""data race? overwriting symbol of type $this,
+ |long form = $toString of class $getClass,
+ |last sym id = ${lastSymbol.id}, new sym id = ${sym.id},
+ |last owner = ${lastSymbol.owner}, new owner = ${sym.owner},
+ |period = ${ctx.phase} at run ${ctx.runId}""")
+ }
+
+ protected def sig: Signature = Signature.NotAMethod
+
+ private[dotc] def withDenot(denot: Denotation)(implicit ctx: Context): ThisType =
+ if (sig != denot.signature)
+ withSig(denot.signature).withDenot(denot).asInstanceOf[ThisType]
+ else {
+ setDenot(denot)
+ this
+ }
+
+ private[dotc] final def setDenot(denot: Denotation)(implicit ctx: Context): Unit = {
+ if (Config.checkNoDoubleBindings)
+ if (ctx.settings.YnoDoubleBindings.value)
+ checkSymAssign(denot.symbol)
+
+ // additional checks that intercept `denot` can be added here
+
+ lastDenotation = denot
+ checkDenot()
+ lastSymbol = denot.symbol
+ checkedPeriod = Nowhere
+ }
+
+ private[dotc] def withSym(sym: Symbol, signature: Signature)(implicit ctx: Context): ThisType =
+ if (sig != signature)
+ withSig(signature).withSym(sym, signature).asInstanceOf[ThisType]
+ else {
+ setSym(sym)
+ this
+ }
+
+ private[dotc] final def setSym(sym: Symbol)(implicit ctx: Context): Unit = {
+ if (Config.checkNoDoubleBindings)
+ if (ctx.settings.YnoDoubleBindings.value)
+ checkSymAssign(sym)
+ uncheckedSetSym(sym)
+ }
+
+ private[dotc] final def uncheckedSetSym(sym: Symbol): Unit = {
+ lastDenotation = null
+ lastSymbol = sym
+ checkedPeriod = Nowhere
+ }
+
+ private def withSig(sig: Signature)(implicit ctx: Context): NamedType =
+ TermRef.withSig(prefix, name.asTermName, sig)
+
+ protected def loadDenot(implicit ctx: Context): Denotation = {
+ val d = asMemberOf(prefix)
+ if (d.exists || ctx.phaseId == FirstPhaseId || !lastDenotation.isInstanceOf[SymDenotation])
+ d
+ else { // name has changed; try load in earlier phase and make current
+ val d = loadDenot(ctx.withPhase(ctx.phaseId - 1)).current
+ if (d.exists) d
+ else throw new Error(s"failure to reload $this of class $getClass")
+ }
+ }
+
+ protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation =
+ if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed)
+ else prefix.member(name)
+
+
+ /** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type
+ * to an (unbounded) wildcard type.
+ *
+ * (2) Reduce a type-ref `T { X = U; ... } # X` to `U`
+ * provided `U` does not refer with a RecThis to the
+ * refinement type `T { X = U; ... }`
+ */
+ def reduceProjection(implicit ctx: Context): Type = {
+ val reduced = prefix.lookupRefined(name)
+ if (reduced.exists) reduced else this
+ }
+
+ def symbol(implicit ctx: Context): Symbol = {
+ val now = ctx.period
+ if (checkedPeriod == now ||
+ lastDenotation == null && lastSymbol != null) lastSymbol
+ else denot.symbol
+ }
+
+ /** Retrieves currently valid symbol without necessarily updating denotation.
+ * Assumes that symbols do not change between periods in the same run.
+ * Used to get the class underlying a ThisType.
+ */
+ private[Types] def stableInRunSymbol(implicit ctx: Context): Symbol =
+ if (checkedPeriod.runId == ctx.runId) lastSymbol
+ else symbol
+
+ def info(implicit ctx: Context): Type = denot.info
+
+ def isType = isInstanceOf[TypeRef]
+ def isTerm = isInstanceOf[TermRef]
+
+ /** Guard against cycles that can arise if given `op`
+ * follows info. The problematic cases are a type alias to itself or
+ * bounded by itself or a val typed as itself:
+ *
+ * type T <: T
+ * val x: x.type
+ *
+ * These are errors but we have to make sure that operations do
+ * not loop before the error is detected.
+ */
+ final def controlled[T](op: => T)(implicit ctx: Context): T = try {
+ ctx.underlyingRecursions += 1
+ if (ctx.underlyingRecursions < Config.LogPendingUnderlyingThreshold)
+ op
+ else if (ctx.pendingUnderlying contains this)
+ throw CyclicReference(symbol)
+ else
+ try {
+ ctx.pendingUnderlying += this
+ op
+ } finally {
+ ctx.pendingUnderlying -= this
+ }
+ } finally {
+ ctx.underlyingRecursions -= 1
+ }
+
+ /** A selection of the same kind, but with potentially a different prefix.
+ * The following normalizations are performed for type selections T#A:
+ *
+ * T#A --> B if A is bound to an alias `= B` in T
+ *
+ * If Config.splitProjections is set:
+ *
+ * (S & T)#A --> S#A if T does not have a member named A
+ * --> T#A if S does not have a member named A
+ * --> S#A & T#A otherwise
+ * (S | T)#A --> S#A | T#A
+ */
+ def derivedSelect(prefix: Type)(implicit ctx: Context): Type =
+ if (prefix eq this.prefix) this
+ else if (isType) {
+ val res = prefix.lookupRefined(name)
+ if (res.exists) res
+ else if (Config.splitProjections)
+ prefix match {
+ case prefix: AndType =>
+ def isMissing(tp: Type) = tp match {
+ case tp: TypeRef => !tp.info.exists
+ case _ => false
+ }
+ val derived1 = derivedSelect(prefix.tp1)
+ val derived2 = derivedSelect(prefix.tp2)
+ return (
+ if (isMissing(derived1)) derived2
+ else if (isMissing(derived2)) derived1
+ else prefix.derivedAndType(derived1, derived2))
+ case prefix: OrType =>
+ val derived1 = derivedSelect(prefix.tp1)
+ val derived2 = derivedSelect(prefix.tp2)
+ return prefix.derivedOrType(derived1, derived2)
+ case _ =>
+ newLikeThis(prefix)
+ }
+ else newLikeThis(prefix)
+ }
+ else newLikeThis(prefix)
+
+ /** Create a NamedType of the same kind as this type, but with a new prefix.
+ */
+ def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType =
+ NamedType(prefix, name)
+
+ /** Create a NamedType of the same kind as this type, but with an "inherited name".
+ * This is necessary in situations like the following:
+ *
+ * class B { def m: T1 }
+ * class C extends B { private def m: T2; ... C.m }
+ * object C extends C
+ * object X { ... C.m }
+ *
+ * The two references of C.m in class C and object X refer to different
+ * definitions: The one in C refers to C#m whereas the one in X refers to B#m.
+ * But the type C.m must have only one denotation, so it can't refer to two
+ * members depending on context.
+ *
+ * In situations like this, the reference in X would get the type
+ * `<C.m>.shadowed` to make clear that we mean the inherited member, not
+ * the private one.
+ *
+ * Note: An alternative, possibly more robust scheme would be to give
+ * private members special names. A private definition would have a special
+ * name (say m' in the example above), but would be entered in its enclosing scope
+ * under both private and public names, so it could still be found by looking up
+ * the public name.
+ */
+ final def shadowed(implicit ctx: Context): NamedType =
+ NamedType(prefix, name.shadowedName)
+
+ override def equals(that: Any) = that match {
+ case that: NamedType =>
+ this.name == that.name &&
+ this.prefix == that.prefix &&
+ !that.isInstanceOf[TermRefWithSignature] &&
+ !that.isInstanceOf[WithFixedSym]
+ case _ =>
+ false
+ }
+
+ /* A version of toString which also prints aliases. Can be used for debugging
+ override def toString =
+ if (isTerm) s"TermRef($prefix, $name)"
+ else s"TypeRef($prefix, $name)${
+ if (lastDenotation != null && lastDenotation.infoOrCompleter.isAlias)
+ s"@@@ ${lastDenotation.infoOrCompleter.asInstanceOf[TypeAlias].hi}"
+ else ""}"
+ */
+ }
+
+ abstract case class TermRef(override val prefix: Type, name: TermName) extends NamedType with SingletonType {
+
+ type ThisType = TermRef
+
+ //assert(name.toString != "<local Coder>")
+ override def underlying(implicit ctx: Context): Type = {
+ val d = denot
+ if (d.isOverloaded) NoType else d.info
+ }
+
+ override def signature(implicit ctx: Context): Signature = denot.signature
+
+ override def isOverloaded(implicit ctx: Context) = denot.isOverloaded
+
+ private def rewrap(sd: SingleDenotation)(implicit ctx: Context) =
+ TermRef.withSigAndDenot(prefix, name, sd.signature, sd)
+
+ def alternatives(implicit ctx: Context): List[TermRef] =
+ denot.alternatives map rewrap
+
+ def altsWith(p: Symbol => Boolean)(implicit ctx: Context): List[TermRef] =
+ denot.altsWith(p) map rewrap
+ }
+
+ abstract case class TypeRef(override val prefix: Type, name: TypeName) extends NamedType {
+
+ type ThisType = TypeRef
+
+ override def underlying(implicit ctx: Context): Type = info
+
+ override def superType(implicit ctx: Context): Type = info match {
+ case TypeBounds(_, hi) => hi
+ case _ => info
+ }
+ }
+
+ final class TermRefWithSignature(prefix: Type, name: TermName, override val sig: Signature) extends TermRef(prefix, name) {
+ assert(prefix ne NoPrefix)
+ override def signature(implicit ctx: Context) = sig
+ override def loadDenot(implicit ctx: Context): Denotation = {
+ val d = super.loadDenot
+ if (sig eq Signature.OverloadedSignature) d
+ else d.atSignature(sig).checkUnique
+ }
+
+ override def newLikeThis(prefix: Type)(implicit ctx: Context): TermRef = {
+ val candidate = TermRef.withSig(prefix, name, sig)
+ if (symbol.exists && !candidate.symbol.exists) { // recompute from previous symbol
+ val ownSym = symbol
+ val newd = asMemberOf(prefix)
+ candidate.withDenot(newd.suchThat(_.signature == ownSym.signature))
+ }
+ else candidate
+ }
+
+ override def equals(that: Any) = that match {
+ case that: TermRefWithSignature =>
+ this.prefix == that.prefix &&
+ this.name == that.name &&
+ this.sig == that.sig
+ case _ =>
+ false
+ }
+ override def computeHash = doHash((name, sig), prefix)
+ override def toString = super.toString ++ s"/withSig($sig)"
+ }
+
+ trait WithFixedSym extends NamedType {
+ def fixedSym: Symbol
+ assert(fixedSym ne NoSymbol)
+ uncheckedSetSym(fixedSym)
+
+ override def withDenot(denot: Denotation)(implicit ctx: Context): ThisType = {
+ assert(denot.symbol eq fixedSym)
+ setDenot(denot)
+ this
+ }
+
+ override def withSym(sym: Symbol, signature: Signature)(implicit ctx: Context): ThisType =
+ unsupported("withSym")
+
+ override def newLikeThis(prefix: Type)(implicit ctx: Context): NamedType =
+ NamedType.withFixedSym(prefix, fixedSym)
+
+ override def equals(that: Any) = that match {
+ case that: WithFixedSym => this.prefix == that.prefix && (this.fixedSym eq that.fixedSym)
+ case _ => false
+ }
+ override def computeHash = doHash(fixedSym, prefix)
+ }
+
+ final class CachedTermRef(prefix: Type, name: TermName, hc: Int) extends TermRef(prefix, name) {
+ assert(prefix ne NoPrefix)
+ myHash = hc
+ override def computeHash = unsupported("computeHash")
+ }
+
+ final class CachedTypeRef(prefix: Type, name: TypeName, hc: Int) extends TypeRef(prefix, name) {
+ assert(prefix ne NoPrefix)
+ myHash = hc
+ override def computeHash = unsupported("computeHash")
+ }
+
+ // These classes are non-final because Linker extends them.
+ class TermRefWithFixedSym(prefix: Type, name: TermName, val fixedSym: TermSymbol) extends TermRef(prefix, name) with WithFixedSym
+ class TypeRefWithFixedSym(prefix: Type, name: TypeName, val fixedSym: TypeSymbol) extends TypeRef(prefix, name) with WithFixedSym
+
+ /** Assert current phase does not have erasure semantics */
+ private def assertUnerased()(implicit ctx: Context) =
+ if (Config.checkUnerased) assert(!ctx.phase.erasedTypes)
+
+ object NamedType {
+ def apply(prefix: Type, name: Name)(implicit ctx: Context) =
+ if (name.isTermName) TermRef.all(prefix, name.asTermName)
+ else TypeRef(prefix, name.asTypeName)
+ def apply(prefix: Type, name: Name, denot: Denotation)(implicit ctx: Context) =
+ if (name.isTermName) TermRef(prefix, name.asTermName, denot)
+ else TypeRef(prefix, name.asTypeName, denot)
+ def withFixedSym(prefix: Type, sym: Symbol)(implicit ctx: Context) =
+ if (sym.isType) TypeRef.withFixedSym(prefix, sym.name.asTypeName, sym.asType)
+ else TermRef.withFixedSym(prefix, sym.name.asTermName, sym.asTerm)
+ def withSymAndName(prefix: Type, sym: Symbol, name: Name)(implicit ctx: Context): NamedType =
+ if (sym.isType) TypeRef.withSymAndName(prefix, sym.asType, name.asTypeName)
+ else TermRef.withSymAndName(prefix, sym.asTerm, name.asTermName)
+ }
+
+ object TermRef {
+
+ private def symbolicRefs(implicit ctx: Context) = ctx.phase.symbolicRefs
+
+ /** Create term ref with given name, without specifying a signature.
+ * Its meaning is the (potentially multi-) denotation of the member(s)
+ * of prefix with given name.
+ */
+ def all(prefix: Type, name: TermName)(implicit ctx: Context): TermRef = {
+ ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TermRef]
+ }
+
+ /** Create term ref referring to given symbol, taking the signature
+ * from the symbol if it is completed, or creating a term ref without
+ * signature, if symbol is not yet completed.
+ */
+ def apply(prefix: Type, sym: TermSymbol)(implicit ctx: Context): TermRef =
+ withSymAndName(prefix, sym, sym.name)
+
+ /** Create term ref to given initial denotation, taking the signature
+ * from the denotation if it is completed, or creating a term ref without
+ * signature, if denotation is not yet completed.
+ */
+ def apply(prefix: Type, name: TermName, denot: Denotation)(implicit ctx: Context): TermRef = {
+ if ((prefix eq NoPrefix) || denot.symbol.isFresh || symbolicRefs)
+ apply(prefix, denot.symbol.asTerm)
+ else denot match {
+ case denot: SymDenotation if denot.isCompleted => withSig(prefix, name, denot.signature)
+ case _ => all(prefix, name)
+ }
+ } withDenot denot
+
+ /** Create a non-member term ref (which cannot be reloaded using `member`),
+ * with given prefix, name, and symbol
+ */
+ def withFixedSym(prefix: Type, name: TermName, sym: TermSymbol)(implicit ctx: Context): TermRef =
+ unique(new TermRefWithFixedSym(prefix, name, sym))
+
+ /** Create a term ref referring to given symbol with given name, taking the signature
+ * from the symbol if it is completed, or creating a term ref without
+ * signature, if symbol is not yet completed. This is very similar to TermRef(Type, Symbol),
+ * except for two differences:
+ * (1) The symbol might not yet have a denotation, so the name needs to be given explicitly.
+ * (2) The name in the term ref need not be the same as the name of the Symbol.
+ */
+ def withSymAndName(prefix: Type, sym: TermSymbol, name: TermName)(implicit ctx: Context): TermRef =
+ if ((prefix eq NoPrefix) || sym.isFresh || symbolicRefs)
+ withFixedSym(prefix, name, sym)
+ else if (sym.defRunId != NoRunId && sym.isCompleted)
+ withSig(prefix, name, sym.signature) withSym (sym, sym.signature)
+ // Linker note:
+ // this is problematic, as the withSig method could return a hash-consed reference
+ // that already has a symbol set, making withSym trigger a double-binding error;
+ // ./tests/run/absoverride.scala demonstrates this
+ else
+ all(prefix, name) withSym (sym, Signature.NotAMethod)
+
+ /** Create a term ref to given symbol, taking the signature from the symbol
+ * (which must be completed).
+ */
+ def withSig(prefix: Type, sym: TermSymbol)(implicit ctx: Context): TermRef =
+ if ((prefix eq NoPrefix) || sym.isFresh || symbolicRefs) withFixedSym(prefix, sym.name, sym)
+ else withSig(prefix, sym.name, sym.signature).withSym(sym, sym.signature)
+
+ /** Create a term ref with given prefix, name and signature */
+ def withSig(prefix: Type, name: TermName, sig: Signature)(implicit ctx: Context): TermRef =
+ unique(new TermRefWithSignature(prefix, name, sig))
+
+ /** Create a term ref with given prefix, name, signature, and initial denotation */
+ def withSigAndDenot(prefix: Type, name: TermName, sig: Signature, denot: Denotation)(implicit ctx: Context): TermRef = {
+ if ((prefix eq NoPrefix) || denot.symbol.isFresh || symbolicRefs)
+ withFixedSym(prefix, denot.symbol.asTerm.name, denot.symbol.asTerm)
+ else
+ withSig(prefix, name, sig)
+ } withDenot denot
+ }
+
+ object TypeRef {
+ /** Create type ref with given prefix and name */
+ def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef =
+ ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef]
+
+ /** Create type ref to given symbol */
+ def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
+ withSymAndName(prefix, sym, sym.name)
+
+ /** Create a non-member type ref (which cannot be reloaded using `member`),
+ * with given prefix, name, and symbol.
+ */
+ def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
+ unique(new TypeRefWithFixedSym(prefix, name, sym))
+
+ /** Create a type ref referring to given symbol with given name.
+ * This is very similar to TypeRef(Type, Symbol),
+ * except for two differences:
+ * (1) The symbol might not yet have a denotation, so the name needs to be given explicitly.
+ * (2) The name in the type ref need not be the same as the name of the Symbol.
+ */
+ def withSymAndName(prefix: Type, sym: TypeSymbol, name: TypeName)(implicit ctx: Context): TypeRef =
+ if ((prefix eq NoPrefix) || sym.isFresh) withFixedSym(prefix, name, sym)
+ else apply(prefix, name).withSym(sym, Signature.NotAMethod)
+
+ /** Create a type ref with given name and initial denotation */
+ def apply(prefix: Type, name: TypeName, denot: Denotation)(implicit ctx: Context): TypeRef = {
+ if ((prefix eq NoPrefix) || denot.symbol.isFresh) apply(prefix, denot.symbol.asType)
+ else apply(prefix, name)
+ } withDenot denot
+ }
+
+ // --- Other SingletonTypes: ThisType/SuperType/ConstantType ---------------------------
+
+ /** The type cls.this
+ * @param tref A type ref which indicates the class `cls`.
+ * Note: we do not pass a class symbol directly, because symbols
+ * do not survive runs whereas typerefs do.
+ */
+ abstract case class ThisType(tref: TypeRef) extends CachedProxyType with SingletonType {
+ def cls(implicit ctx: Context): ClassSymbol = tref.stableInRunSymbol.asClass
+ override def underlying(implicit ctx: Context): Type =
+ if (ctx.erasedTypes) tref else cls.classInfo.selfType
+ override def computeHash = doHash(tref)
+ }
+
+ final class CachedThisType(tref: TypeRef) extends ThisType(tref)
+
+ object ThisType {
+ /** Normally one should use ClassSymbol#thisType instead */
+ def raw(tref: TypeRef)(implicit ctx: Context) =
+ unique(new CachedThisType(tref))
+ }
+
+ /** The type of a super reference cls.super where
+ * `thistpe` is cls.this and `supertpe` is the type of the value referenced
+ * by `super`.
+ */
+ abstract case class SuperType(thistpe: Type, supertpe: Type) extends CachedProxyType with SingletonType {
+ override def underlying(implicit ctx: Context) = supertpe
+ def derivedSuperType(thistpe: Type, supertpe: Type)(implicit ctx: Context) =
+ if ((thistpe eq this.thistpe) && (supertpe eq this.supertpe)) this
+ else SuperType(thistpe, supertpe)
+ override def computeHash = doHash(thistpe, supertpe)
+ }
+
+ final class CachedSuperType(thistpe: Type, supertpe: Type) extends SuperType(thistpe, supertpe)
+
+ object SuperType {
+ def apply(thistpe: Type, supertpe: Type)(implicit ctx: Context): Type = {
+ assert(thistpe != NoPrefix)
+ unique(new CachedSuperType(thistpe, supertpe))
+ }
+ }
+
+ /** A constant type with single `value`. */
+ abstract case class ConstantType(value: Constant) extends CachedProxyType with SingletonType {
+ override def underlying(implicit ctx: Context) = value.tpe
+ override def computeHash = doHash(value)
+ }
+
+ final class CachedConstantType(value: Constant) extends ConstantType(value)
+
+ object ConstantType {
+ def apply(value: Constant)(implicit ctx: Context) = {
+ assertUnerased()
+ unique(new CachedConstantType(value))
+ }
+ }
+
+ case class LazyRef(refFn: () => Type) extends UncachedProxyType with ValueType {
+ private var myRef: Type = null
+ private var computed = false
+ def ref = {
+ if (computed) assert(myRef != null)
+ else {
+ computed = true
+ myRef = refFn()
+ }
+ myRef
+ }
+ def evaluating = computed && myRef == null
+ override def underlying(implicit ctx: Context) = ref
+ override def toString = s"LazyRef($ref)"
+ override def equals(other: Any) = other match {
+ case other: LazyRef => this.ref.equals(other.ref)
+ case _ => false
+ }
+ override def hashCode = ref.hashCode + 37
+ }
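+ // Sketch of intended use: `LazyRef(() => someLaterComputedType)` (with
+ // `someLaterComputedType` a placeholder) defers evaluation until `ref` is first
+ // demanded, which helps when a type must be referred to before it can be safely
+ // computed, e.g. in cyclic situations.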
+
+ // --- Refined Type and RecType ------------------------------------------------
+
+ abstract class RefinedOrRecType extends CachedProxyType with ValueType {
+ def parent: Type
+ }
+
+ /** A refined type parent { refinement }
+ * @param refinedName The name of the refinement declaration
+ * @param refinedInfo The info of the refinement declaration.
+ */
+ abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType {
+
+ override def underlying(implicit ctx: Context) = parent
+
+ private def badInst =
+ throw new AssertionError(s"bad instantiation: $this")
+
+ def checkInst(implicit ctx: Context): this.type = this // debug hook
+
+ def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type =
+ if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this
+ else RefinedType(parent, refinedName, refinedInfo)
+
+ /** Add this refinement to `parent`, provided `refinedName` is a member of `parent`. */
+ def wrapIfMember(parent: Type)(implicit ctx: Context): Type =
+ if (parent.member(refinedName).exists) derivedRefinedType(parent, refinedName, refinedInfo)
+ else parent
+
+ override def equals(that: Any) = that match {
+ case that: RefinedType =>
+ this.parent == that.parent &&
+ this.refinedName == that.refinedName &&
+ this.refinedInfo == that.refinedInfo
+ case _ =>
+ false
+ }
+ override def computeHash = doHash(refinedName, refinedInfo, parent)
+ override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)"
+ }
+
+ class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int)
+ extends RefinedType(parent, refinedName, refinedInfo) {
+ myHash = hc
+ override def computeHash = unsupported("computeHash")
+ }
+
+ object RefinedType {
+ def make(parent: Type, names: List[Name], infos: List[Type])(implicit ctx: Context): Type =
+ if (names.isEmpty) parent
+ else make(RefinedType(parent, names.head, infos.head), names.tail, infos.tail)
+
+ def apply(parent: Type, name: Name, info: Type)(implicit ctx: Context): RefinedType = {
+ assert(!ctx.erasedTypes)
+ ctx.base.uniqueRefinedTypes.enterIfNew(parent, name, info).checkInst
+ }
+ }
+
+ class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType {
+
+ // See discussion in findMember#goRec why these vars are needed
+ private[Types] var opened: Boolean = false
+ private[Types] var openedTwice: Boolean = false
+
+ val parent = parentExp(this)
+
+ override def underlying(implicit ctx: Context): Type = parent
+
+ def derivedRecType(parent: Type)(implicit ctx: Context): RecType =
+ if (parent eq this.parent) this
+ else RecType(rt => parent.substRecThis(this, RecThis(rt)))
+
+ def rebind(parent: Type)(implicit ctx: Context): Type =
+ if (parent eq this.parent) this
+ else RecType.closeOver(rt => parent.substRecThis(this, RecThis(rt)))
+
+ override def equals(other: Any) = other match {
+ case other: RecType => other.parent == this.parent
+ case _ => false
+ }
+
+ def isReferredToBy(tp: Type)(implicit ctx: Context): Boolean = {
+ val refacc = new TypeAccumulator[Boolean] {
+ override def apply(x: Boolean, tp: Type) = x || {
+ tp match {
+ case tp: TypeRef => apply(x, tp.prefix)
+ case tp: RecThis => RecType.this eq tp.binder
+ case tp: LazyRef => true // To be safe, assume a reference exists
+ case _ => foldOver(x, tp)
+ }
+ }
+ }
+ refacc.apply(false, tp)
+ }
+
+ override def computeHash = doHash(parent)
+ override def toString = s"RecType($parent | $hashCode)"
+
+ private def checkInst(implicit ctx: Context): this.type = this // debug hook
+ }
+
+ object RecType {
+
+ /** Create a RecType, normalizing its contents. This means:
+ *
+ * 1. Nested Rec types on the type's spine are merged with the outer one.
+ * 2. Any refinement of the form `type T = z.T` on the spine of the type
+ * where `z` refers to the created rec-type is replaced by
+ * `type T`. This avoids infinite recursions later when we
+ * try to follow these references.
+ * TODO: Figure out how to guarantee absence of cycles
+ * of length > 1
+ */
+ def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = {
+ val rt = new RecType(parentExp)
+ def normalize(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType =>
+ normalize(tp.parent.substRecThis(tp, RecThis(rt)))
+ case tp @ RefinedType(parent, rname, rinfo) =>
+ val rinfo1 = rinfo match {
+ case TypeAlias(TypeRef(RecThis(`rt`), `rname`)) => TypeBounds.empty
+ case _ => rinfo
+ }
+ tp.derivedRefinedType(normalize(parent), rname, rinfo1)
+ case tp =>
+ tp
+ }
+ unique(rt.derivedRecType(normalize(rt.parent))).checkInst
+ }
+ def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = {
+ val rt = this(parentExp)
+ if (rt.isReferredToBy(rt.parent)) rt else rt.parent
+ }
+ }
+
+ // --- AndType/OrType ---------------------------------------------------------------
+
+ trait AndOrType extends ValueType { // todo: check where we can simplify using AndOrType
+ def tp1: Type
+ def tp2: Type
+ def isAnd: Boolean
+ def derivedAndOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type // needed?
+ }
+
+ abstract case class AndType(tp1: Type, tp2: Type) extends CachedGroundType with AndOrType {
+
+ def isAnd = true
+
+ def derivedAndType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else AndType.make(tp1, tp2)
+
+ def derived_& (tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else tp1 & tp2
+
+ def derivedAndOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ derivedAndType(tp1, tp2)
+
+ override def computeHash = doHash(tp1, tp2)
+ }
+
+ final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2)
+
+ object AndType {
+ def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ assert(tp1.isValueType && tp2.isValueType, i"$tp1 & $tp2 / " + s"$tp1 & $tp2")
+ unchecked(tp1, tp2)
+ }
+ def unchecked(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ assertUnerased()
+ unique(new CachedAndType(tp1, tp2))
+ }
+ def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq tp2) || (tp2 eq defn.AnyType))
+ tp1
+ else if (tp1 eq defn.AnyType)
+ tp2
+ else
+ apply(tp1, tp2)
+ }
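+ // `AndType.make` absorbs the trivial cases before hash-consing (sketch):
+ // `make(T, Any)` and `make(Any, T)` both yield `T`, and `make(T, T)` yields `T`
+ // when the two sides are the same instance.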
+
+ abstract case class OrType(tp1: Type, tp2: Type) extends CachedGroundType with AndOrType {
+
+ assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType])
+ def isAnd = false
+
+ private[this] var myJoin: Type = _
+ private[this] var myJoinPeriod: Period = Nowhere
+
+ /** Replace this Or type by the closest non-Or type above it */
+ def join(implicit ctx: Context): Type = {
+ if (myJoinPeriod != ctx.period) {
+ myJoin = ctx.orDominator(this)
+ core.println(i"join of $this == $myJoin")
+ assert(myJoin != this)
+ myJoinPeriod = ctx.period
+ }
+ myJoin
+ }
+
+ def derivedOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
+ else OrType.make(tp1, tp2)
+
+ def derivedAndOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ derivedOrType(tp1, tp2)
+
+ override def computeHash = doHash(tp1, tp2)
+ }
+
+ final class CachedOrType(tp1: Type, tp2: Type) extends OrType(tp1, tp2)
+
+ object OrType {
+ def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ assertUnerased()
+ unique(new CachedOrType(tp1, tp2))
+ }
+ def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
+ if (tp1 eq tp2) tp1 else apply(tp1, tp2)
+ }
+
+ // ----- Method types: MethodType/ExprType/PolyType -------------------------------
+
+ // Note: method types are cached whereas poly types are not. The reason
+ // is that most poly types are cyclic via poly params,
+ // and therefore two different poly types would never be equal.
+
+ /** A trait that mixes in functionality for signature caching */
+ trait MethodicType extends TermType {
+
+ private[this] var mySignature: Signature = _
+ private[this] var mySignatureRunId: Int = NoRunId
+
+ protected def computeSignature(implicit ctx: Context): Signature
+
+ protected def resultSignature(implicit ctx: Context) = try resultType match {
+ case rtp: MethodicType => rtp.signature
+ case tp => Signature(tp, isJava = false)
+ }
+ catch {
+ case ex: AssertionError =>
+ println(i"failure while taking result signture of $this: $resultType")
+ throw ex
+ }
+
+ final override def signature(implicit ctx: Context): Signature = {
+ if (ctx.runId != mySignatureRunId) {
+ mySignature = computeSignature
+ if (!mySignature.isUnderDefined) mySignatureRunId = ctx.runId
+ }
+ mySignature
+ }
+ }
+
+ trait MethodOrPoly extends MethodicType
+
+ abstract case class MethodType(paramNames: List[TermName], paramTypes: List[Type])
+ (resultTypeExp: MethodType => Type)
+ extends CachedGroundType with BindingType with TermType with MethodOrPoly with NarrowCached { thisMethodType =>
+ import MethodType._
+
+ def isJava = false
+ def isImplicit = false
+
+ private[core] val resType = resultTypeExp(this)
+ assert(resType.exists)
+
+ override def resultType(implicit ctx: Context): Type =
+ if (dependencyStatus == FalseDeps) { // dealias all false dependencies
+ val dealiasMap = new TypeMap {
+ def apply(tp: Type) = tp match {
+ case tp @ TypeRef(pre, name) =>
+ tp.info match {
+ case TypeAlias(alias) if depStatus(pre) == TrueDeps => apply(alias)
+ case _ => mapOver(tp)
+ }
+ case _ =>
+ mapOver(tp)
+ }
+ }
+ dealiasMap(resType)
+ }
+ else resType
+
+ var myDependencyStatus: DependencyStatus = Unknown
+
+ private def depStatus(tp: Type)(implicit ctx: Context): DependencyStatus = {
+ def combine(x: DependencyStatus, y: DependencyStatus) = {
+ val status = (x & StatusMask) max (y & StatusMask)
+ val provisional = (x | y) & Provisional
+ (if (status == TrueDeps) status else status | provisional).toByte
+ }
+ val depStatusAcc = new TypeAccumulator[DependencyStatus] {
+ def apply(status: DependencyStatus, tp: Type) =
+ if (status == TrueDeps) status
+ else
+ tp match {
+ case MethodParam(`thisMethodType`, _) => TrueDeps
+ case tp: TypeRef =>
+ val status1 = foldOver(status, tp)
+ tp.info match { // follow type alias to avoid dependency
+ case TypeAlias(alias) if status1 == TrueDeps && status != TrueDeps =>
+ combine(apply(status, alias), FalseDeps)
+ case _ =>
+ status1
+ }
+ case tp: TypeVar if !tp.isInstantiated => combine(status, Provisional)
+ case _ => foldOver(status, tp)
+ }
+ }
+ depStatusAcc(NoDeps, tp)
+ }
+
+ /** The dependency status of this method. Some examples:
+ *
+ * class C extends { type S; type T = String }
+ * def f(x: C)(y: Boolean) // dependencyStatus = NoDeps
+ * def f(x: C)(y: x.S) // dependencyStatus = TrueDeps
+ * def f(x: C)(y: x.T) // dependencyStatus = FalseDeps, i.e.
+ * // dependency can be eliminated by dealiasing.
+ */
+ private def dependencyStatus(implicit ctx: Context): DependencyStatus = {
+ if (myDependencyStatus != Unknown) myDependencyStatus
+ else {
+ val result = depStatus(resType)
+ if ((result & Provisional) == 0) myDependencyStatus = result
+ (result & StatusMask).toByte
+ }
+ }
+
+ /** Does result type contain references to parameters of this method type,
+ * which cannot be eliminated by de-aliasing?
+ */
+ def isDependent(implicit ctx: Context): Boolean = dependencyStatus == TrueDeps
+
+ protected def computeSignature(implicit ctx: Context): Signature =
+ resultSignature.prepend(paramTypes, isJava)
+
+ def derivedMethodType(paramNames: List[TermName], paramTypes: List[Type], resType: Type)(implicit ctx: Context) =
+ if ((paramNames eq this.paramNames) && (paramTypes eq this.paramTypes) && (resType eq this.resType)) this
+ else {
+ val resTypeFn = (x: MethodType) => resType.subst(this, x)
+ if (isJava) JavaMethodType(paramNames, paramTypes)(resTypeFn)
+ else if (isImplicit) ImplicitMethodType(paramNames, paramTypes)(resTypeFn)
+ else MethodType(paramNames, paramTypes)(resTypeFn)
+ }
+
+ def instantiate(argTypes: => List[Type])(implicit ctx: Context): Type =
+ if (isDependent) resultType.substParams(this, argTypes)
+ else resultType
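+ // E.g. (sketch): instantiating the dependent method type `(x: C)x.S` with an
+ // argument type `c.type` substitutes the parameter and yields `c.S`; for
+ // non-dependent methods the result type is returned directly and `argTypes`,
+ // being by-name, is never forced.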
+
+ override def equals(that: Any) = that match {
+ case that: MethodType =>
+ this.paramNames == that.paramNames &&
+ this.paramTypes == that.paramTypes &&
+ this.resType == that.resType
+ case _ =>
+ false
+ }
+
+ override def computeHash = doHash(paramNames, resType, paramTypes)
+
+ protected def prefixString = "MethodType"
+ override def toString = s"$prefixString($paramNames, $paramTypes, $resType)"
+ }
+
+ final class CachedMethodType(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)
+ extends MethodType(paramNames, paramTypes)(resultTypeExp) {
+ override def equals(that: Any) = super.equals(that) && that.isInstanceOf[CachedMethodType]
+ }
+
+ final class JavaMethodType(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)
+ extends MethodType(paramNames, paramTypes)(resultTypeExp) {
+ override def isJava = true
+ override def equals(that: Any) = super.equals(that) && that.isInstanceOf[JavaMethodType]
+ override def computeHash = addDelta(super.computeHash, 1)
+ override protected def prefixString = "JavaMethodType"
+ }
+
+ final class ImplicitMethodType(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)
+ extends MethodType(paramNames, paramTypes)(resultTypeExp) {
+ override def isImplicit = true
+ override def equals(that: Any) = super.equals(that) && that.isInstanceOf[ImplicitMethodType]
+ override def computeHash = addDelta(super.computeHash, 2)
+ override protected def prefixString = "ImplicitMethodType"
+ }
+
+ abstract class MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context): MethodType
+ def apply(paramNames: List[TermName], paramTypes: List[Type], resultType: Type)(implicit ctx: Context): MethodType =
+ apply(paramNames, paramTypes)(_ => resultType)
+ def apply(paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context): MethodType =
+ apply(nme.syntheticParamNames(paramTypes.length), paramTypes)(resultTypeExp)
+ def apply(paramTypes: List[Type], resultType: Type)(implicit ctx: Context): MethodType =
+ apply(nme.syntheticParamNames(paramTypes.length), paramTypes, resultType)
+
+ /** Produce method type from parameter symbols, with special mappings for repeated
+ * and inline parameters.
+ */
+ def fromSymbols(params: List[Symbol], resultType: Type)(implicit ctx: Context) = {
+ /** Replace @repeated annotations on Seq or Array types by <repeated> types */
+ def translateRepeated(tp: Type): Type = tp match {
+ case tp @ ExprType(tp1) => tp.derivedExprType(translateRepeated(tp1))
+ case AnnotatedType(tp, annot) if annot matches defn.RepeatedAnnot =>
+ val typeSym = tp.typeSymbol.asClass
+ assert(typeSym == defn.SeqClass || typeSym == defn.ArrayClass)
+ tp.translateParameterized(typeSym, defn.RepeatedParamClass)
+ case tp =>
+ tp
+ }
+ /** Add @inlineParam to inline call-by-value parameters */
+ def translateInline(tp: Type): Type = tp match {
+ case _: ExprType => tp
+ case _ => AnnotatedType(tp, Annotation(defn.InlineParamAnnot))
+ }
+ def paramInfo(param: Symbol): Type = {
+ val paramType = translateRepeated(param.info)
+ if (param.is(Inline)) translateInline(paramType) else paramType
+ }
+ def transformResult(mt: MethodType) =
+ resultType.subst(params, (0 until params.length).toList map (MethodParam(mt, _)))
+ apply(params map (_.name.asTermName), params map paramInfo)(transformResult _)
+ }
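+ // Sketch: for a repeated parameter `xs: Int*`, whose info is a `Seq[Int]` carrying
+ // the repeated annotation, `fromSymbols` records the parameter type as
+ // `<repeated>[Int]`; an inline call-by-value parameter additionally gets an
+ // `@inlineParam` annotation on its type.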
+ }
+
+ object MethodType extends MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
+ unique(new CachedMethodType(paramNames, paramTypes)(resultTypeExp))
+
+ private type DependencyStatus = Byte
+ private final val Unknown: DependencyStatus = 0 // not yet computed
+ private final val NoDeps: DependencyStatus = 1 // no dependent parameters found
+ private final val FalseDeps: DependencyStatus = 2 // all dependent parameters are prefixes of non-dependent alias types
+ private final val TrueDeps: DependencyStatus = 3 // some truly dependent parameters exist
+ private final val StatusMask: DependencyStatus = 3 // the bits indicating actual dependency status
+ private final val Provisional: DependencyStatus = 4 // set if dependency status can still change due to type variable instantiations
+ }
+
+ object JavaMethodType extends MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
+ unique(new JavaMethodType(paramNames, paramTypes)(resultTypeExp))
+ }
+
+ object ImplicitMethodType extends MethodTypeCompanion {
+ def apply(paramNames: List[TermName], paramTypes: List[Type])(resultTypeExp: MethodType => Type)(implicit ctx: Context) =
+ unique(new ImplicitMethodType(paramNames, paramTypes)(resultTypeExp))
+ }
+
+ /** A by-name parameter type of the form `=> T`, or the type of a method with no parameter list. */
+ abstract case class ExprType(resType: Type)
+ extends CachedProxyType with TermType with MethodicType {
+ override def resultType(implicit ctx: Context): Type = resType
+ override def underlying(implicit ctx: Context): Type = resType
+ protected def computeSignature(implicit ctx: Context): Signature = resultSignature
+ def derivedExprType(resType: Type)(implicit ctx: Context) =
+ if (resType eq this.resType) this else ExprType(resType)
+ override def computeHash = doHash(resType)
+ }
+
+ final class CachedExprType(resultType: Type) extends ExprType(resultType)
+
+ object ExprType {
+ def apply(resultType: Type)(implicit ctx: Context) = {
+ assertUnerased()
+ unique(new CachedExprType(resultType))
+ }
+ }
+
+ /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */
+ class PolyType(val paramNames: List[TypeName], val variances: List[Int])(
+ paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
+ extends CachedProxyType with BindingType with MethodOrPoly {
+
+ /** The bounds of the type parameters */
+ val paramBounds: List[TypeBounds] = paramBoundsExp(this)
+
+ /** The result type of a PolyType / body of a type lambda */
+ val resType: Type = resultTypeExp(this)
+
+ assert(resType.isInstanceOf[TermType], this)
+ assert(paramNames.nonEmpty)
+
+ protected def computeSignature(implicit ctx: Context) = resultSignature
+
+ def isPolymorphicMethodType: Boolean = resType match {
+ case _: MethodType => true
+ case _ => false
+ }
+
+ /** Is this polytype a higher-kinded type lambda as opposed to a polymorphic
+ * method type? Only type lambdas are created with variances; that is how we can tell.
+ */
+ def isTypeLambda: Boolean = variances.nonEmpty
+
+ /** PolyParam references to all type parameters of this type */
+ lazy val paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _))
+
+ lazy val typeParams: List[LambdaParam] =
+ paramNames.indices.toList.map(new LambdaParam(this, _))
+
+ override def resultType(implicit ctx: Context) = resType
+ override def underlying(implicit ctx: Context) = resType
+
+ /** Instantiate result type by substituting parameters with given arguments */
+ final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type =
+ resultType.substParams(this, argTypes)
+
+ /** Instantiate parameter bounds by substituting parameters with given arguments */
+ final def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] =
+ paramBounds.mapConserve(_.substParams(this, argTypes).bounds)
+
+ def newLikeThis(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): PolyType =
+ PolyType.apply(paramNames, variances)(
+ x => paramBounds mapConserve (_.subst(this, x).bounds),
+ x => resType.subst(this, x))
+
+ def derivedPolyType(paramNames: List[TypeName] = this.paramNames,
+ paramBounds: List[TypeBounds] = this.paramBounds,
+ resType: Type = this.resType)(implicit ctx: Context) =
+ if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this
+ else newLikeThis(paramNames, paramBounds, resType)
+
+ def derivedLambdaAbstraction(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type =
+ resType match {
+ case resType @ TypeAlias(alias) =>
+ resType.derivedTypeAlias(newLikeThis(paramNames, paramBounds, alias))
+ case resType @ TypeBounds(lo, hi) =>
+ resType.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else newLikeThis(paramNames, paramBounds, lo),
+ newLikeThis(paramNames, paramBounds, hi))
+ case _ =>
+ derivedPolyType(paramNames, paramBounds, resType)
+ }
+
+ /** Merge nested polytypes into one polytype. Nested polytypes are normally not supported
+ * but can arise as temporary data structures.
+ */
+ def flatten(implicit ctx: Context): PolyType = resType match {
+ case that: PolyType =>
+ val shift = new TypeMap {
+ def apply(t: Type) = t match {
+ case PolyParam(`that`, n) => PolyParam(that, n + paramNames.length)
+ case t => mapOver(t)
+ }
+ }
+ PolyType(paramNames ++ that.paramNames)(
+ x => this.paramBounds.mapConserve(_.subst(this, x).bounds) ++
+ that.paramBounds.mapConserve(shift(_).subst(that, x).bounds),
+ x => shift(that.resultType).subst(that, x).subst(this, x))
+ case _ => this
+ }
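+ // Illustrative example (editorial, not from the original sources): flattening a type of
+ // the shape PolyType([A], ..., PolyType([B], ..., resTp)) yields a single
+ // PolyType([A, B], ..., resTp); references to the inner parameters are shifted by
+ // paramNames.length (here 1), so a reference to the inner parameter at index 0 becomes
+ // index 1 in the merged polytype.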
+
+ /** The type `[tparams := paramRefs] tp`, where `tparams` can be
+ * either a list of type parameter symbols or a list of lambda parameters
+ */
+ def lifted(tparams: List[TypeParamInfo], tp: Type)(implicit ctx: Context): Type =
+ tparams match {
+ case LambdaParam(poly, _) :: _ => tp.subst(poly, this)
+ case tparams: List[Symbol @unchecked] => tp.subst(tparams, paramRefs)
+ }
+
+ override def equals(other: Any) = other match {
+ case other: PolyType =>
+ other.paramNames == this.paramNames &&
+ other.paramBounds == this.paramBounds &&
+ other.resType == this.resType &&
+ other.variances == this.variances
+ case _ => false
+ }
+
+ override def toString = s"PolyType($variances, $paramNames, $paramBounds, $resType)"
+
+ override def computeHash = doHash(variances ::: paramNames, resType, paramBounds)
+ }
+
+ object PolyType {
+ def apply(paramNames: List[TypeName], variances: List[Int] = Nil)(
+ paramBoundsExp: PolyType => List[TypeBounds],
+ resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = {
+ val vs = if (variances.isEmpty) paramNames.map(alwaysZero) else variances
+ unique(new PolyType(paramNames, vs)(paramBoundsExp, resultTypeExp))
+ }
+
+ def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] =
+ Some((tl.typeParams, tl.resType))
+
+ def any(n: Int)(implicit ctx: Context) =
+ apply(tpnme.syntheticTypeParamNames(n), List.fill(n)(0))(
+ pt => List.fill(n)(TypeBounds.empty), pt => defn.AnyType)
+ }
+
+ // ----- HK types: LambdaParam, HKApply ---------------------
+
+ /** The parameter of a type lambda */
+ case class LambdaParam(tl: PolyType, n: Int) extends TypeParamInfo {
+ def isTypeParam(implicit ctx: Context) = true
+ def paramName(implicit ctx: Context): TypeName = tl.paramNames(n)
+ def paramBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n)
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = paramBounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = paramBounds
+ def paramVariance(implicit ctx: Context): Int = tl.variances(n)
+ def toArg: Type = PolyParam(tl, n)
+ def paramRef(implicit ctx: Context): Type = PolyParam(tl, n)
+ }
+
+ /** A higher kinded type application `C[T_1, ..., T_n]` */
+ abstract case class HKApply(tycon: Type, args: List[Type])
+ extends CachedProxyType with ValueType {
+
+ private var validSuper: Period = Nowhere
+ private var cachedSuper: Type = _
+
+ override def underlying(implicit ctx: Context): Type = tycon
+
+ override def superType(implicit ctx: Context): Type = {
+ if (ctx.period != validSuper) {
+ cachedSuper = tycon match {
+ case tp: PolyType => defn.AnyType
+ case tp: TypeVar if !tp.inst.exists =>
+ // supertype not stable, since underlying might change
+ return tp.underlying.applyIfParameterized(args)
+ case tp: TypeProxy => tp.superType.applyIfParameterized(args)
+ case _ => defn.AnyType
+ }
+ validSuper = ctx.period
+ }
+ cachedSuper
+ }
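+ // Note on the caching above (editorial): `cachedSuper` is revalidated whenever the current
+ // period changes; the uninstantiated-TypeVar case returns early without setting `validSuper`,
+ // so such results are deliberately not cached.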
+
+ def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match {
+ case tycon: TypeRef =>
+ tycon.info match {
+ case TypeBounds(lo, hi) =>
+ if (lo eq hi) superType // optimization, can profit from caching in this case
+ else lo.applyIfParameterized(args)
+ case _ => NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ def typeParams(implicit ctx: Context): List[TypeParamInfo] = {
+ val tparams = tycon.typeParams
+ if (tparams.isEmpty) PolyType.any(args.length).typeParams else tparams
+ }
+
+ def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type =
+ if ((tycon eq this.tycon) && (args eq this.args)) this
+ else tycon.appliedTo(args)
+
+ override def computeHash = doHash(tycon, args)
+
+ protected def checkInst(implicit ctx: Context): this.type = {
+ def check(tycon: Type): Unit = tycon.stripTypeVar match {
+ case tycon: TypeRef if !tycon.symbol.isClass =>
+ case _: PolyParam | ErrorType | _: WildcardType =>
+ case _: PolyType =>
+ assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this")
+ case tycon: AnnotatedType =>
+ check(tycon.underlying)
+ case _ =>
+ assert(false, s"illegal type constructor in $this")
+ }
+ if (Config.checkHKApplications) check(tycon)
+ this
+ }
+ }
+
+ final class CachedHKApply(tycon: Type, args: List[Type]) extends HKApply(tycon, args)
+
+ object HKApply {
+ def apply(tycon: Type, args: List[Type])(implicit ctx: Context) =
+ unique(new CachedHKApply(tycon, args)).checkInst
+ }
+
+ // ----- Bound types: MethodParam, PolyParam --------------------------
+
+ abstract class BoundType extends CachedProxyType with ValueType {
+ type BT <: Type
+ def binder: BT
+ // Dotty deviation: copyBoundType was called copy, but
+ // dotty always generates copy methods automatically, and therefore
+ // does not accept same-named method definitions in subclasses.
+ // Scala 2.x, on the other hand, requires them (not sure why!)
+ def copyBoundType(bt: BT): Type
+ }
+
+ abstract class ParamType extends BoundType {
+ def paramNum: Int
+ def paramName: Name
+ }
+
+ abstract case class MethodParam(binder: MethodType, paramNum: Int) extends ParamType with SingletonType {
+ type BT = MethodType
+
+ def paramName = binder.paramNames(paramNum)
+
+ override def underlying(implicit ctx: Context): Type = binder.paramTypes(paramNum)
+ def copyBoundType(bt: BT) = new MethodParamImpl(bt, paramNum)
+
+ // Need to customize hashCode and equals to prevent infinite recursion for dependent method types.
+ override def computeHash = addDelta(binder.identityHash, paramNum)
+ override def equals(that: Any) = that match {
+ case that: MethodParam =>
+ (this.binder eq that.binder) && this.paramNum == that.paramNum
+ case _ =>
+ false
+ }
+
+ override def toString = s"MethodParam($paramName)"
+ }
+
+ class MethodParamImpl(binder: MethodType, paramNum: Int) extends MethodParam(binder, paramNum)
+
+ object MethodParam {
+ def apply(binder: MethodType, paramNum: Int)(implicit ctx: Context): MethodParam = {
+ assertUnerased()
+ new MethodParamImpl(binder, paramNum)
+ }
+ }
+
+ /** A reference to the type parameter of `binder` at index `paramNum`. */
+ case class PolyParam(binder: PolyType, paramNum: Int) extends ParamType {
+ type BT = PolyType
+ def copyBoundType(bt: BT) = PolyParam(bt, paramNum)
+
+ /** Looking only at the structure of `bound`, is one of the following true?
+ * - fromBelow and param <:< bound
+ * - !fromBelow and param >:> bound
+ */
+ def occursIn(bound: Type, fromBelow: Boolean)(implicit ctx: Context): Boolean = bound.stripTypeVar match {
+ case bound: PolyParam => bound == this
+ case bound: AndOrType =>
+ def occ1 = occursIn(bound.tp1, fromBelow)
+ def occ2 = occursIn(bound.tp2, fromBelow)
+ if (fromBelow == bound.isAnd) occ1 && occ2 else occ1 || occ2
+ case _ => false
+ }
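+ // Editorial note: for an intersection bound checked from below, `param <:< A & B`
+ // requires the parameter to occur (structurally) in both sides, hence `&&`; for a
+ // union bound checked from below, `param <:< A | B` already holds if it occurs in
+ // either side, hence `||`. The `!fromBelow` cases are the exact duals.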
+
+ def paramName = binder.paramNames(paramNum)
+
+ override def underlying(implicit ctx: Context): Type = {
+ val bounds = binder.paramBounds
+ if (bounds == null) NoType // this can happen if the referenced generic type is not initialized yet
+ else bounds(paramNum)
+ }
+ // no customized hashCode/equals needed because cycle is broken in PolyType
+ override def toString =
+ try s"PolyParam($paramName)"
+ catch {
+ case ex: IndexOutOfBoundsException => s"PolyParam(<bad index: $paramNum>)"
+ }
+
+ override def computeHash = doHash(paramNum, binder.identityHash)
+
+ override def equals(that: Any) = that match {
+ case that: PolyParam =>
+ (this.binder eq that.binder) && this.paramNum == that.paramNum
+ case _ =>
+ false
+ }
+ }
+
+ /** A self-reference to an enclosing recursive type. */
+ case class RecThis(binder: RecType) extends BoundType with SingletonType {
+ type BT = RecType
+ override def underlying(implicit ctx: Context) = binder
+ def copyBoundType(bt: BT) = RecThis(bt)
+
+ // need to customize hashCode and equals to prevent infinite recursion
+ // between RecTypes and RecRefs.
+ override def computeHash = addDelta(binder.identityHash, 41)
+ override def equals(that: Any) = that match {
+ case that: RecThis => this.binder eq that.binder
+ case _ => false
+ }
+ override def toString =
+ try s"RecThis(${binder.hashCode})"
+ catch {
+ case ex: NullPointerException => s"RecThis(<under construction>)"
+ }
+ }
+
+ // ----- Skolem types -----------------------------------------------
+
+ /** A skolem type reference with underlying type `info`. */
+ abstract case class SkolemType(info: Type) extends UncachedProxyType with ValueType with SingletonType {
+ override def underlying(implicit ctx: Context) = info
+ def derivedSkolemType(info: Type)(implicit ctx: Context) =
+ if (info eq this.info) this else SkolemType(info)
+ override def hashCode: Int = identityHash
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ private var myRepr: String = null
+ def repr(implicit ctx: Context) = {
+ if (myRepr == null) myRepr = ctx.freshName("?")
+ myRepr
+ }
+
+ override def toString = s"Skolem($hashCode)"
+ }
+
+ final class CachedSkolemType(info: Type) extends SkolemType(info)
+
+ object SkolemType {
+ def apply(info: Type)(implicit ctx: Context) =
+ unique(new CachedSkolemType(info))
+ }
+
+ // ------------ Type variables ----------------------------------------
+
+ /** In a TypeApply tree, a TypeVar is created for each argument type to be inferred.
+ * Every type variable is referred to by exactly one inferred type parameter of some
+ * TypeApply tree.
+ *
+ * A type variable is essentially a switch that models some part of a substitution.
+ * It is first linked to `origin`, a poly param that's in the current constraint set.
+ * It can then be (once) instantiated to some other type. The instantiation is
+ * recorded in the type variable itself, or else, if the current typer state
+ * is different from the variable's creation state (meaning unrolls are possible),
+ * in the current typer state.
+ *
+ * @param origin The parameter that's tracked by the type variable.
+ * @param creatorState The typer state in which the variable was created.
+ * @param owningTree The function part of the TypeApply tree that introduces
+ * the type variable.
+ * @param owner The current owner of the context where the variable was created.
+ *
+ * `owningTree` and `owner` are used to determine whether a type-variable can be instantiated
+ * at some given point. See `Inferencing#interpolateUndetVars`.
+ */
+ final class TypeVar(val origin: PolyParam, creatorState: TyperState, val owningTree: untpd.Tree, val owner: Symbol) extends CachedProxyType with ValueType {
+
+ /** The permanent instance type of the variable, or NoType if none is given yet */
+ private[core] var inst: Type = NoType
+
+ /** The state owning the variable. This is at first `creatorState`, but it can
+ * be changed to an enclosing state on a commit.
+ */
+ private[core] var owningState = creatorState
+
+ /** The instance type of this variable, or NoType if the variable is currently
+ * uninstantiated
+ */
+ def instanceOpt(implicit ctx: Context): Type =
+ if (inst.exists) inst else {
+ ctx.typerState.ephemeral = true
+ ctx.typerState.instType(this)
+ }
+
+ /** Is the variable already instantiated? */
+ def isInstantiated(implicit ctx: Context) = instanceOpt.exists
+
+ /** Instantiate variable with given type */
+ private def instantiateWith(tp: Type)(implicit ctx: Context): Type = {
+ assert(tp ne this, s"self instantiation of ${tp.show}, constraint = ${ctx.typerState.constraint.show}")
+ typr.println(s"instantiating ${this.show} with ${tp.show}")
+ assert(ctx.typerState.constraint contains this) // !!! DEBUG
+ if ((ctx.typerState eq owningState) && !ctx.typeComparer.subtypeCheckInProgress)
+ inst = tp
+ ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp)
+ tp
+ }
+
+ /** Instantiate variable from the constraints over its `origin`.
+ * If `fromBelow` is true, the variable is instantiated to the lub
+ * of its lower bounds in the current constraint; otherwise it is
+ * instantiated to the glb of its upper bounds. However, a lower bound
+ * instantiation can be a singleton type only if the upper bound
+ * is also a singleton type.
+ */
+ def instantiate(fromBelow: Boolean)(implicit ctx: Context): Type = {
+ val inst = ctx.typeComparer.instanceType(origin, fromBelow)
+ if (ctx.typerState.isGlobalCommittable)
+ inst match {
+ case inst: PolyParam =>
+ assert(inst.binder.isTypeLambda, i"bad inst $this := $inst, constr = ${ctx.typerState.constraint}")
+ // If this fails, you might want to turn on Config.debugCheckConstraintsClosed
+ // to help find the root of the problem.
+ // Note: Parameters of type lambdas are excluded from the assertion because
+ // they might arise from ill-kinded code. See #1652
+ case _ =>
+ }
+ instantiateWith(inst)
+ }
+
+ /** Unwrap to instance (if instantiated) or origin (if not), until result
+ * is no longer a TypeVar
+ */
+ override def stripTypeVar(implicit ctx: Context): Type = {
+ val inst = instanceOpt
+ if (inst.exists) inst.stripTypeVar else origin
+ }
+
+ /** If the variable is instantiated, its instance, otherwise its origin */
+ override def underlying(implicit ctx: Context): Type = {
+ val inst = instanceOpt
+ if (inst.exists) inst
+ else {
+ ctx.typerState.ephemeral = true
+ origin
+ }
+ }
+
+ override def computeHash: Int = identityHash
+ override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
+
+ override def toString = {
+ def instStr = if (inst.exists) s" -> $inst" else ""
+ s"TypeVar($origin$instStr)"
+ }
+ }
+
+ // ------ ClassInfo, Type Bounds ------------------------------------------------------------
+
+ /** Roughly: the info of a class during a period.
+ * @param prefix The prefix on which parents, decls, and selfType need to be rebased.
+ * @param cls The class symbol.
+ * @param classParents The parent types of this class.
+ * These are all normalized to be TypeRefs by moving any refinements
+ * to be member definitions of the class itself.
+ * @param decls The symbols defined directly in this class.
+ * @param selfInfo The type of `this` in this class, if explicitly given,
+ * NoType otherwise. If the class is compiled from source, this can also
+ * be a reference to the self symbol containing the type.
+ */
+ abstract case class ClassInfo(
+ prefix: Type,
+ cls: ClassSymbol,
+ classParents: List[TypeRef],
+ decls: Scope,
+ selfInfo: DotClass /* should be: Type | Symbol */) extends CachedGroundType with TypeType {
+
+ /** The self type of a class is the conjunction of
+ * - the explicit self type if given (or the info of a given self symbol), and
+ * - the fully applied reference to the class itself.
+ */
+ def selfType(implicit ctx: Context): Type = {
+ if (selfTypeCache == null)
+ selfTypeCache = {
+ def fullRef = fullyAppliedRef
+ val given = givenSelfType
+ val raw =
+ if (!given.exists) fullRef
+ else if (cls is Module) given
+ else if (ctx.erasedTypes) fullRef
+ else AndType(given, fullRef)
+ raw//.asSeenFrom(prefix, cls.owner)
+ }
+ selfTypeCache
+ }
+
+ /** The explicitly given self type (self types of modules are assumed to be
+ * explicitly given here).
+ */
+ override def givenSelfType(implicit ctx: Context): Type = selfInfo match {
+ case tp: Type => tp
+ case self: Symbol => self.info
+ }
+
+ private var selfTypeCache: Type = null
+
+ private def fullyAppliedRef(base: Type, tparams: List[TypeSymbol])(implicit ctx: Context): Type = tparams match {
+ case tparam :: tparams1 =>
+ fullyAppliedRef(
+ RefinedType(base, tparam.name, TypeRef(cls.thisType, tparam).toBounds(tparam)),
+ tparams1)
+ case nil =>
+ base
+ }
+
+ /** The class type with all type parameters */
+ def fullyAppliedRef(implicit ctx: Context): Type = fullyAppliedRef(cls.typeRef, cls.typeParams)
+
+ private var typeRefCache: TypeRef = null
+
+ def typeRef(implicit ctx: Context): TypeRef = {
+ def clsDenot = if (prefix eq cls.owner.thisType) cls.denot else cls.denot.copySymDenotation(info = this)
+ if (typeRefCache == null)
+ typeRefCache =
+ if ((cls is PackageClass) || cls.owner.isTerm) symbolicTypeRef
+ else TypeRef(prefix, cls.name, clsDenot)
+ typeRefCache
+ }
+
+ def symbolicTypeRef(implicit ctx: Context): TypeRef = TypeRef(prefix, cls)
+
+ // cached because baseType needs parents
+ private var parentsCache: List[TypeRef] = null
+
+ /** The parent type refs as seen from the given prefix */
+ override def parents(implicit ctx: Context): List[TypeRef] = {
+ if (parentsCache == null)
+ parentsCache = cls.classParents.mapConserve(_.asSeenFrom(prefix, cls.owner).asInstanceOf[TypeRef])
+ parentsCache
+ }
+
+ /** The parent types with all type arguments */
+ override def parentsWithArgs(implicit ctx: Context): List[Type] =
+ parents mapConserve { pref =>
+ ((pref: Type) /: pref.classSymbol.typeParams) { (parent, tparam) =>
+ val targSym = decls.lookup(tparam.name)
+ if (targSym.exists) RefinedType(parent, targSym.name, targSym.info)
+ else parent
+ }
+ }
+
+ def derivedClassInfo(prefix: Type)(implicit ctx: Context) =
+ if (prefix eq this.prefix) this
+ else ClassInfo(prefix, cls, classParents, decls, selfInfo)
+
+ def derivedClassInfo(prefix: Type = this.prefix, classParents: List[TypeRef] = classParents, decls: Scope = this.decls, selfInfo: DotClass = this.selfInfo)(implicit ctx: Context) =
+ if ((prefix eq this.prefix) && (classParents eq this.classParents) && (decls eq this.decls) && (selfInfo eq this.selfInfo)) this
+ else ClassInfo(prefix, cls, classParents, decls, selfInfo)
+
+ override def computeHash = doHash(cls, prefix)
+
+ override def toString = s"ClassInfo($prefix, $cls)"
+ }
+
+ class CachedClassInfo(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass)
+ extends ClassInfo(prefix, cls, classParents, decls, selfInfo)
+
+ /** A class for temporary class infos where `parents` are not yet known. */
+ final class TempClassInfo(prefix: Type, cls: ClassSymbol, decls: Scope, selfInfo: DotClass)
+ extends CachedClassInfo(prefix, cls, Nil, decls, selfInfo) {
+
+ /** A list of actions that were suspended because they rely on the class info of `cls`
+ * no longer being temporary. These actions will be performed once `cls` gets a real
+ * ClassInfo.
+ */
+ private var suspensions: List[() => Unit] = Nil
+
+ def addSuspension(suspension: () => Unit): Unit = suspensions ::= suspension
+
+ /** Install classinfo with known parents in `denot` and resume all suspensions */
+ def finalize(denot: SymDenotation, parents: List[TypeRef])(implicit ctx: Context) = {
+ denot.info = derivedClassInfo(classParents = parents)
+ suspensions.foreach(_())
+ }
+ }
+
+ object ClassInfo {
+ def apply(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass = NoType)(implicit ctx: Context) =
+ unique(new CachedClassInfo(prefix, cls, classParents, decls, selfInfo))
+ }
+
+ /** Type bounds >: lo <: hi */
+ abstract case class TypeBounds(lo: Type, hi: Type) extends CachedProxyType with TypeType {
+
+ assert(lo.isInstanceOf[TermType])
+ assert(hi.isInstanceOf[TermType])
+
+ def variance: Int = 0
+
+ override def underlying(implicit ctx: Context): Type = hi
+
+ /** The non-alias type bounds type with given bounds */
+ def derivedTypeBounds(lo: Type, hi: Type)(implicit ctx: Context) =
+ if ((lo eq this.lo) && (hi eq this.hi) && (variance == 0)) this
+ else TypeBounds(lo, hi)
+
+ /** If this is an alias, a derived alias with the new variance;
+ * otherwise the type itself.
+ */
+ def withVariance(variance: Int)(implicit ctx: Context) = this match {
+ case tp: TypeAlias => tp.derivedTypeAlias(tp.alias, variance)
+ case _ => this
+ }
+
+ def contains(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp: TypeBounds => lo <:< tp.lo && tp.hi <:< hi
+ case tp: ClassInfo =>
+ // Note: Taking a normal typeRef does not work here. A normal ref might also
+ // contain other information about the named type (e.g. bounds).
+ contains(tp.symbolicTypeRef)
+ case _ => lo <:< tp && tp <:< hi
+ }
+
+ def & (that: TypeBounds)(implicit ctx: Context): TypeBounds =
+ if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) that
+ else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) this
+ else TypeBounds(this.lo | that.lo, this.hi & that.hi)
+
+ def | (that: TypeBounds)(implicit ctx: Context): TypeBounds =
+ if ((this.lo frozen_<:< that.lo) && (that.hi frozen_<:< this.hi)) this
+ else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) that
+ else TypeBounds(this.lo & that.lo, this.hi | that.hi)
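+ // Editorial example: `&` keeps the tighter of two bound intervals, e.g. intersecting
+ // `>: Nothing <: AnyVal` with `>: Nothing <: Int` yields `>: Nothing <: Int` (the
+ // second operand is returned unchanged); `|` dually yields the looser interval
+ // `>: Nothing <: AnyVal`.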
+
+ override def & (that: Type)(implicit ctx: Context) = that match {
+ case that: TypeBounds => this & that
+ case _ => super.& (that)
+ }
+
+ override def | (that: Type)(implicit ctx: Context) = that match {
+ case that: TypeBounds => this | that
+ case _ => super.| (that)
+ }
+
+ /** The implied bounds, where aliases are mapped to intervals between
+ * Nothing and Any
+ */
+ def boundsInterval(implicit ctx: Context): TypeBounds = this
+
+ /** If this type and that type have the same variance, this variance, otherwise 0 */
+ final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2
+
+ override def computeHash = doHash(variance, lo, hi)
+ override def equals(that: Any): Boolean = that match {
+ case that: TypeBounds =>
+ (this.lo eq that.lo) && (this.hi eq that.hi) && (this.variance == that.variance)
+ case _ =>
+ false
+ }
+
+ override def toString =
+ if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)"
+ }
+
+ class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
+
+ abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) {
+ /** pre: this is a type alias */
+ def derivedTypeAlias(alias: Type, variance: Int = this.variance)(implicit ctx: Context) =
+ if ((alias eq this.alias) && (variance == this.variance)) this
+ else TypeAlias(alias, variance)
+
+ override def & (that: TypeBounds)(implicit ctx: Context): TypeBounds = {
+ val v = this commonVariance that
+ if (v > 0) derivedTypeAlias(this.hi & that.hi, v)
+ else if (v < 0) derivedTypeAlias(this.lo | that.lo, v)
+ else super.& (that)
+ }
+
+ override def | (that: TypeBounds)(implicit ctx: Context): TypeBounds = {
+ val v = this commonVariance that
+ if (v > 0) derivedTypeAlias(this.hi | that.hi, v)
+ else if (v < 0) derivedTypeAlias(this.lo & that.lo, v)
+ else super.| (that)
+ }
+
+ override def boundsInterval(implicit ctx: Context): TypeBounds =
+ if (variance == 0) this
+ else if (variance < 0) TypeBounds.lower(alias)
+ else TypeBounds.upper(alias)
+ }
+
+ class CachedTypeAlias(alias: Type, variance: Int, hc: Int) extends TypeAlias(alias, variance) {
+ myHash = hc
+ }
+
+ object TypeBounds {
+ def apply(lo: Type, hi: Type)(implicit ctx: Context): TypeBounds =
+ unique(new RealTypeBounds(lo, hi))
+ def empty(implicit ctx: Context) = apply(defn.NothingType, defn.AnyType)
+ def upper(hi: Type)(implicit ctx: Context) = apply(defn.NothingType, hi)
+ def lower(lo: Type)(implicit ctx: Context) = apply(lo, defn.AnyType)
+ }
+
+ object TypeAlias {
+ def apply(alias: Type, variance: Int = 0)(implicit ctx: Context) =
+ ctx.uniqueTypeAliases.enterIfNew(alias, variance)
+ def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias)
+ }
+
+ // ----- Annotated and Import types -----------------------------------------------
+
+ /** An annotated type tpe @ annot */
+ case class AnnotatedType(tpe: Type, annot: Annotation)
+ extends UncachedProxyType with ValueType {
+ // TODO: cache them? But this only makes sense if annotations and trees are also cached.
+ override def underlying(implicit ctx: Context): Type = tpe
+ def derivedAnnotatedType(tpe: Type, annot: Annotation) =
+ if ((tpe eq this.tpe) && (annot eq this.annot)) this
+ else AnnotatedType(tpe, annot)
+
+ override def stripTypeVar(implicit ctx: Context): Type =
+ derivedAnnotatedType(tpe.stripTypeVar, annot)
+ override def stripAnnots(implicit ctx: Context): Type = tpe.stripAnnots
+ }
+
+ object AnnotatedType {
+ def make(underlying: Type, annots: List[Annotation]) =
+ (underlying /: annots)(AnnotatedType(_, _))
+ }
+
+ // Special type objects and classes -----------------------------------------------------
+
+ /** The type of an erased array */
+ abstract case class JavaArrayType(elemType: Type) extends CachedGroundType with ValueType {
+ override def computeHash = doHash(elemType)
+ def derivedJavaArrayType(elemtp: Type)(implicit ctx: Context) =
+ if (elemtp eq this.elemType) this else JavaArrayType(elemtp)
+ }
+ final class CachedJavaArrayType(elemType: Type) extends JavaArrayType(elemType)
+ object JavaArrayType {
+ def apply(elemType: Type)(implicit ctx: Context) = unique(new CachedJavaArrayType(elemType))
+ }
+
+ /** The type of an import clause tree */
+ case class ImportType(expr: Tree) extends UncachedGroundType
+
+ /** Sentinel for "missing type" */
+ @sharable case object NoType extends CachedGroundType {
+ override def exists = false
+ override def computeHash = hashSeed
+ }
+
+ /** Missing prefix */
+ @sharable case object NoPrefix extends CachedGroundType {
+ override def computeHash = hashSeed
+ }
+
+ abstract class ErrorType extends UncachedGroundType with ValueType
+
+ object ErrorType extends ErrorType
+
+ /* Type used to track Select nodes that could not resolve a member and whose qualifier is a scala.Dynamic. */
+ object TryDynamicCallType extends ErrorType
+
+ /** Wildcard type, possibly with bounds */
+ abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType {
+ def derivedWildcardType(optBounds: Type)(implicit ctx: Context) =
+ if (optBounds eq this.optBounds) this
+ else if (!optBounds.exists) WildcardType
+ else WildcardType(optBounds.asInstanceOf[TypeBounds])
+ override def computeHash = doHash(optBounds)
+ }
+
+ final class CachedWildcardType(optBounds: Type) extends WildcardType(optBounds)
+
+ @sharable object WildcardType extends WildcardType(NoType) {
+ def apply(bounds: TypeBounds)(implicit ctx: Context) = unique(new CachedWildcardType(bounds))
+ }
+
+ /** An extractor for single abstract method types.
+ * A type is a SAM type if it is a reference to a class or trait, which
+ *
+ * - has a single abstract method with a method type (ExprType
+ * and PolyType not allowed!)
+ * - can be instantiated without arguments or with just () as argument.
+ *
+ * The pattern `SAMType(denot)` matches a SAM type, where `denot` is the
+ * denotation of the single abstract method as a member of the type.
+ */
+ object SAMType {
+ def zeroParamClass(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp: ClassInfo =>
+ def zeroParams(tp: Type): Boolean = tp match {
+ case pt: PolyType => zeroParams(pt.resultType)
+ case mt: MethodType => mt.paramTypes.isEmpty && !mt.resultType.isInstanceOf[MethodType]
+ case et: ExprType => true
+ case _ => false
+ }
+ if ((tp.cls is Trait) || zeroParams(tp.cls.primaryConstructor.info)) tp // !!! needs to be adapted once traits have parameters
+ else NoType
+ case tp: TypeRef =>
+ zeroParamClass(tp.underlying)
+ case tp: RefinedType =>
+ zeroParamClass(tp.underlying)
+ case tp: TypeBounds =>
+ zeroParamClass(tp.underlying)
+ case tp: TypeVar =>
+ zeroParamClass(tp.underlying)
+ case _ =>
+ NoType
+ }
+ def isInstantiatable(tp: Type)(implicit ctx: Context): Boolean = zeroParamClass(tp) match {
+ case cinfo: ClassInfo =>
+ val tref = tp.narrow
+ val selfType = cinfo.selfType.asSeenFrom(tref, cinfo.cls)
+ tref <:< selfType
+ case _ =>
+ false
+ }
+ def unapply(tp: Type)(implicit ctx: Context): Option[SingleDenotation] =
+ if (isInstantiatable(tp)) {
+ val absMems = tp.abstractTermMembers
+ // println(s"absMems: ${absMems map (_.show) mkString ", "}")
+ if (absMems.size == 1)
+ absMems.head.info match {
+ case mt: MethodType if !mt.isDependent => Some(absMems.head)
+ case _ => None
+ }
+ else if (tp isRef defn.PartialFunctionClass)
+ // To maintain compatibility with 2.x, we treat PartialFunction specially,
+ // pretending it is a SAM type. In the future it would be better to merge
+ // Function and PartialFunction, have Function1 contain an isDefinedAt method
+ // def isDefinedAt(x: T) = true
+ // and overwrite that method whenever the function body is a sequence of
+ // case clauses.
+ absMems.find(_.symbol.name == nme.apply)
+ else None
+ }
+ else None
+ }
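+ // Editorial illustration (not from the original sources): a candidate SAM type is,
+ // for example, a reference to a trait such as
+ //   trait Callback { def run(x: Int): Unit }
+ // Matching such a reference against `SAMType(mbr)` binds `mbr` to the denotation of
+ // `run`, whose info is the method type (x: Int)Unit. A trait with two abstract methods,
+ // or one whose only abstract member has a polymorphic or parameterless (ExprType) info,
+ // does not match.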
+
+ // ----- TypeMaps --------------------------------------------------------------------
+
+ abstract class TypeMap(implicit protected val ctx: Context) extends (Type => Type) { thisMap =>
+
+ protected def stopAtStatic = true
+
+ def apply(tp: Type): Type
+
+ protected var variance = 1
+
+ protected def derivedSelect(tp: NamedType, pre: Type): Type =
+ tp.derivedSelect(pre)
+ protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type): Type =
+ tp.derivedRefinedType(parent, tp.refinedName, info)
+ protected def derivedRecType(tp: RecType, parent: Type): Type =
+ tp.rebind(parent)
+ protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type =
+ tp.derivedTypeAlias(alias)
+ protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type =
+ tp.derivedTypeBounds(lo, hi)
+ protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type =
+ tp.derivedSuperType(thistp, supertp)
+ protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ tp.derivedAppliedType(tycon, args)
+ protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type =
+ tp.derivedAndOrType(tp1, tp2)
+ protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type =
+ tp.derivedAnnotatedType(underlying, annot)
+ protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type =
+ tp.derivedWildcardType(bounds)
+ protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type =
+ tp.derivedClassInfo(pre)
+ protected def derivedJavaArrayType(tp: JavaArrayType, elemtp: Type): Type =
+ tp.derivedJavaArrayType(elemtp)
+ protected def derivedMethodType(tp: MethodType, formals: List[Type], restpe: Type): Type =
+ tp.derivedMethodType(tp.paramNames, formals, restpe)
+ protected def derivedExprType(tp: ExprType, restpe: Type): Type =
+ tp.derivedExprType(restpe)
+ protected def derivedPolyType(tp: PolyType, pbounds: List[TypeBounds], restpe: Type): Type =
+ tp.derivedPolyType(tp.paramNames, pbounds, restpe)
+
+ /** Map this function over given type */
+ def mapOver(tp: Type): Type = {
+ implicit val ctx: Context = this.ctx // Dotty deviation: implicits need explicit type
+ tp match {
+ case tp: NamedType =>
+ if (stopAtStatic && tp.symbol.isStatic) tp
+ else derivedSelect(tp, this(tp.prefix))
+
+ case _: ThisType
+ | _: BoundType
+ | NoPrefix => tp
+
+ case tp: RefinedType =>
+ derivedRefinedType(tp, this(tp.parent), this(tp.refinedInfo))
+
+ case tp: TypeAlias =>
+ val saved = variance
+ variance = variance * tp.variance
+ val alias1 = this(tp.alias)
+ variance = saved
+ derivedTypeAlias(tp, alias1)
+
+ case tp: TypeBounds =>
+ variance = -variance
+ val lo1 = this(tp.lo)
+ variance = -variance
+ derivedTypeBounds(tp, lo1, this(tp.hi))
+
+ case tp: MethodType =>
+ def mapOverMethod = {
+ variance = -variance
+ val ptypes1 = tp.paramTypes mapConserve this
+ variance = -variance
+ derivedMethodType(tp, ptypes1, this(tp.resultType))
+ }
+ mapOverMethod
+
+ case tp: ExprType =>
+ derivedExprType(tp, this(tp.resultType))
+
+ case tp: PolyType =>
+ def mapOverPoly = {
+ variance = -variance
+ val bounds1 = tp.paramBounds.mapConserve(this).asInstanceOf[List[TypeBounds]]
+ variance = -variance
+ derivedPolyType(tp, bounds1, this(tp.resultType))
+ }
+ mapOverPoly
+
+ case tp: RecType =>
+ derivedRecType(tp, this(tp.parent))
+
+ case tp @ SuperType(thistp, supertp) =>
+ derivedSuperType(tp, this(thistp), this(supertp))
+
+ case tp: LazyRef =>
+ LazyRef(() => this(tp.ref))
+
+ case tp: ClassInfo =>
+ mapClassInfo(tp)
+
+ case tp: TypeVar =>
+ val inst = tp.instanceOpt
+ if (inst.exists) apply(inst) else tp
+
+ case tp: HKApply =>
+ def mapArg(arg: Type, tparam: TypeParamInfo): Type = {
+ val saved = variance
+ variance *= tparam.paramVariance
+ try this(arg)
+ finally variance = saved
+ }
+ derivedAppliedType(tp, this(tp.tycon),
+ tp.args.zipWithConserve(tp.typeParams)(mapArg))
+
+ case tp: AndOrType =>
+ derivedAndOrType(tp, this(tp.tp1), this(tp.tp2))
+
+ case tp: SkolemType =>
+ tp
+
+ case tp @ AnnotatedType(underlying, annot) =>
+ val underlying1 = this(underlying)
+ if (underlying1 eq underlying) tp
+ else derivedAnnotatedType(tp, underlying1, mapOver(annot))
+
+ case tp @ WildcardType =>
+ derivedWildcardType(tp, mapOver(tp.optBounds))
+
+ case tp: JavaArrayType =>
+ derivedJavaArrayType(tp, this(tp.elemType))
+
+ case tp: ProtoType =>
+ tp.map(this)
+
+ case _ =>
+ tp
+ }
+ }
+
+ private def treeTypeMap = new TreeTypeMap(typeMap = this)
+
+ def mapOver(syms: List[Symbol]): List[Symbol] = ctx.mapSymbols(syms, treeTypeMap)
+
+ def mapOver(scope: Scope): Scope = {
+ val elems = scope.toList
+ val elems1 = mapOver(elems)
+ if (elems1 eq elems) scope
+ else newScopeWith(elems1: _*)
+ }
+
+ def mapOver(annot: Annotation): Annotation =
+ annot.derivedAnnotation(mapOver(annot.tree))
+
+ def mapOver(tree: Tree): Tree = treeTypeMap(tree)
+
+ /** Can be overridden. By default, only the prefix is mapped. */
+ protected def mapClassInfo(tp: ClassInfo): Type =
+ derivedClassInfo(tp, this(tp.prefix))
+
+ def andThen(f: Type => Type): TypeMap = new TypeMap {
+ override def stopAtStatic = thisMap.stopAtStatic
+ def apply(tp: Type) = f(thisMap(tp))
+ }
+ }
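+ // Editorial sketch (not part of the original file): a minimal concrete TypeMap.
+ // It strips annotations and otherwise just recurses via `mapOver`:
+ //
+ //   class StripAnnotsMap(implicit ctx: Context) extends TypeMap {
+ //     def apply(tp: Type): Type = tp match {
+ //       case AnnotatedType(underlying, _) => this(underlying) // drop the annotation, keep mapping
+ //       case _ => mapOver(tp)                                 // structural recursion for everything else
+ //     }
+ //   }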
+
+ /** A type map that maps also parents and self type of a ClassInfo */
+ abstract class DeepTypeMap(implicit ctx: Context) extends TypeMap {
+ override def mapClassInfo(tp: ClassInfo) = {
+ val prefix1 = this(tp.prefix)
+ val parents1 = (tp.parents mapConserve this).asInstanceOf[List[TypeRef]]
+ val selfInfo1 = tp.selfInfo match {
+ case selfInfo: Type => this(selfInfo)
+ case selfInfo => selfInfo
+ }
+ tp.derivedClassInfo(prefix1, parents1, tp.decls, selfInfo1)
+ }
+ }
+
+ @sharable object IdentityTypeMap extends TypeMap()(NoContext) {
+ override def stopAtStatic = true
+ def apply(tp: Type) = tp
+ }
+
+ abstract class ApproximatingTypeMap(implicit ctx: Context) extends TypeMap { thisMap =>
+ def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType) =
+ if (variance == 0) NoType
+ else apply(if (variance < 0) lo else hi)
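+ // Editorial note: `approx` replaces a type that cannot be mapped exactly by a bound that
+ // is sound for the current variance -- the mapped `hi` in covariant positions, the mapped
+ // `lo` in contravariant ones, and NoType when the variance is 0, where no one-sided
+ // approximation is safe.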
+
+ override protected def derivedSelect(tp: NamedType, pre: Type) =
+ if (pre eq tp.prefix) tp
+ else tp.info match {
+ case TypeAlias(alias) => apply(alias) // try to heal by following aliases
+ case _ =>
+ if (pre.exists && !pre.isRef(defn.NothingClass) && variance > 0) tp.derivedSelect(pre)
+ else tp.info match {
+ case TypeBounds(lo, hi) => approx(lo, hi)
+ case _ => approx()
+ }
+ }
+ override protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type) =
+ if (parent.exists && info.exists) tp.derivedRefinedType(parent, tp.refinedName, info)
+ else approx(hi = parent)
+ override protected def derivedRecType(tp: RecType, parent: Type) =
+ if (parent.exists) tp.rebind(parent)
+ else approx()
+ override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) =
+ if (alias.exists) tp.derivedTypeAlias(alias)
+ else approx(NoType, TypeBounds.empty)
+ override protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type) =
+ if (lo.exists && hi.exists) tp.derivedTypeBounds(lo, hi)
+ else approx(NoType,
+ if (lo.exists) TypeBounds.lower(lo)
+ else if (hi.exists) TypeBounds.upper(hi)
+ else TypeBounds.empty)
+ override protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type) =
+ if (thistp.exists && supertp.exists) tp.derivedSuperType(thistp, supertp)
+ else NoType
+ override protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ if (tycon.exists && args.forall(_.exists)) tp.derivedAppliedType(tycon, args)
+ else approx() // This is rather coarse, but to do better is a bit complicated
+ override protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type) =
+ if (tp1.exists && tp2.exists) tp.derivedAndOrType(tp1, tp2)
+ else if (tp.isAnd) approx(hi = tp1 & tp2) // if one of tp1d, tp2d exists, it is the result of tp1d & tp2d
+ else approx(lo = tp1 & tp2)
+ override protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation) =
+ if (underlying.exists) tp.derivedAnnotatedType(underlying, annot)
+ else NoType
+ override protected def derivedWildcardType(tp: WildcardType, bounds: Type) =
+ if (bounds.exists) tp.derivedWildcardType(bounds)
+ else WildcardType
+ override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type =
+ if (pre.exists) tp.derivedClassInfo(pre)
+ else NoType
+ }
+
+ // ----- TypeAccumulators ----------------------------------------------------
+
+ abstract class TypeAccumulator[T](implicit protected val ctx: Context) extends ((T, Type) => T) {
+
+ protected def stopAtStatic = true
+
+ def apply(x: T, tp: Type): T
+
+ protected def applyToAnnot(x: T, annot: Annotation): T = x // don't go into annotations
+
+ protected var variance = 1
+
+ protected def applyToPrefix(x: T, tp: NamedType) = {
+ val saved = variance
+ variance = 0
+ val result = this(x, tp.prefix)
+ variance = saved
+ result
+ }
+
+ def foldOver(x: T, tp: Type): T = tp match {
+ case tp: TypeRef =>
+ if (stopAtStatic && tp.symbol.isStatic) x
+ else {
+ val tp1 = tp.prefix.lookupRefined(tp.name)
+ if (tp1.exists) this(x, tp1) else applyToPrefix(x, tp)
+ }
+ case tp: TermRef =>
+ if (stopAtStatic && tp.currentSymbol.isStatic) x
+ else applyToPrefix(x, tp)
+
+ case _: ThisType
+ | _: BoundType
+ | NoPrefix => x
+
+ case tp: RefinedType =>
+ this(this(x, tp.parent), tp.refinedInfo)
+
+ case bounds @ TypeBounds(lo, hi) =>
+ if (lo eq hi) {
+ val saved = variance
+ variance = variance * bounds.variance
+ val result = this(x, lo)
+ variance = saved
+ result
+ }
+ else {
+ variance = -variance
+ val y = this(x, lo)
+ variance = -variance
+ this(y, hi)
+ }
+
+ case tp @ MethodType(pnames, ptypes) =>
+ variance = -variance
+ val y = foldOver(x, ptypes)
+ variance = -variance
+ this(y, tp.resultType)
+
+ case ExprType(restpe) =>
+ this(x, restpe)
+
+ case tp: PolyType =>
+ variance = -variance
+ val y = foldOver(x, tp.paramBounds)
+ variance = -variance
+ this(y, tp.resultType)
+
+ case tp: RecType =>
+ this(x, tp.parent)
+
+ case SuperType(thistp, supertp) =>
+ this(this(x, thistp), supertp)
+
+ case tp @ ClassInfo(prefix, _, _, _, _) =>
+ this(x, prefix)
+
+ case tp @ HKApply(tycon, args) =>
+ def foldArgs(x: T, tparams: List[TypeParamInfo], args: List[Type]): T =
+ if (args.isEmpty) {
+ assert(tparams.isEmpty)
+ x
+ }
+ else {
+ val tparam = tparams.head
+ val saved = variance
+ variance *= tparam.paramVariance
+ val acc =
+ try this(x, args.head)
+ finally variance = saved
+ foldArgs(acc, tparams.tail, args.tail)
+ }
+ foldArgs(this(x, tycon), tp.typeParams, args)
+
+ case tp: AndOrType =>
+ this(this(x, tp.tp1), tp.tp2)
+
+ case tp: SkolemType =>
+ this(x, tp.info)
+
+ case AnnotatedType(underlying, annot) =>
+ this(applyToAnnot(x, annot), underlying)
+
+ case tp: TypeVar =>
+ this(x, tp.underlying)
+
+ case tp: WildcardType =>
+ this(x, tp.optBounds)
+
+ case tp: JavaArrayType =>
+ this(x, tp.elemType)
+
+ case tp: LazyRef =>
+ this(x, tp.ref)
+
+ case tp: ProtoType =>
+ tp.fold(x, this)
+
+ case _ => x
+ }
+
+ final def foldOver(x: T, ts: List[Type]): T = ts match {
+ case t :: ts1 => foldOver(apply(x, t), ts1)
+ case nil => x
+ }
+ }
+
+ abstract class TypeTraverser(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ def traverse(tp: Type): Unit
+ def apply(x: Unit, tp: Type): Unit = traverse(tp)
+ protected def traverseChildren(tp: Type) = foldOver((), tp)
+ }
+
+ class ExistsAccumulator(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
+ override def stopAtStatic = false
+ def apply(x: Boolean, tp: Type) =
+ x || p(tp) || (forceLazy || !tp.isInstanceOf[LazyRef]) && foldOver(x, tp)
+ }
+
+ class ForeachAccumulator(p: Type => Unit, override val stopAtStatic: Boolean)(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp)
+ }
+
+ class HasUnsafeNonAccumulator(implicit ctx: Context) extends TypeAccumulator[Boolean] {
+ def apply(x: Boolean, tp: Type) = x || tp.isUnsafeNonvariant || foldOver(x, tp)
+ }
+
+ class NamedPartsAccumulator(p: NamedType => Boolean, excludeLowerBounds: Boolean = false)
+ (implicit ctx: Context) extends TypeAccumulator[mutable.Set[NamedType]] {
+ override def stopAtStatic = false
+ def maybeAdd(x: mutable.Set[NamedType], tp: NamedType) = if (p(tp)) x += tp else x
+ val seen: mutable.Set[Type] = mutable.Set()
+ def apply(x: mutable.Set[NamedType], tp: Type): mutable.Set[NamedType] =
+ if (seen contains tp) x
+ else {
+ seen += tp
+ tp match {
+ case tp: TermRef =>
+ apply(foldOver(maybeAdd(x, tp), tp), tp.underlying)
+ case tp: TypeRef =>
+ foldOver(maybeAdd(x, tp), tp)
+ case TypeBounds(lo, hi) =>
+ if (!excludeLowerBounds) apply(x, lo)
+ apply(x, hi)
+ case tp: ThisType =>
+ apply(x, tp.tref)
+ case tp: ConstantType =>
+ apply(x, tp.underlying)
+ case tp: MethodParam =>
+ apply(x, tp.underlying)
+ case tp: PolyParam =>
+ apply(x, tp.underlying)
+ case _ =>
+ foldOver(x, tp)
+ }
+ }
+ }
+
+ // ----- Name Filters --------------------------------------------------
+
+ /** A name filter selects or discards a member name of a type `pre`.
+ * To enable efficient caching, name filters have to satisfy the
+ * following invariant: If `keep` is a name filter, and `pre` has
+ * class `C` as a base class, then
+ *
+ * keep(pre, name) implies keep(C.this, name)
+ */
+ abstract class NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean
+ }
+
+ /** A filter for names of abstract types of a given type */
+ object abstractTypeNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTypeName && {
+ val mbr = pre.nonPrivateMember(name)
+ (mbr.symbol is Deferred) && mbr.info.isInstanceOf[RealTypeBounds]
+ }
+ }
+
+ /** A filter for names of non-class types of a given type */
+ object nonClassTypeNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTypeName && {
+ val mbr = pre.member(name)
+ mbr.symbol.isType && !mbr.symbol.isClass
+ }
+ }
+
+ /** A filter for names of deferred term definitions of a given type */
+ object abstractTermNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTermName && pre.nonPrivateMember(name).hasAltWith(_.symbol is Deferred)
+ }
+
+ object typeNameFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = name.isTypeName
+ }
+
+ object fieldFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean =
+ name.isTermName && (pre member name).hasAltWith(!_.symbol.is(Method))
+ }
+
+ object takeAllFilter extends NameFilter {
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = true
+ }
+
+ object implicitFilter extends NameFilter {
+ /** A dummy filter method.
+ * Implicit filtering is handled specially in computeMemberNames, so
+ * no post-filtering is needed.
+ */
+ def apply(pre: Type, name: Name)(implicit ctx: Context): Boolean = true
+ }
+
+ // ----- Exceptions -------------------------------------------------------------
+
+ class TypeError(msg: String) extends Exception(msg)
+
+ class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])
+ extends TypeError(
+ s"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}")
+
+ class MissingType(pre: Type, name: Name)(implicit ctx: Context) extends TypeError(
+ i"""cannot resolve reference to type $pre.$name
+ |the classfile defining the type might be missing from the classpath${otherReason(pre)}""") {
+ if (ctx.debug) printStackTrace()
+ }
+
+ private def otherReason(pre: Type)(implicit ctx: Context): String = pre match {
+ case pre: ThisType if pre.givenSelfType.exists =>
+ i"\nor the self type of $pre might not contain all transitive dependencies"
+ case _ => ""
+ }
+
+ class CyclicReference private (val denot: SymDenotation)
+ extends TypeError(s"cyclic reference involving $denot") {
+ def show(implicit ctx: Context) = s"cyclic reference involving ${denot.show}"
+ }
+
+ object CyclicReference {
+ def apply(denot: SymDenotation)(implicit ctx: Context): CyclicReference = {
+ val ex = new CyclicReference(denot)
+ if (!(ctx.mode is Mode.CheckCyclic)) {
+ cyclicErrors.println(ex.getMessage)
+ for (elem <- ex.getStackTrace take 200)
+ cyclicErrors.println(elem.toString)
+ }
+ ex
+ }
+ }
+
+ class MergeError(msg: String, val tp1: Type, val tp2: Type) extends TypeError(msg)
+
+ // ----- Debug ---------------------------------------------------------
+
+ @sharable var debugTrace = false
+
+ val watchList = List[String](
+ ) map (_.toTypeName)
+
+ def isWatched(tp: Type) = tp match {
+ case TypeRef(_, name) => watchList contains name
+ case _ => false
+ }
+
+ // ----- Decorator implicits --------------------------------------------
+
+ implicit def decorateTypeApplications(tpe: Type): TypeApplications = new TypeApplications(tpe)
+}
diff --git a/compiler/src/dotty/tools/dotc/core/Uniques.scala b/compiler/src/dotty/tools/dotc/core/Uniques.scala
new file mode 100644
index 000000000..cb9670c69
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/Uniques.scala
@@ -0,0 +1,128 @@
+package dotty.tools.dotc
+package core
+
+import Types._, Contexts._, util.Stats._, Hashable._, Names._
+import config.Config
+import util.HashSet
+
+/** Defines operation `unique` for hash-consing types.
+ * Also defines specialized hash sets for hash consing uniques of a specific type.
+ * All sets offer an `enterIfNew` method which checks whether a type
+ * with the given parts exists already and creates a new one if not.
+ */
+object Uniques {
+
+ private def recordCaching(tp: Type): Unit = recordCaching(tp.hash, tp.getClass)
+ private def recordCaching(h: Int, clazz: Class[_]): Unit =
+ if (h == NotCached) {
+ record("uncached-types")
+ record(s"uncached: $clazz")
+ } else {
+ record("cached-types")
+ record(s"cached: $clazz")
+ }
+
+ def unique[T <: Type](tp: T)(implicit ctx: Context): T = {
+ if (monitored) recordCaching(tp)
+ if (tp.hash == NotCached) tp
+ else if (monitored) {
+ val size = ctx.uniques.size
+ val result = ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T]
+ if (ctx.uniques.size > size) record(s"fresh unique ${tp.getClass}")
+ result
+ } else ctx.uniques.findEntryOrUpdate(tp).asInstanceOf[T]
+ } /* !!! DEBUG
+ ensuring (
+ result => tp.toString == result.toString || {
+ println(s"cache mismatch; tp = $tp, cached = $result")
+ false
+ }
+ )
+ */
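+
+ // Editorial sketch (not from the original sources): the essence of `unique` above is
+ // hash-consing, i.e. returning a previously interned, structurally equal instance when
+ // one exists. A self-contained analogue over strings:
+ //
+ //   import scala.collection.mutable
+ //   object InternDemo {
+ //     private val pool = mutable.HashMap.empty[String, String]
+ //     def unique(s: String): String = pool.getOrElseUpdate(s, s) // reuse the stored instance if present
+ //   }
+ //
+ // The real implementation additionally skips types whose hash is `NotCached`.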
+
+ final class NamedTypeUniques extends HashSet[NamedType](Config.initialUniquesCapacity) with Hashable {
+ override def hash(x: NamedType): Int = x.hash
+
+ private def findPrevious(h: Int, prefix: Type, name: Name): NamedType = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.prefix eq prefix) && (e.name eq name)) return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ def enterIfNew(prefix: Type, name: Name): NamedType = {
+ val h = doHash(name, prefix)
+ if (monitored) recordCaching(h, classOf[CachedTermRef])
+ def newType =
+ if (name.isTypeName) new CachedTypeRef(prefix, name.asTypeName, h)
+ else new CachedTermRef(prefix, name.asTermName, h)
+ if (h == NotCached) newType
+ else {
+ val r = findPrevious(h, prefix, name)
+ if (r ne null) r else addEntryAfterScan(newType)
+ }
+ }
+ }
+
+ final class TypeAliasUniques extends HashSet[TypeAlias](Config.initialUniquesCapacity) with Hashable {
+ override def hash(x: TypeAlias): Int = x.hash
+
+ private def findPrevious(h: Int, alias: Type, variance: Int): TypeAlias = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.alias eq alias) && (e.variance == variance)) return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ def enterIfNew(alias: Type, variance: Int): TypeAlias = {
+ val h = doHash(variance, alias)
+ if (monitored) recordCaching(h, classOf[TypeAlias])
+ def newAlias = new CachedTypeAlias(alias, variance, h)
+ if (h == NotCached) newAlias
+ else {
+ val r = findPrevious(h, alias, variance)
+ if (r ne null) r
+ else addEntryAfterScan(newAlias)
+ }
+ }
+ }
+
+ final class RefinedUniques extends HashSet[RefinedType](Config.initialUniquesCapacity) with Hashable {
+ override val hashSeed = classOf[CachedRefinedType].hashCode // some types start life as CachedRefinedTypes, need to have same hash seed
+ override def hash(x: RefinedType): Int = x.hash
+
+ private def findPrevious(h: Int, parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
+ var e = findEntryByHash(h)
+ while (e != null) {
+ if ((e.parent eq parent) && (e.refinedName eq refinedName) && (e.refinedInfo eq refinedInfo))
+ return e
+ e = nextEntryByHash(h)
+ }
+ e
+ }
+
+ def enterIfNew(parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
+ val h = doHash(refinedName, refinedInfo, parent)
+ def newType = new CachedRefinedType(parent, refinedName, refinedInfo, h)
+ if (monitored) recordCaching(h, classOf[CachedRefinedType])
+ if (h == NotCached) newType
+ else {
+ val r = findPrevious(h, parent, refinedName, refinedInfo)
+ if (r ne null) r else addEntryAfterScan(newType)
+ }
+ }
+
+ def enterIfNew(rt: RefinedType) = {
+ if (monitored) recordCaching(rt)
+ if (rt.hash == NotCached) rt
+ else {
+ val r = findPrevious(rt.hash, rt.parent, rt.refinedName, rt.refinedInfo)
+ if (r ne null) r else addEntryAfterScan(rt)
+ }
+ }
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala
new file mode 100644
index 000000000..cad3a4132
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala
@@ -0,0 +1,88 @@
+package dotty.tools
+package dotc
+package core
+package classfile
+
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+
+import io.AbstractFile
+
+/**
+ * This class reads files byte by byte. Only used by ClassfileParser
+ *
+ * @author Philippe Altherr
+ * @version 1.0, 23/03/2004
+ */
+class AbstractFileReader(val file: AbstractFile) {
+
+ /** the buffer containing the file
+ */
+ val buf: Array[Byte] = file.toByteArray
+
+ /** the current input pointer
+ */
+ var bp: Int = 0
+
+ /** return byte at offset 'pos'
+ */
+ @throws(classOf[IndexOutOfBoundsException])
+ def byteAt(pos: Int): Byte = buf(pos)
+
+ /** read a byte
+ */
+ @throws(classOf[IndexOutOfBoundsException])
+ def nextByte: Byte = {
+ val b = buf(bp)
+ bp += 1
+ b
+ }
+
+ /** read some bytes
+ */
+ def nextBytes(len: Int): Array[Byte] = {
+ bp += len
+ buf.slice(bp - len, bp)
+ }
+
+ /** read a character
+ */
+ def nextChar: Char =
+ (((nextByte & 0xff) << 8) + (nextByte & 0xff)).toChar
+
+ /** read an integer
+ */
+ def nextInt: Int =
+ ((nextByte & 0xff) << 24) + ((nextByte & 0xff) << 16) +
+ ((nextByte & 0xff) << 8) + (nextByte & 0xff)
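+ // Editorial example: the reads above are big-endian, so for a buffer starting with the
+ // bytes 0xCA 0xFE 0xBA 0xBE, a call to `nextInt` at bp = 0 yields 0xCAFEBABE, the
+ // classfile magic number.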
+
+
+ /** extract a character at position bp from buf
+ */
+ def getChar(mybp: Int): Char =
+ (((buf(mybp) & 0xff) << 8) + (buf(mybp + 1) & 0xff)).toChar
+
+ /** extract an integer at position bp from buf
+ */
+ def getInt(mybp: Int): Int =
+ ((buf(mybp ) & 0xff) << 24) + ((buf(mybp + 1) & 0xff) << 16) +
+ ((buf(mybp + 2) & 0xff) << 8) + (buf(mybp + 3) & 0xff)
+
+ /** extract a long integer at position bp from buf
+ */
+ def getLong(mybp: Int): Long =
+ (getInt(mybp).toLong << 32) + (getInt(mybp + 4) & 0xffffffffL)
+
+ /** extract a float at position bp from buf
+ */
+ def getFloat(mybp: Int): Float = intBitsToFloat(getInt(mybp))
+
+ /** extract a double at position bp from buf
+ */
+ def getDouble(mybp: Int): Double = longBitsToDouble(getLong(mybp))
+
+ /** skip next 'n' bytes
+ */
+ def skip(n: Int): Unit = { bp += n }
+
+}
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala
new file mode 100644
index 000000000..badd9e560
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala
@@ -0,0 +1,221 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package dotty.tools.dotc.core.classfile
+
+object ByteCodecs {
+
+ def avoidZero(src: Array[Byte]): Array[Byte] = {
+ var i = 0
+ val srclen = src.length
+ var count = 0
+ while (i < srclen) {
+ if (src(i) == 0x7f) count += 1
+ i += 1
+ }
+ val dst = new Array[Byte](srclen + count)
+ i = 0
+ var j = 0
+ while (i < srclen) {
+ val in = src(i)
+ if (in == 0x7f) {
+ dst(j) = (0xc0).toByte
+ dst(j + 1) = (0x80).toByte
+ j += 2
+ } else {
+ dst(j) = (in + 1).toByte
+ j += 1
+ }
+ i += 1
+ }
+ dst
+ }
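+ // Editorial note: `avoidZero` assumes its input consists of 7-bit values (as produced by
+ // `encode8to7`): every byte is incremented by one and 0x7f is expanded to the pair
+ // 0xc0 0x80, so the output never contains a 0x00 byte -- presumably so that it can be
+ // stored safely as (modified UTF-8) string data inside a classfile.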
+
+ def regenerateZero(src: Array[Byte]): Int = {
+ var i = 0
+ val srclen = src.length
+ var j = 0
+ while (i < srclen) {
+ val in: Int = src(i) & 0xff
+ if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) {
+ src(j) = 0x7f
+ i += 2
+ } else if (in == 0) {
+ src(j) = 0x7f
+ i += 1
+ } else {
+ src(j) = (in - 1).toByte
+ i += 1
+ }
+ j += 1
+ }
+ j
+ }
+
+ def encode8to7(src: Array[Byte]): Array[Byte] = {
+ val srclen = src.length
+ val dstlen = (srclen * 8 + 6) / 7
+ val dst = new Array[Byte](dstlen)
+ var i = 0
+ var j = 0
+ while (i + 6 < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte
+ var out: Int = in >>> 7
+ in = src(i + 1) & 0xff
+ dst(j + 1) = (out | (in << 1) & 0x7f).toByte
+ out = in >>> 6
+ in = src(i + 2) & 0xff
+ dst(j + 2) = (out | (in << 2) & 0x7f).toByte
+ out = in >>> 5
+ in = src(i + 3) & 0xff
+ dst(j + 3) = (out | (in << 3) & 0x7f).toByte
+ out = in >>> 4
+ in = src(i + 4) & 0xff
+ dst(j + 4) = (out | (in << 4) & 0x7f).toByte
+ out = in >>> 3
+ in = src(i + 5) & 0xff
+ dst(j + 5) = (out | (in << 5) & 0x7f).toByte
+ out = in >>> 2
+ in = src(i + 6) & 0xff
+ dst(j + 6) = (out | (in << 6) & 0x7f).toByte
+ out = in >>> 1
+ dst(j + 7) = out.toByte
+ i += 7
+ j += 8
+ }
+ if (i < srclen) {
+ var in: Int = src(i) & 0xff
+ dst(j) = (in & 0x7f).toByte; j += 1
+ var out: Int = in >>> 7
+ if (i + 1 < srclen) {
+ in = src(i + 1) & 0xff
+ dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1
+ out = in >>> 6
+ if (i + 2 < srclen) {
+ in = src(i + 2) & 0xff
+ dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1
+ out = in >>> 5
+ if (i + 3 < srclen) {
+ in = src(i + 3) & 0xff
+ dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1
+ out = in >>> 4
+ if (i + 4 < srclen) {
+ in = src(i + 4) & 0xff
+ dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1
+ out = in >>> 3
+ if (i + 5 < srclen) {
+ in = src(i + 5) & 0xff
+ dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1
+ out = in >>> 2
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) dst(j) = out.toByte
+ }
+ dst
+ }
+
+ def decode7to8(src: Array[Byte], srclen: Int): Int = {
+ var i = 0
+ var j = 0
+ val dstlen = (srclen * 7 + 7) / 8
+ while (i + 7 < srclen) {
+ var out: Int = src(i)
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte
+ out = in >>> 1
+ in = src(i + 2)
+ src(j + 1) = (out | (in & 0x03) << 6).toByte
+ out = in >>> 2
+ in = src(i + 3)
+ src(j + 2) = (out | (in & 0x07) << 5).toByte
+ out = in >>> 3
+ in = src(i + 4)
+ src(j + 3) = (out | (in & 0x0f) << 4).toByte
+ out = in >>> 4
+ in = src(i + 5)
+ src(j + 4) = (out | (in & 0x1f) << 3).toByte
+ out = in >>> 5
+ in = src(i + 6)
+ src(j + 5) = (out | (in & 0x3f) << 2).toByte
+ out = in >>> 6
+ in = src(i + 7)
+ src(j + 6) = (out | in << 1).toByte
+ i += 8
+ j += 7
+ }
+ if (i < srclen) {
+ var out: Int = src(i)
+ if (i + 1 < srclen) {
+ var in: Byte = src(i + 1)
+ src(j) = (out | (in & 0x01) << 7).toByte; j += 1
+ out = in >>> 1
+ if (i + 2 < srclen) {
+ in = src(i + 2)
+ src(j) = (out | (in & 0x03) << 6).toByte; j += 1
+ out = in >>> 2
+ if (i + 3 < srclen) {
+ in = src(i + 3)
+ src(j) = (out | (in & 0x07) << 5).toByte; j += 1
+ out = in >>> 3
+ if (i + 4 < srclen) {
+ in = src(i + 4)
+ src(j) = (out | (in & 0x0f) << 4).toByte; j += 1
+ out = in >>> 4
+ if (i + 5 < srclen) {
+ in = src(i + 5)
+ src(j) = (out | (in & 0x1f) << 3).toByte; j += 1
+ out = in >>> 5
+ if (i + 6 < srclen) {
+ in = src(i + 6)
+ src(j) = (out | (in & 0x3f) << 2).toByte; j += 1
+ out = in >>> 6
+ }
+ }
+ }
+ }
+ }
+ }
+ if (j < dstlen) src(j) = out.toByte
+ }
+ dstlen
+ }
+
+ def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs))
+
+ /**
+ * Destructively decodes array xs and returns the length of the decoded array.
+ *
+ * The returned value is sometimes one greater than the length of the decoded array. Example:
+ *
+ * scala> val enc = reflect.generic.ByteCodecs.encode(Array(1,2,3))
+ * enc: Array[Byte] = Array(2, 5, 13, 1)
+ *
+ * scala> reflect.generic.ByteCodecs.decode(enc)
+ * res43: Int = 4
+ *
+ * scala> enc
+ * res44: Array[Byte] = Array(1, 2, 3, 0)
+ *
+ * However, this does not always happen.
+ */
+ def decode(xs: Array[Byte]): Int = {
+ val len = regenerateZero(xs)
+ decode7to8(xs, len)
+ }
+}
+
+
+
+
+
+
+
+
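The encode/decode pair above round-trips byte arrays through a zero-free, 7-bits-per-byte form. A rough usage sketch of the two entry points, following the REPL session in the comment above (the object name is made up; this is not part of the commit):

object ByteCodecsSketch {
  import dotty.tools.dotc.core.classfile.ByteCodecs

  def main(args: Array[String]): Unit = {
    val original = Array[Byte](1, 2, 3)
    val encoded  = ByteCodecs.encode(original)   // zero-free encoding
    val length   = ByteCodecs.decode(encoded)    // decodes `encoded` in place
    // `length` may be one greater than the real decoded length (see above),
    // so only the original prefix is compared here.
    assert(encoded.take(original.length).sameElements(original))
  }
}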
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
new file mode 100644
index 000000000..dd29fa49d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala
@@ -0,0 +1,378 @@
+package dotty.tools.dotc
+package core
+package classfile
+
+import scala.annotation.switch
+
+object ClassfileConstants {
+
+ final val JAVA_MAGIC = 0xCAFEBABE
+ final val JAVA_MAJOR_VERSION = 45
+ final val JAVA_MINOR_VERSION = 3
+
+ /** (see http://java.sun.com/docs/books/jvms/second_edition/jvms-clarify.html)
+ *
+ * If the `ACC_INTERFACE` flag is set, the `ACC_ABSTRACT` flag must also
+ * be set (ch. 2.13.1).
+ *
+ * A class file cannot have both its `ACC_FINAL` and `ACC_ABSTRACT` flags
+ * set (ch. 2.8.2).
+ *
+ * A field may have at most one of its `ACC_PRIVATE`, `ACC_PROTECTED`,
+ * `ACC_PUBLIC` flags set (ch. 2.7.4).
+ *
+ * A field may not have both its `ACC_FINAL` and `ACC_VOLATILE` flags set
+ * (ch. 2.9.1).
+ *
+ * If a method has its `ACC_ABSTRACT` flag set it must not have any of its
+ * `ACC_FINAL`, `ACC_NATIVE`, `ACC_PRIVATE`, `ACC_STATIC`, `ACC_STRICT`,
+ * or `ACC_SYNCHRONIZED` flags set (ch. 2.13.3.2).
+ *
+ * All interface methods must have their `ACC_ABSTRACT` and
+ * `ACC_PUBLIC` flags set.
+ *
+ * Note for future reference: see this thread on ACC_SUPER and
+ * how its enforcement differs on the android vm.
+ * https://groups.google.com/forum/?hl=en#!topic/jvm-languages/jVhzvq8-ZIk
+ *
+ */ // Class Field Method
+ final val JAVA_ACC_PUBLIC = 0x0001 // X X X
+ final val JAVA_ACC_PRIVATE = 0x0002 // X X
+ final val JAVA_ACC_PROTECTED = 0x0004 // X X
+ final val JAVA_ACC_STATIC = 0x0008 // X X
+ final val JAVA_ACC_FINAL = 0x0010 // X X X
+ final val JAVA_ACC_SUPER = 0x0020 // X
+ final val JAVA_ACC_SYNCHRONIZED = 0x0020 // X
+ final val JAVA_ACC_VOLATILE = 0x0040 // X
+ final val JAVA_ACC_BRIDGE = 0x0040 // X
+ final val JAVA_ACC_TRANSIENT = 0x0080 // X
+ final val JAVA_ACC_VARARGS = 0x0080 // X
+ final val JAVA_ACC_NATIVE = 0x0100 // X
+ final val JAVA_ACC_INTERFACE = 0x0200 // X
+ final val JAVA_ACC_ABSTRACT = 0x0400 // X X
+ final val JAVA_ACC_STRICT = 0x0800 // X
+ final val JAVA_ACC_SYNTHETIC = 0x1000 // X X X
+ final val JAVA_ACC_ANNOTATION = 0x2000 // X
+ final val JAVA_ACC_ENUM = 0x4000 // X X
+
+ // tags describing the type of a literal in the constant pool
+ final val CONSTANT_UTF8 = 1
+ final val CONSTANT_UNICODE = 2
+ final val CONSTANT_INTEGER = 3
+ final val CONSTANT_FLOAT = 4
+ final val CONSTANT_LONG = 5
+ final val CONSTANT_DOUBLE = 6
+ final val CONSTANT_CLASS = 7
+ final val CONSTANT_STRING = 8
+ final val CONSTANT_FIELDREF = 9
+ final val CONSTANT_METHODREF = 10
+ final val CONSTANT_INTFMETHODREF = 11
+ final val CONSTANT_NAMEANDTYPE = 12
+
+ final val CONSTANT_METHODHANDLE = 15
+ final val CONSTANT_METHODTYPE = 16
+ final val CONSTANT_INVOKEDYNAMIC = 18
+
+ // tags describing the type of a literal in attribute values
+ final val BYTE_TAG = 'B'
+ final val CHAR_TAG = 'C'
+ final val DOUBLE_TAG = 'D'
+ final val FLOAT_TAG = 'F'
+ final val INT_TAG = 'I'
+ final val LONG_TAG = 'J'
+ final val SHORT_TAG = 'S'
+ final val BOOL_TAG = 'Z'
+ final val STRING_TAG = 's'
+ final val ENUM_TAG = 'e'
+ final val CLASS_TAG = 'c'
+ final val ARRAY_TAG = '['
+ final val VOID_TAG = 'V'
+ final val TVAR_TAG = 'T'
+ final val OBJECT_TAG = 'L'
+ final val ANNOTATION_TAG = '@'
+ final val SCALA_NOTHING = "scala.runtime.Nothing$"
+ final val SCALA_NULL = "scala.runtime.Null$"
+
+
+ // tags describing the type of newarray
+ final val T_BOOLEAN = 4
+ final val T_CHAR = 5
+ final val T_FLOAT = 6
+ final val T_DOUBLE = 7
+ final val T_BYTE = 8
+ final val T_SHORT = 9
+ final val T_INT = 10
+ final val T_LONG = 11
+
+ // JVM mnemonics
+ final val nop = 0x00
+ final val aconst_null = 0x01
+ final val iconst_m1 = 0x02
+
+ final val iconst_0 = 0x03
+ final val iconst_1 = 0x04
+ final val iconst_2 = 0x05
+ final val iconst_3 = 0x06
+ final val iconst_4 = 0x07
+ final val iconst_5 = 0x08
+
+ final val lconst_0 = 0x09
+ final val lconst_1 = 0x0a
+ final val fconst_0 = 0x0b
+ final val fconst_1 = 0x0c
+ final val fconst_2 = 0x0d
+ final val dconst_0 = 0x0e
+ final val dconst_1 = 0x0f
+
+ final val bipush = 0x10
+ final val sipush = 0x11
+ final val ldc = 0x12
+ final val ldc_w = 0x13
+ final val ldc2_w = 0x14
+
+ final val iload = 0x15
+ final val lload = 0x16
+ final val fload = 0x17
+ final val dload = 0x18
+ final val aload = 0x19
+
+ final val iload_0 = 0x1a
+ final val iload_1 = 0x1b
+ final val iload_2 = 0x1c
+ final val iload_3 = 0x1d
+ final val lload_0 = 0x1e
+ final val lload_1 = 0x1f
+ final val lload_2 = 0x20
+ final val lload_3 = 0x21
+ final val fload_0 = 0x22
+ final val fload_1 = 0x23
+ final val fload_2 = 0x24
+ final val fload_3 = 0x25
+ final val dload_0 = 0x26
+ final val dload_1 = 0x27
+ final val dload_2 = 0x28
+ final val dload_3 = 0x29
+ final val aload_0 = 0x2a
+ final val aload_1 = 0x2b
+ final val aload_2 = 0x2c
+ final val aload_3 = 0x2d
+ final val iaload = 0x2e
+ final val laload = 0x2f
+ final val faload = 0x30
+ final val daload = 0x31
+ final val aaload = 0x32
+ final val baload = 0x33
+ final val caload = 0x34
+ final val saload = 0x35
+
+ final val istore = 0x36
+ final val lstore = 0x37
+ final val fstore = 0x38
+ final val dstore = 0x39
+ final val astore = 0x3a
+ final val istore_0 = 0x3b
+ final val istore_1 = 0x3c
+ final val istore_2 = 0x3d
+ final val istore_3 = 0x3e
+ final val lstore_0 = 0x3f
+ final val lstore_1 = 0x40
+ final val lstore_2 = 0x41
+ final val lstore_3 = 0x42
+ final val fstore_0 = 0x43
+ final val fstore_1 = 0x44
+ final val fstore_2 = 0x45
+ final val fstore_3 = 0x46
+ final val dstore_0 = 0x47
+ final val dstore_1 = 0x48
+ final val dstore_2 = 0x49
+ final val dstore_3 = 0x4a
+ final val astore_0 = 0x4b
+ final val astore_1 = 0x4c
+ final val astore_2 = 0x4d
+ final val astore_3 = 0x4e
+ final val iastore = 0x4f
+ final val lastore = 0x50
+ final val fastore = 0x51
+ final val dastore = 0x52
+ final val aastore = 0x53
+ final val bastore = 0x54
+ final val castore = 0x55
+ final val sastore = 0x56
+
+ final val pop = 0x57
+ final val pop2 = 0x58
+ final val dup = 0x59
+ final val dup_x1 = 0x5a
+ final val dup_x2 = 0x5b
+ final val dup2 = 0x5c
+ final val dup2_x1 = 0x5d
+ final val dup2_x2 = 0x5e
+ final val swap = 0x5f
+
+ final val iadd = 0x60
+ final val ladd = 0x61
+ final val fadd = 0x62
+ final val dadd = 0x63
+ final val isub = 0x64
+ final val lsub = 0x65
+ final val fsub = 0x66
+ final val dsub = 0x67
+ final val imul = 0x68
+ final val lmul = 0x69
+ final val fmul = 0x6a
+ final val dmul = 0x6b
+ final val idiv = 0x6c
+ final val ldiv = 0x6d
+ final val fdiv = 0x6e
+ final val ddiv = 0x6f
+ final val irem = 0x70
+ final val lrem = 0x71
+ final val frem = 0x72
+ final val drem = 0x73
+
+ final val ineg = 0x74
+ final val lneg = 0x75
+ final val fneg = 0x76
+ final val dneg = 0x77
+
+ final val ishl = 0x78
+ final val lshl = 0x79
+ final val ishr = 0x7a
+ final val lshr = 0x7b
+ final val iushr = 0x7c
+ final val lushr = 0x7d
+ final val iand = 0x7e
+ final val land = 0x7f
+ final val ior = 0x80
+ final val lor = 0x81
+ final val ixor = 0x82
+ final val lxor = 0x83
+ final val iinc = 0x84
+
+ final val i2l = 0x85
+ final val i2f = 0x86
+ final val i2d = 0x87
+ final val l2i = 0x88
+ final val l2f = 0x89
+ final val l2d = 0x8a
+ final val f2i = 0x8b
+ final val f2l = 0x8c
+ final val f2d = 0x8d
+ final val d2i = 0x8e
+ final val d2l = 0x8f
+ final val d2f = 0x90
+ final val i2b = 0x91
+ final val i2c = 0x92
+ final val i2s = 0x93
+
+ final val lcmp = 0x94
+ final val fcmpl = 0x95
+ final val fcmpg = 0x96
+ final val dcmpl = 0x97
+ final val dcmpg = 0x98
+
+ final val ifeq = 0x99
+ final val ifne = 0x9a
+ final val iflt = 0x9b
+ final val ifge = 0x9c
+ final val ifgt = 0x9d
+ final val ifle = 0x9e
+ final val if_icmpeq = 0x9f
+ final val if_icmpne = 0xa0
+ final val if_icmplt = 0xa1
+ final val if_icmpge = 0xa2
+ final val if_icmpgt = 0xa3
+ final val if_icmple = 0xa4
+ final val if_acmpeq = 0xa5
+ final val if_acmpne = 0xa6
+ final val goto = 0xa7
+ final val jsr = 0xa8
+ final val ret = 0xa9
+ final val tableswitch = 0xaa
+ final val lookupswitch = 0xab
+ final val ireturn = 0xac
+ final val lreturn = 0xad
+ final val freturn = 0xae
+ final val dreturn = 0xaf
+ final val areturn = 0xb0
+ final val return_ = 0xb1
+
+ final val getstatic = 0xb2
+ final val putstatic = 0xb3
+ final val getfield = 0xb4
+ final val putfield = 0xb5
+
+ final val invokevirtual = 0xb6
+ final val invokespecial = 0xb7
+ final val invokestatic = 0xb8
+ final val invokeinterface = 0xb9
+ final val xxxunusedxxxx = 0xba
+
+ final val new_ = 0xbb
+ final val newarray = 0xbc
+ final val anewarray = 0xbd
+ final val arraylength = 0xbe
+ final val athrow = 0xbf
+ final val checkcast = 0xc0
+ final val instanceof = 0xc1
+ final val monitorenter = 0xc2
+ final val monitorexit = 0xc3
+ final val wide = 0xc4
+ final val multianewarray = 0xc5
+ final val ifnull = 0xc6
+ final val ifnonnull = 0xc7
+ final val goto_w = 0xc8
+ final val jsr_w = 0xc9
+
+ // reserved opcodes
+ final val breakpoint = 0xca
+ final val impdep1 = 0xfe
+ final val impdep2 = 0xff
+
+ import Flags._
+ abstract class FlagTranslation {
+
+ protected def baseFlags(jflags: Int) = EmptyFlags
+ protected def isClass: Boolean = false
+
+ private def translateFlag(jflag: Int): FlagSet = (jflag: @switch) match {
+ case JAVA_ACC_PRIVATE => Private
+ case JAVA_ACC_PROTECTED => Protected
+ case JAVA_ACC_FINAL => Final
+ case JAVA_ACC_SYNTHETIC => Synthetic
+ case JAVA_ACC_STATIC => JavaStatic
+ case JAVA_ACC_ABSTRACT => if (isClass) Abstract else Deferred
+ case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined
+ case _ => EmptyFlags
+ }
+
+ private def addFlag(base: FlagSet, jflag: Int): FlagSet =
+ if (jflag == 0) base else base | translateFlag(jflag)
+
+ private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = {
+ val nflags =
+ if ((jflags & JAVA_ACC_ANNOTATION) == 0) jflags
+ else jflags & ~(JAVA_ACC_ABSTRACT | JAVA_ACC_INTERFACE) // annotations are neither abstract nor interfaces
+ var res: FlagSet = baseFlags | JavaDefined
+ res = addFlag(res, nflags & JAVA_ACC_PRIVATE)
+ res = addFlag(res, nflags & JAVA_ACC_PROTECTED)
+ res = addFlag(res, nflags & JAVA_ACC_FINAL)
+ res = addFlag(res, nflags & JAVA_ACC_SYNTHETIC)
+ res = addFlag(res, nflags & JAVA_ACC_STATIC)
+ res = addFlag(res, nflags & JAVA_ACC_ABSTRACT)
+ res = addFlag(res, nflags & JAVA_ACC_INTERFACE)
+ res
+ }
+
+ def flags(jflags: Int): FlagSet = translateFlags(jflags, baseFlags(jflags))
+ }
+ val classTranslation = new FlagTranslation {
+ override def isClass = true
+ }
+ val fieldTranslation = new FlagTranslation {
+ override def baseFlags(jflags: Int) = if ((jflags & JAVA_ACC_FINAL) == 0) Mutable else EmptyFlags
+ }
+ val methodTranslation = new FlagTranslation {
+ override def baseFlags(jflags: Int) = if ((jflags & JAVA_ACC_BRIDGE) != 0) Bridge else EmptyFlags
+ }
+}
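The JVMS constraints quoted in the comment above (for instance, every ACC_INTERFACE class must also be ACC_ABSTRACT) come down to plain bit tests against these masks. A small self-contained sketch of such a check, with the relevant constants copied locally so it compiles on its own (not part of the commit):

object AccessFlagsSketch {
  final val JAVA_ACC_PUBLIC    = 0x0001
  final val JAVA_ACC_INTERFACE = 0x0200
  final val JAVA_ACC_ABSTRACT  = 0x0400

  // Per JVMS, a class with ACC_INTERFACE set must also have ACC_ABSTRACT set.
  def isWellFormedInterface(jflags: Int): Boolean =
    (jflags & JAVA_ACC_INTERFACE) == 0 || (jflags & JAVA_ACC_ABSTRACT) != 0

  def main(args: Array[String]): Unit = {
    assert(isWellFormedInterface(JAVA_ACC_PUBLIC | JAVA_ACC_INTERFACE | JAVA_ACC_ABSTRACT))
    assert(!isWellFormedInterface(JAVA_ACC_INTERFACE)) // missing ACC_ABSTRACT
  }
}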
diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
new file mode 100644
index 000000000..97a82e80d
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -0,0 +1,1100 @@
+package dotty.tools
+package dotc
+package core
+package classfile
+
+import Contexts._, Symbols._, Types._, Names._, StdNames._, NameOps._, Scopes._, Decorators._
+import SymDenotations._, unpickleScala2.Scala2Unpickler._, Constants._, Annotations._, util.Positions._
+import ast.tpd._
+import java.io.{ File, IOException }
+import java.lang.Integer.toHexString
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
+import scala.annotation.switch
+import typer.Checking.checkNonCyclic
+import io.AbstractFile
+import scala.util.control.NonFatal
+
+object ClassfileParser {
+ /** Marker trait for unpicklers that can be embedded in classfiles. */
+ trait Embedded
+}
+
+class ClassfileParser(
+ classfile: AbstractFile,
+ classRoot: ClassDenotation,
+ moduleRoot: ClassDenotation)(ictx: Context) {
+
+ import ClassfileConstants._
+ import ClassfileParser._
+
+ protected val in = new AbstractFileReader(classfile)
+
+ protected val staticModule: Symbol = moduleRoot.sourceModule(ictx)
+
+ protected val instanceScope: MutableScope = newScope // the scope of all instance definitions
+ protected val staticScope: MutableScope = newScope // the scope of all static definitions
+ protected var pool: ConstantPool = _ // the classfile's constant pool
+
+ protected var currentClassName: Name = _ // JVM name of the current class
+ protected var classTParams = Map[Name,Symbol]()
+
+ classRoot.info = (new NoCompleter).withDecls(instanceScope)
+ moduleRoot.info = (new NoCompleter).withDecls(staticScope).withSourceModule(_ => staticModule)
+
+ private def currentIsTopLevel(implicit ctx: Context) = classRoot.owner is Flags.PackageClass
+
+ private def mismatchError(c: Symbol) =
+ throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
+
+ def run()(implicit ctx: Context): Option[Embedded] = try {
+ ctx.debuglog("[class] >> " + classRoot.fullName)
+ parseHeader
+ this.pool = new ConstantPool
+ parseClass()
+ } catch {
+ case e: RuntimeException =>
+ if (ctx.debug) e.printStackTrace()
+ throw new IOException(
+ i"""class file $classfile is broken, reading aborted with ${e.getClass}
+ |${Option(e.getMessage).getOrElse("")}""")
+ }
+
+ private def parseHeader(): Unit = {
+ val magic = in.nextInt
+ if (magic != JAVA_MAGIC)
+ throw new IOException(s"class file '${in.file}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}")
+ val minorVersion = in.nextChar.toInt
+ val majorVersion = in.nextChar.toInt
+ if ((majorVersion < JAVA_MAJOR_VERSION) ||
+ ((majorVersion == JAVA_MAJOR_VERSION) &&
+ (minorVersion < JAVA_MINOR_VERSION)))
+ throw new IOException(
+ s"class file '${in.file}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
+ }
+
+ /** Return the class symbol of the given name. */
+ def classNameToSymbol(name: Name)(implicit ctx: Context): Symbol = innerClasses.get(name) match {
+ case Some(entry) => innerClasses.classSymbol(entry.externalName)
+ case None => ctx.requiredClass(name)
+ }
+
+ var sawPrivateConstructor = false
+
+ def parseClass()(implicit ctx: Context): Option[Embedded] = {
+ val jflags = in.nextChar
+ val isAnnotation = hasAnnotation(jflags)
+ val sflags = classTranslation.flags(jflags)
+ val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val nameIdx = in.nextChar
+ currentClassName = pool.getClassName(nameIdx)
+
+ if (currentIsTopLevel) {
+ val c = pool.getClassSymbol(nameIdx)
+ if (c != classRoot.symbol) mismatchError(c)
+ }
+
+ addEnclosingTParams()
+
+ /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled.
+ * Updates the read pointer of 'in'. */
+ def parseParents: List[Type] = {
+ val superType = if (isAnnotation) { in.nextChar; defn.AnnotationType }
+ else pool.getSuperClass(in.nextChar).typeRef
+ val ifaceCount = in.nextChar
+ var ifaces = for (i <- (0 until ifaceCount).toList) yield pool.getSuperClass(in.nextChar).typeRef
+ // Dotty deviation: was
+ // var ifaces = for (i <- List.range(0 until ifaceCount)) ...
+ // This does not typecheck because the type parameter of List is now lower-bounded by Int | Char.
+ // Consequently, no best implicit for the "Integral" evidence parameter of "range"
+ // is found. If we treat constant subtyping specially, we might be able
+ // to do something there. But in any case, the until should be more efficient.
+
+ if (isAnnotation) ifaces = defn.ClassfileAnnotationType :: ifaces
+ superType :: ifaces
+ }
+
+ val result = unpickleOrParseInnerClasses()
+ if (!result.isDefined) {
+ var classInfo: Type = TempClassInfoType(parseParents, instanceScope, classRoot.symbol)
+ // might be reassigned by later parseAttributes
+ val staticInfo = TempClassInfoType(List(), staticScope, moduleRoot.symbol)
+
+ enterOwnInnerClasses
+
+ classRoot.setFlag(sflags)
+ moduleRoot.setFlag(Flags.JavaDefined | Flags.ModuleClassCreationFlags)
+ setPrivateWithin(classRoot, jflags)
+ setPrivateWithin(moduleRoot, jflags)
+ setPrivateWithin(moduleRoot.sourceModule, jflags)
+
+ for (i <- 0 until in.nextChar) parseMember(method = false)
+ for (i <- 0 until in.nextChar) parseMember(method = true)
+ classInfo = parseAttributes(classRoot.symbol, classInfo)
+ if (isAnnotation) addAnnotationConstructor(classInfo)
+
+ val companionClassMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_CLASS_METHOD, classRoot, moduleRoot)
+ if (companionClassMethod.exists) companionClassMethod.entered
+ val companionModuleMethod = ctx.synthesizeCompanionMethod(nme.COMPANION_MODULE_METHOD, moduleRoot, classRoot)
+ if (companionModuleMethod.exists) companionModuleMethod.entered
+
+ setClassInfo(classRoot, classInfo)
+ setClassInfo(moduleRoot, staticInfo)
+ }
+
+ // eager load java enum definitions for exhaustivity check of pattern match
+ if (isEnum) {
+ instanceScope.toList.map(_.ensureCompleted())
+ staticScope.toList.map(_.ensureCompleted())
+ classRoot.setFlag(Flags.Enum)
+ moduleRoot.setFlag(Flags.Enum)
+ }
+
+ result
+ }
+
+ /** Add type parameters of enclosing classes */
+ def addEnclosingTParams()(implicit ctx: Context): Unit = {
+ var sym = classRoot.owner
+ while (sym.isClass && !(sym is Flags.ModuleClass)) {
+ for (tparam <- sym.typeParams) {
+ classTParams = classTParams.updated(tparam.name.unexpandedName, tparam)
+ }
+ sym = sym.owner
+ }
+ }
+
+ def parseMember(method: Boolean)(implicit ctx: Context): Unit = {
+ val start = indexCoord(in.bp)
+ val jflags = in.nextChar
+ val sflags =
+ if (method) Flags.Method | methodTranslation.flags(jflags)
+ else fieldTranslation.flags(jflags)
+ val name = pool.getName(in.nextChar)
+ if (!(sflags is Flags.Private) || name == nme.CONSTRUCTOR || ctx.settings.optimise.value) {
+ val member = ctx.newSymbol(
+ getOwner(jflags), name, sflags, memberCompleter, coord = start)
+ getScope(jflags).enter(member)
+ }
+ // skip rest of member for now
+ in.nextChar // info
+ skipAttributes
+ }
+
+ val memberCompleter = new LazyType {
+
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val oldbp = in.bp
+ try {
+ in.bp = denot.symbol.coord.toIndex
+ val sym = denot.symbol
+ val jflags = in.nextChar
+ val isEnum = (jflags & JAVA_ACC_ENUM) != 0
+ val name = pool.getName(in.nextChar)
+ val isConstructor = name eq nme.CONSTRUCTOR
+
+ /** Strip leading outer param from constructor.
+ * Todo: Also strip trailing access tag for private inner constructors?
+ */
+ def stripOuterParamFromConstructor() = innerClasses.get(currentClassName) match {
+ case Some(entry) if !isStatic(entry.jflags) =>
+ val mt @ MethodType(paramnames, paramtypes) = denot.info
+ denot.info = mt.derivedMethodType(paramnames.tail, paramtypes.tail, mt.resultType)
+ case _ =>
+ }
+
+ /** Make return type of constructor be the enclosing class type,
+ * and make constructor type polymorphic in the type parameters of the class
+ */
+ def normalizeConstructorInfo() = {
+ val mt @ MethodType(paramnames, paramtypes) = denot.info
+ val rt = classRoot.typeRef appliedTo (classRoot.typeParams map (_.typeRef))
+ denot.info = mt.derivedMethodType(paramnames, paramtypes, rt)
+ addConstructorTypeParams(denot)
+ }
+
+ denot.info = pool.getType(in.nextChar)
+ if (isEnum) denot.info = ConstantType(Constant(sym))
+ if (isConstructor) stripOuterParamFromConstructor()
+ setPrivateWithin(denot, jflags)
+ denot.info = translateTempPoly(parseAttributes(sym, denot.info))
+ if (isConstructor) normalizeConstructorInfo()
+
+ if ((denot is Flags.Method) && (jflags & JAVA_ACC_VARARGS) != 0)
+ denot.info = arrayToRepeated(denot.info)
+
+ // seal java enums
+ if (isEnum) {
+ val enumClass = sym.owner.linkedClass
+ if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
+ enumClass.addAnnotation(Annotation.makeChild(sym))
+ }
+ } finally {
+ in.bp = oldbp
+ }
+ }
+ }
+
+ /** Map direct references to Object to references to Any */
+ final def objToAny(tp: Type)(implicit ctx: Context) =
+ if (tp.isDirectRef(defn.ObjectClass) && !ctx.phase.erasedTypes) defn.AnyType else tp
+
+ private def sigToType(sig: TermName, owner: Symbol = null)(implicit ctx: Context): Type = {
+ var index = 0
+ val end = sig.length
+ def accept(ch: Char): Unit = {
+ assert(sig(index) == ch, (sig(index), ch))
+ index += 1
+ }
+ def subName(isDelimiter: Char => Boolean): TermName = {
+ val start = index
+ while (!isDelimiter(sig(index))) { index += 1 }
+ sig.slice(start, index)
+ }
+ // Warning: sigToType contains nested completers which might be forced in a later run!
+ // So local methods need their own ctx parameters.
+ def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean)(implicit ctx: Context): Type = {
+ val tag = sig(index); index += 1
+ (tag: @switch) match {
+ case BYTE_TAG => defn.ByteType
+ case CHAR_TAG => defn.CharType
+ case DOUBLE_TAG => defn.DoubleType
+ case FLOAT_TAG => defn.FloatType
+ case INT_TAG => defn.IntType
+ case LONG_TAG => defn.LongType
+ case SHORT_TAG => defn.ShortType
+ case VOID_TAG => defn.UnitType
+ case BOOL_TAG => defn.BooleanType
+ case 'L' =>
+ def processInner(tp: Type): Type = tp match {
+ case tp: TypeRef if !(tp.symbol.owner is Flags.ModuleClass) =>
+ TypeRef(processInner(tp.prefix.widen), tp.name)
+ case _ =>
+ tp
+ }
+ def processClassType(tp: Type): Type = tp match {
+ case tp: TypeRef =>
+ if (sig(index) == '<') {
+ accept('<')
+ var tp1: Type = tp
+ var formals = tp.typeParamSymbols
+ while (sig(index) != '>') {
+ sig(index) match {
+ case variance @ ('+' | '-' | '*') =>
+ index += 1
+ val bounds = variance match {
+ case '+' => objToAny(TypeBounds.upper(sig2type(tparams, skiptvs)))
+ case '-' =>
+ val tp = sig2type(tparams, skiptvs)
+ // sig2type seems to return AnyClass regardless of the situation:
+ // we don't want Any as a LOWER bound.
+ if (tp.isDirectRef(defn.AnyClass)) TypeBounds.empty
+ else TypeBounds.lower(tp)
+ case '*' => TypeBounds.empty
+ }
+ tp1 = RefinedType(tp1, formals.head.name, bounds)
+ case _ =>
+ tp1 = RefinedType(tp1, formals.head.name, TypeAlias(sig2type(tparams, skiptvs)))
+ }
+ formals = formals.tail
+ }
+ accept('>')
+ tp1
+ } else tp
+ case tp =>
+ assert(sig(index) != '<', tp)
+ tp
+ }
+
+ val classSym = classNameToSymbol(subName(c => c == ';' || c == '<'))
+ var tpe = processClassType(processInner(classSym.typeRef))
+ while (sig(index) == '.') {
+ accept('.')
+ val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName
+ val clazz = tpe.member(name).symbol
+ tpe = processClassType(processInner(clazz.typeRef))
+ }
+ accept(';')
+ tpe
+ case ARRAY_TAG =>
+ while ('0' <= sig(index) && sig(index) <= '9') index += 1
+ var elemtp = sig2type(tparams, skiptvs)
+ // make unbounded Array[T] where T is a type variable into Array[T with Object]
+ // (this is necessary because such arrays have a representation which is incompatible
+ // with arrays of primitive types.)
+ // NOTE that the comparison to Object only works for abstract types bounded by classes that are strict subclasses of Object
+ // if the bound is exactly Object, it will have been converted to Any, and the comparison will fail
+ // see also RestrictJavaArraysMap (when compiling java sources directly)
+ if (elemtp.typeSymbol.isAbstractType && !(elemtp.derivesFrom(defn.ObjectClass))) {
+ elemtp = AndType(elemtp, defn.ObjectType)
+ }
+ defn.ArrayOf(elemtp)
+ case '(' =>
+ // we need a method symbol. given in line 486 by calling getType(methodSym, ..)
+ val paramtypes = new ListBuffer[Type]()
+ var paramnames = new ListBuffer[TermName]()
+ while (sig(index) != ')') {
+ paramnames += nme.syntheticParamName(paramtypes.length)
+ paramtypes += objToAny(sig2type(tparams, skiptvs))
+ }
+ index += 1
+ val restype = sig2type(tparams, skiptvs)
+ JavaMethodType(paramnames.toList, paramtypes.toList)(_ => restype)
+ case 'T' =>
+ val n = subName(';'.==).toTypeName
+ index += 1
+ //assert(tparams contains n, s"classTparams = $classTParams, tparams = $tparams, key = $n")
+ if (skiptvs) defn.AnyType else tparams(n).typeRef
+ }
+ } // sig2type(tparams, skiptvs)
+
+ def sig2typeBounds(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(implicit ctx: Context): Type = {
+ val ts = new ListBuffer[Type]
+ while (sig(index) == ':') {
+ index += 1
+ if (sig(index) != ':') // guard against empty class bound
+ ts += objToAny(sig2type(tparams, skiptvs))
+ }
+ TypeBounds.upper(((NoType: Type) /: ts)(_ & _) orElse defn.AnyType)
+ }
+
+ var tparams = classTParams
+
+ def typeParamCompleter(start: Int) = new LazyType {
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ val savedIndex = index
+ try {
+ index = start
+ denot.info =
+ checkNonCyclic( // we need the checkNonCyclic call to insert LazyRefs for F-bounded cycles
+ denot.symbol,
+ sig2typeBounds(tparams, skiptvs = false),
+ reportErrors = false)
+ } finally {
+ index = savedIndex
+ }
+ }
+ }
+
+ val newTParams = new ListBuffer[Symbol]()
+ if (sig(index) == '<') {
+ assert(owner != null)
+ index += 1
+ val start = index
+ while (sig(index) != '>') {
+ val tpname = subName(':'.==).toTypeName
+ val expname = if (owner.isClass) tpname.expandedName(owner) else tpname
+ val s = ctx.newSymbol(
+ owner, expname, owner.typeParamCreationFlags,
+ typeParamCompleter(index), coord = indexCoord(index))
+ if (owner.isClass) owner.asClass.enter(s)
+ tparams = tparams + (tpname -> s)
+ sig2typeBounds(tparams, skiptvs = true)
+ newTParams += s
+ }
+ index += 1
+ }
+ val ownTypeParams = newTParams.toList.asInstanceOf[List[TypeSymbol]]
+ val tpe =
+ if ((owner == null) || !owner.isClass)
+ sig2type(tparams, skiptvs = false)
+ else {
+ classTParams = tparams
+ val parents = new ListBuffer[Type]()
+ while (index < end) {
+ parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter
+ }
+ TempClassInfoType(parents.toList, instanceScope, owner)
+ }
+ if (ownTypeParams.isEmpty) tpe else TempPolyType(ownTypeParams, tpe)
+ } // sigToType
+
+ def parseAnnotArg(skip: Boolean = false)(implicit ctx: Context): Option[Tree] = {
+ val tag = in.nextByte.toChar
+ val index = in.nextChar
+ tag match {
+ case STRING_TAG =>
+ if (skip) None else Some(Literal(Constant(pool.getName(index).toString)))
+ case BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | INT_TAG |
+ LONG_TAG | FLOAT_TAG | DOUBLE_TAG =>
+ if (skip) None else Some(Literal(pool.getConstant(index)))
+ case CLASS_TAG =>
+ if (skip) None else Some(Literal(Constant(pool.getType(index))))
+ case ENUM_TAG =>
+ val t = pool.getType(index)
+ val n = pool.getName(in.nextChar)
+ val module = t.typeSymbol.companionModule
+ val s = module.info.decls.lookup(n)
+ if (skip) {
+ None
+ } else if (s != NoSymbol) {
+ Some(Literal(Constant(s)))
+ } else {
+ ctx.warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (SI-7014).""")
+ None
+ }
+ case ARRAY_TAG =>
+ val arr = new ArrayBuffer[Tree]()
+ var hasError = false
+ for (i <- 0 until index)
+ parseAnnotArg(skip) match {
+ case Some(c) => arr += c
+ case None => hasError = true
+ }
+ if (hasError) None
+ else if (skip) None
+ else {
+ val elems = arr.toList
+ val elemType =
+ if (elems.isEmpty) defn.ObjectType
+ else ctx.typeComparer.lub(elems.tpes).widen
+ Some(JavaSeqLiteral(elems, TypeTree(elemType)))
+ }
+ case ANNOTATION_TAG =>
+ parseAnnotation(index, skip) map (_.tree)
+ }
+ }
+
+ /** Parse and return a single annotation. If it is malformed,
+ * return None.
+ */
+ def parseAnnotation(attrNameIndex: Char, skip: Boolean = false)(implicit ctx: Context): Option[Annotation] = try {
+ val attrType = pool.getType(attrNameIndex)
+ val nargs = in.nextChar
+ val argbuf = new ListBuffer[Tree]
+ var hasError = false
+ for (i <- 0 until nargs) {
+ val name = pool.getName(in.nextChar)
+ parseAnnotArg(skip) match {
+ case Some(arg) => argbuf += NamedArg(name, arg)
+ case None => hasError = !skip
+ }
+ }
+ if (hasError || skip) None
+ else Some(Annotation.deferredResolve(attrType, argbuf.toList))
+ } catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case NonFatal(ex) =>
+ // We want to be robust when annotations are unavailable, so the very least
+ // we can do is warn the user about the exception.
+ // There was a reference to ticket 1135, but that is outdated: a reference to a class not on
+ // the classpath would *not* end up here. A class not found is signaled
+ // with a `FatalError` exception, handled above. Here you'd end up after an NPE (for example),
+ // and that should never be swallowed silently.
+ ctx.warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ if (ctx.debug) ex.printStackTrace()
+
+ None // ignore malformed annotations
+ }
+
+ def parseAttributes(sym: Symbol, symtype: Type)(implicit ctx: Context): Type = {
+ def convertTo(c: Constant, pt: Type): Constant = {
+ if (pt == defn.BooleanType && c.tag == IntTag)
+ Constant(c.value != 0)
+ else
+ c convertTo pt
+ }
+ var newType = symtype
+
+ def parseAttribute(): Unit = {
+ val attrName = pool.getName(in.nextChar).toTypeName
+ val attrLen = in.nextInt
+ val end = in.bp + attrLen
+ attrName match {
+ case tpnme.SignatureATTR =>
+ val sig = pool.getExternalName(in.nextChar)
+ newType = sigToType(sig, sym)
+ if (ctx.debug && ctx.verbose)
+ println("" + sym + "; signature = " + sig + " type = " + newType)
+ case tpnme.SyntheticATTR =>
+ sym.setFlag(Flags.SyntheticArtifact)
+ case tpnme.BridgeATTR =>
+ sym.setFlag(Flags.Bridge)
+ case tpnme.DeprecatedATTR =>
+ val msg = Literal(Constant("see corresponding Javadoc for more information."))
+ val since = Literal(Constant(""))
+ sym.addAnnotation(Annotation(defn.DeprecatedAnnot, msg, since))
+ case tpnme.ConstantValueATTR =>
+ val c = pool.getConstant(in.nextChar)
+ val c1 = convertTo(c, symtype)
+ if (c1 ne null) newType = ConstantType(c1)
+ else println("failure to convert " + c + " to " + symtype); //debug
+ case tpnme.AnnotationDefaultATTR =>
+ sym.addAnnotation(Annotation(defn.AnnotationDefaultAnnot, Nil))
+ // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME
+ case tpnme.RuntimeAnnotationATTR =>
+ parseAnnotations(attrLen)
+
+ // TODO 1: parse runtime visible annotations on parameters
+ // case tpnme.RuntimeParamAnnotationATTR
+
+ // TODO 2: also parse RuntimeInvisibleAnnotation / RuntimeInvisibleParamAnnotation,
+ // i.e. java annotations with RetentionPolicy.CLASS?
+
+ case tpnme.ExceptionsATTR =>
+ parseExceptions(attrLen)
+
+ case tpnme.CodeATTR =>
+ if (sym.owner is Flags.JavaTrait) {
+ sym.resetFlag(Flags.Deferred)
+ sym.owner.resetFlag(Flags.PureInterface)
+ ctx.log(s"$sym in ${sym.owner} is a java8+ default method.")
+ }
+ in.skip(attrLen)
+
+ case _ =>
+ }
+ in.bp = end
+ }
+
+ /**
+ * Parse the "Exceptions" attribute which denotes the exceptions
+ * thrown by a method.
+ */
+ def parseExceptions(len: Int): Unit = {
+ val nClasses = in.nextChar
+ for (n <- 0 until nClasses) {
+ // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
+ val cls = pool.getClassSymbol(in.nextChar.toInt)
+ sym.addAnnotation(ThrowsAnnotation(cls.asClass))
+ }
+ }
+
+ /** Parse a sequence of annotations and attach them to the
+ * current symbol `sym`. */
+ def parseAnnotations(len: Int): Unit = {
+ val nAttr = in.nextChar
+ for (n <- 0 until nAttr)
+ parseAnnotation(in.nextChar) match {
+ case Some(annot) =>
+ sym.addAnnotation(annot)
+ case None =>
+ }
+ }
+
+ // begin parseAttributes
+ for (i <- 0 until in.nextChar) {
+ parseAttribute()
+ }
+ newType
+ }
+
+ /** Add synthetic constructor(s) and potentially also default getters which
+ * reflect the fields of the annotation with the given `classInfo`.
+ * Annotations in Scala are assumed to get all their arguments as constructor
+ * parameters. For Java annotations we need to fake it by making up the constructor.
+ * Note that default getters have type Nothing. That's OK because we need
+ * them only to signal that the corresponding parameter is optional.
+ */
+ def addAnnotationConstructor(classInfo: Type, tparams: List[TypeSymbol] = Nil)(implicit ctx: Context): Unit = {
+ def addDefaultGetter(attr: Symbol, n: Int) =
+ ctx.newSymbol(
+ owner = moduleRoot.symbol,
+ name = nme.CONSTRUCTOR.defaultGetterName(n),
+ flags = attr.flags & Flags.AccessFlags,
+ info = defn.NothingType).entered
+
+ classInfo match {
+ case classInfo @ TempPolyType(tparams, restpe) if tparams.isEmpty =>
+ addAnnotationConstructor(restpe, tparams)
+ case classInfo: TempClassInfoType =>
+ val attrs = classInfo.decls.toList.filter(_.isTerm)
+ val targs = tparams.map(_.typeRef)
+ val paramNames = attrs.map(_.name.asTermName)
+ val paramTypes = attrs.map(_.info.resultType)
+
+ def addConstr(ptypes: List[Type]) = {
+ val mtype = MethodType(paramNames, ptypes, classRoot.typeRef.appliedTo(targs))
+ val constrType = if (tparams.isEmpty) mtype else TempPolyType(tparams, mtype)
+ val constr = ctx.newSymbol(
+ owner = classRoot.symbol,
+ name = nme.CONSTRUCTOR,
+ flags = Flags.Synthetic,
+ info = constrType
+ ).entered
+ for ((attr, i) <- attrs.zipWithIndex)
+ if (attr.hasAnnotation(defn.AnnotationDefaultAnnot)) {
+ constr.setFlag(Flags.HasDefaultParams)
+ addDefaultGetter(attr, i)
+ }
+ }
+
+ addConstr(paramTypes)
+
+ // The code below added an extra constructor to annotations where the
+ // last parameter of the constructor is an Array[X] for some X, the
+ // array was replaced by a vararg argument. Unfortunately this breaks
+ // inference when doing:
+ // @Annot(Array())
+ // The constructor is overloaded so the expected type of `Array()` is
+ // WildcardType, and the type parameter of the Array apply method gets
+ // instantiated to `Nothing` instead of `X`.
+ // I'm leaving this commented out in case we improve inference to make this work.
+ // Note that if this is reenabled then JavaParser will also need to be modified
+ // to add the extra constructor (this was not implemented before).
+ /*
+ if (paramTypes.nonEmpty)
+ paramTypes.last match {
+ case defn.ArrayOf(elemtp) =>
+ addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
+ case _ =>
+ }
+ */
+ }
+ }
+
+ /** Enter own inner classes in the right scope. It needs the scopes to be set up,
+ * and, implicitly, the current class's superclasses.
+ */
+ private def enterOwnInnerClasses()(implicit ctx: Context): Unit = {
+ def className(name: Name): Name = name.drop(name.lastIndexOf('.') + 1)
+
+ def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = {
+ ctx.base.loaders.enterClassAndModule(
+ getOwner(jflags),
+ entry.originalName,
+ new ClassfileLoader(file),
+ classTranslation.flags(jflags),
+ getScope(jflags))
+ }
+
+ for (entry <- innerClasses.values) {
+ // create a new class member for immediate inner classes
+ if (entry.outerName == currentClassName) {
+ val file = ctx.platform.classPath.findSourceFile(entry.externalName.toString) getOrElse {
+ throw new AssertionError(entry.externalName)
+ }
+ enterClassAndModule(entry, file, entry.jflags)
+ }
+ }
+ }
+
+ /** Parse inner classes. Expects `in.bp` to point to the superclass entry.
+ * Restores the old `bp`.
+ * @return the embedded unpickler, if the classfile is from Scala (pickled or TASTY), so no Java info needs to be read.
+ */
+ def unpickleOrParseInnerClasses()(implicit ctx: Context): Option[Embedded] = {
+ val oldbp = in.bp
+ try {
+ skipSuperclasses()
+ skipMembers() // fields
+ skipMembers() // methods
+ val attrs = in.nextChar
+ val attrbp = in.bp
+
+ def scan(target: TypeName): Boolean = {
+ in.bp = attrbp
+ var i = 0
+ while (i < attrs && pool.getName(in.nextChar).toTypeName != target) {
+ val attrLen = in.nextInt
+ in.skip(attrLen)
+ i += 1
+ }
+ i < attrs
+ }
+
+ def unpickleScala(bytes: Array[Byte]): Some[Embedded] = {
+ val unpickler = new unpickleScala2.Scala2Unpickler(bytes, classRoot, moduleRoot)(ctx)
+ unpickler.run()(ctx.addMode(Mode.Scala2Unpickling))
+ Some(unpickler)
+ }
+
+ def unpickleTASTY(bytes: Array[Byte]): Some[Embedded] = {
+ val unpickler = new tasty.DottyUnpickler(bytes)
+ unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))
+ Some(unpickler)
+ }
+
+ def parseScalaSigBytes: Array[Byte] = {
+ val tag = in.nextByte.toChar
+ assert(tag == STRING_TAG, tag)
+ pool getBytes in.nextChar
+ }
+
+ def parseScalaLongSigBytes: Array[Byte] = {
+ val tag = in.nextByte.toChar
+ assert(tag == ARRAY_TAG, tag)
+ val stringCount = in.nextChar
+ val entries =
+ for (i <- 0 until stringCount) yield {
+ val stag = in.nextByte.toChar
+ assert(stag == STRING_TAG, stag)
+ in.nextChar.toInt
+ }
+ pool.getBytes(entries.toList)
+ }
+
+ if (scan(tpnme.TASTYATTR)) {
+ val attrLen = in.nextInt
+ return unpickleTASTY(in.nextBytes(attrLen))
+ }
+
+ if (scan(tpnme.RuntimeAnnotationATTR)) {
+ val attrLen = in.nextInt
+ val nAnnots = in.nextChar
+ var i = 0
+ while (i < nAnnots) {
+ val attrClass = pool.getType(in.nextChar).typeSymbol
+ val nArgs = in.nextChar
+ var j = 0
+ while (j < nArgs) {
+ val argName = pool.getName(in.nextChar)
+ if (argName == nme.bytes)
+ if (attrClass == defn.ScalaSignatureAnnot)
+ return unpickleScala(parseScalaSigBytes)
+ else if (attrClass == defn.ScalaLongSignatureAnnot)
+ return unpickleScala(parseScalaLongSigBytes)
+ else if (attrClass == defn.TASTYSignatureAnnot)
+ return unpickleTASTY(parseScalaSigBytes)
+ else if (attrClass == defn.TASTYLongSignatureAnnot)
+ return unpickleTASTY(parseScalaLongSigBytes)
+ parseAnnotArg(skip = true)
+ j += 1
+ }
+ i += 1
+ }
+ }
+
+ if (scan(tpnme.InnerClassesATTR)) {
+ val attrLen = in.nextInt
+ val entries = in.nextChar.toInt
+ for (i <- 0 until entries) {
+ val innerIndex = in.nextChar
+ val outerIndex = in.nextChar
+ val nameIndex = in.nextChar
+ val jflags = in.nextChar
+ if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) {
+ val entry = InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
+ innerClasses(pool.getClassName(innerIndex)) = entry
+ }
+ }
+ }
+ None
+ } finally in.bp = oldbp
+ }
+
+ /** An entry in the InnerClasses attribute of this class file. */
+ case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: Int) {
+ def externalName = pool.getClassName(external)
+ def outerName = pool.getClassName(outer)
+ def originalName = pool.getName(name)
+
+ override def toString =
+ originalName + " in " + outerName + "(" + externalName + ")"
+ }
+
+ object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
+ /** Return the Symbol of the top level class enclosing `name`,
+ * or `name`'s symbol if no entry is found for `name`.
+ */
+ def topLevelClass(name: Name)(implicit ctx: Context): Symbol = {
+ val tlName = if (isDefinedAt(name)) {
+ var entry = this(name)
+ while (isDefinedAt(entry.outerName))
+ entry = this(entry.outerName)
+ entry.outerName
+ } else
+ name
+ classNameToSymbol(tlName)
+ }
+
+ /** Return the class symbol for `externalName`. It is looked up in its outer class.
+ * Forces all outer class symbols to be completed.
+ *
+ * If the given name is not an inner class, it returns the symbol found in `defn`.
+ */
+ def classSymbol(externalName: Name)(implicit ctx: Context): Symbol = {
+ /** Return the symbol of `innerName`, having the given `externalName`. */
+ def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
+ def getMember(sym: Symbol, name: Name): Symbol =
+ if (static)
+ if (sym == classRoot.symbol) staticScope.lookup(name)
+ else sym.companionModule.info.member(name).symbol
+ else
+ if (sym == classRoot.symbol) instanceScope.lookup(name)
+ else sym.info.member(name).symbol
+
+ innerClasses.get(externalName) match {
+ case Some(entry) =>
+ val outerName = entry.outerName.stripModuleClassSuffix
+ val owner = classSymbol(outerName)
+ val result = ctx.atPhaseNotLaterThanTyper { implicit ctx =>
+ getMember(owner, innerName.toTypeName)
+ }
+ assert(result ne NoSymbol,
+ i"""failure to resolve inner class:
+ |externalName = $externalName,
+ |outerName = $outerName,
+ |innerName = $innerName
+ |owner.fullName = ${owner.showFullName}
+ |while parsing ${classfile}""")
+ result
+
+ case None =>
+ classNameToSymbol(externalName)
+ }
+ }
+
+ get(externalName) match {
+ case Some(entry) =>
+ innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
+ case None =>
+ classNameToSymbol(externalName)
+ }
+ }
+ }
+
+ def skipAttributes(): Unit = {
+ val attrCount = in.nextChar
+ for (i <- 0 until attrCount) {
+ in.skip(2); in.skip(in.nextInt)
+ }
+ }
+
+ def skipMembers(): Unit = {
+ val memberCount = in.nextChar
+ for (i <- 0 until memberCount) {
+ in.skip(6); skipAttributes()
+ }
+ }
+
+ def skipSuperclasses(): Unit = {
+ in.skip(2) // superclass
+ val ifaces = in.nextChar
+ in.skip(2 * ifaces)
+ }
+
+ protected def getOwner(flags: Int): Symbol =
+ if (isStatic(flags)) moduleRoot.symbol else classRoot.symbol
+
+ protected def getScope(flags: Int): MutableScope =
+ if (isStatic(flags)) staticScope else instanceScope
+
+ private def setPrivateWithin(denot: SymDenotation, jflags: Int)(implicit ctx: Context): Unit = {
+ if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PUBLIC)) == 0)
+ denot.privateWithin = denot.enclosingPackageClass
+ }
+
+ private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
+ private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
+ private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
+
+ class ConstantPool {
+ private val len = in.nextChar
+ private val starts = new Array[Int](len)
+ private val values = new Array[AnyRef](len)
+ private val internalized = new Array[TermName](len)
+
+ { var i = 1
+ while (i < starts.length) {
+ starts(i) = in.bp
+ i += 1
+ (in.nextByte.toInt: @switch) match {
+ case CONSTANT_UTF8 | CONSTANT_UNICODE =>
+ in.skip(in.nextChar)
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE =>
+ in.skip(2)
+ case CONSTANT_METHODHANDLE =>
+ in.skip(3)
+ case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
+ | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
+ | CONSTANT_INVOKEDYNAMIC =>
+ in.skip(4)
+ case CONSTANT_LONG | CONSTANT_DOUBLE =>
+ in.skip(8)
+ i += 1
+ case _ =>
+ errorBadTag(in.bp - 1)
+ }
+ }
+ }
+
+ /** Return the name found at given index. */
+ def getName(index: Int): TermName = {
+ if (index <= 0 || len <= index)
+ errorBadIndex(index)
+
+ values(index) match {
+ case name: TermName => name
+ case null =>
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val name = termName(in.buf, start + 3, in.getChar(start + 1))
+ values(index) = name
+ name
+ }
+ }
+
+ /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */
+ def getExternalName(index: Int): TermName = {
+ if (index <= 0 || len <= index)
+ errorBadIndex(index)
+
+ if (internalized(index) == null)
+ internalized(index) = getName(index).replace('/', '.')
+
+ internalized(index)
+ }
+
+ def getClassSymbol(index: Int)(implicit ctx: Context): Symbol = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var c = values(index).asInstanceOf[Symbol]
+ if (c eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
+ val name = getExternalName(in.getChar(start + 1))
+ if (name.isModuleClassName && (name ne nme.nothingRuntimeClass) && (name ne nme.nullRuntimeClass))
+ // Null$ and Nothing$ ARE classes
+ c = ctx.requiredModule(name.sourceModuleName)
+ else c = classNameToSymbol(name)
+ values(index) = c
+ }
+ c
+ }
+
+ /** Return the external name of the class info structure found at 'index'.
+ * Use 'getClassSymbol' if the class is sure to be a top-level class.
+ */
+ def getClassName(index: Int): TermName = {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
+ getExternalName(in.getChar(start + 1))
+ }
+
+ /** Return a name and a type at the given index.
+ */
+ private def getNameAndType(index: Int, ownerTpe: Type)(implicit ctx: Context): (Name, Type) = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var p = values(index).asInstanceOf[(Name, Type)]
+ if (p eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_NAMEANDTYPE) errorBadTag(start)
+ val name = getName(in.getChar(start + 1).toInt)
+ var tpe = getType(in.getChar(start + 3).toInt)
+ // fix the return type, which is blindly set to the class currently parsed
+ if (name == nme.CONSTRUCTOR)
+ tpe match {
+ case tp: MethodType =>
+ tp.derivedMethodType(tp.paramNames, tp.paramTypes, ownerTpe)
+ }
+ p = (name, tpe)
+ values(index) = p
+ }
+ p
+ }
+
+ /** Return the type of a class constant entry. Since
+ * arrays are considered to be class types, they might
+ * appear as entries in 'newarray' or 'cast' opcodes.
+ */
+ def getClassOrArrayType(index: Int)(implicit ctx: Context): Type = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ val value = values(index)
+ var c: Type = null
+ if (value eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_CLASS) errorBadTag(start)
+ val name = getExternalName(in.getChar(start + 1))
+ if (name(0) == ARRAY_TAG) {
+ c = sigToType(name)
+ values(index) = c
+ } else {
+ val sym = classNameToSymbol(name)
+ values(index) = sym
+ c = sym.typeRef
+ }
+ } else c = value match {
+ case tp: Type => tp
+ case cls: Symbol => cls.typeRef
+ }
+ c
+ }
+
+ def getType(index: Int)(implicit ctx: Context): Type =
+ sigToType(getExternalName(index))
+
+ def getSuperClass(index: Int)(implicit ctx: Context): Symbol = {
+ assert(index != 0, "attempt to parse java.lang.Object from classfile")
+ getClassSymbol(index)
+ }
+
+ def getConstant(index: Int)(implicit ctx: Context): Constant = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var value = values(index)
+ if (value eq null) {
+ val start = starts(index)
+ value = (in.buf(start).toInt: @switch) match {
+ case CONSTANT_STRING =>
+ Constant(getName(in.getChar(start + 1).toInt).toString)
+ case CONSTANT_INTEGER =>
+ Constant(in.getInt(start + 1))
+ case CONSTANT_FLOAT =>
+ Constant(in.getFloat(start + 1))
+ case CONSTANT_LONG =>
+ Constant(in.getLong(start + 1))
+ case CONSTANT_DOUBLE =>
+ Constant(in.getDouble(start + 1))
+ case CONSTANT_CLASS =>
+ getClassOrArrayType(index).typeSymbol
+ case _ =>
+ errorBadTag(start)
+ }
+ values(index) = value
+ }
+ value match {
+ case ct: Constant => ct
+ case cls: Symbol => Constant(cls.typeRef)
+ case arr: Type => Constant(arr)
+ }
+ }
+
+ private def getSubArray(bytes: Array[Byte]): Array[Byte] = {
+ val decodedLength = ByteCodecs.decode(bytes)
+ val arr = new Array[Byte](decodedLength)
+ System.arraycopy(bytes, 0, arr, 0, decodedLength)
+ arr
+ }
+
+ def getBytes(index: Int): Array[Byte] = {
+ if (index <= 0 || len <= index) errorBadIndex(index)
+ var value = values(index).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ val bytes = new Array[Byte](len)
+ System.arraycopy(in.buf, start + 3, bytes, 0, len)
+ value = getSubArray(bytes)
+ values(index) = value
+ }
+ value
+ }
+
+ def getBytes(indices: List[Int]): Array[Byte] = {
+ assert(!indices.isEmpty, indices)
+ var value = values(indices.head).asInstanceOf[Array[Byte]]
+ if (value eq null) {
+ val bytesBuffer = ArrayBuffer.empty[Byte]
+ for (index <- indices) {
+ if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index)
+ val start = starts(index)
+ if (in.buf(start).toInt != CONSTANT_UTF8) errorBadTag(start)
+ val len = in.getChar(start + 1)
+ bytesBuffer ++= in.buf.view(start + 3, start + 3 + len)
+ }
+ value = getSubArray(bytesBuffer.toArray)
+ values(indices.head) = value
+ }
+ value
+ }
+
+ /** Throws an exception signaling a bad constant index. */
+ private def errorBadIndex(index: Int) =
+ throw new RuntimeException("bad constant pool index: " + index + " at pos: " + in.bp)
+
+ /** Throws an exception signaling a bad tag at given address. */
+ private def errorBadTag(start: Int) =
+ throw new RuntimeException("bad constant pool tag " + in.buf(start) + " at byte " + start)
+ }
+}
+
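One subtlety in the ConstantPool scan above is that CONSTANT_Long and CONSTANT_Double entries occupy two pool indices, which is why the initializer bumps `i` a second time for them. A tiny standalone sketch of that bookkeeping (names made up; not part of the commit):

object PoolSlotSketch {
  final val CONSTANT_UTF8    = 1
  final val CONSTANT_INTEGER = 3
  final val CONSTANT_LONG    = 5
  final val CONSTANT_DOUBLE  = 6

  // Number of constant-pool slots taken by an entry with the given tag.
  def slots(tag: Int): Int = tag match {
    case CONSTANT_LONG | CONSTANT_DOUBLE => 2
    case _                               => 1
  }

  def main(args: Array[String]): Unit = {
    val tags = List(CONSTANT_UTF8, CONSTANT_LONG, CONSTANT_INTEGER)
    // Indexing starts at 1, so these three entries use indices 1, 2-3 and 4.
    assert(1 + tags.map(slots).sum == 5)
  }
}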
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
new file mode 100644
index 000000000..2c93819d5
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -0,0 +1,53 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, SymDenotations._, Symbols._
+import dotty.tools.dotc.ast.tpd
+import TastyUnpickler._, TastyBuffer._
+import util.Positions._
+import util.{SourceFile, NoSource}
+import Annotations.Annotation
+import core.Mode
+import classfile.ClassfileParser
+
+object DottyUnpickler {
+
+ /** Exception thrown if classfile is corrupted */
+ class BadSignature(msg: String) extends RuntimeException(msg)
+
+ class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler])
+ extends SectionUnpickler[TreeUnpickler]("ASTs") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new TreeUnpickler(reader, tastyName, posUnpickler)
+ }
+
+ class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
+ new PositionUnpickler(reader)
+ }
+}
+
+/** A class for unpickling Tasty trees and symbols.
+ * @param bytes the bytearray containing the Tasty file from which we unpickle
+ */
+class DottyUnpickler(bytes: Array[Byte]) extends ClassfileParser.Embedded {
+ import tpd._
+ import DottyUnpickler._
+
+ val unpickler = new TastyUnpickler(bytes)
+ private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler)
+ private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler(posUnpicklerOpt)).get
+
+ /** Enter all toplevel classes and objects into their scopes
+ * @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
+ treeUnpickler.enterTopLevel(roots)
+
+ /** The unpickled trees. */
+ def body(implicit ctx: Context): List[Tree] = {
+ treeUnpickler.unpickle()
+ }
+}
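The two entry points above are meant to be driven the way ClassfileParser.unpickleTASTY (earlier in this diff) drives them: enter the top-level symbols first, then force the trees when needed. A hedged sketch of that call pattern; the wrapper method is invented for illustration, and a Context plus root denotations are assumed to be supplied by the caller:

import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.SymDenotations.SymDenotation
import dotty.tools.dotc.core.Symbols._
import dotty.tools.dotc.core.tasty.DottyUnpickler

object DottyUnpicklerSketch {
  // Mirrors the call sequence used by ClassfileParser.unpickleTASTY above.
  def loadTasty(bytes: Array[Byte], classRoot: SymDenotation, moduleRoot: SymDenotation)
               (implicit ctx: Context): List[tpd.Tree] = {
    val unpickler = new DottyUnpickler(bytes)
    unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))
    unpickler.body // forces the ASTs section and returns the top-level trees
  }
}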
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
new file mode 100644
index 000000000..3ff7298ce
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala
@@ -0,0 +1,101 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import collection.mutable
+import Names.{Name, chrs}
+import Decorators._, NameOps._
+import TastyBuffer._
+import scala.io.Codec
+import TastyName._
+import TastyFormat._
+
+class NameBuffer extends TastyBuffer(10000) {
+ import NameBuffer._
+
+ private val nameRefs = new mutable.LinkedHashMap[TastyName, NameRef]
+
+ def nameIndex(name: TastyName): NameRef = nameRefs.get(name) match {
+ case Some(ref) =>
+ ref
+ case None =>
+ val ref = NameRef(nameRefs.size)
+ nameRefs(name) = ref
+ ref
+ }
+ def nameIndex(name: Name): NameRef = {
+ val tname =
+ if (name.isShadowedName) Shadowed(nameIndex(name.revertShadowed))
+ else Simple(name.toTermName)
+ nameIndex(tname)
+ }
+
+ def nameIndex(str: String): NameRef = nameIndex(str.toTermName)
+
+ def fullNameIndex(name: Name): NameRef = {
+ val pos = name.lastIndexOf('.')
+ if (pos > 0)
+ nameIndex(Qualified(fullNameIndex(name.take(pos)), nameIndex(name.drop(pos + 1))))
+ else
+ nameIndex(name)
+ }
+
+ private def withLength(op: => Unit, lengthWidth: Int = 1): Unit = {
+ val lengthAddr = currentAddr
+ for (i <- 0 until lengthWidth) writeByte(0)
+ op
+ val length = currentAddr.index - lengthAddr.index - 1
+ putNat(lengthAddr, length, lengthWidth)
+ }
+
+ def writeNameRef(ref: NameRef) = writeNat(ref.index)
+
+ def pickleName(name: TastyName): Unit = name match {
+ case Simple(name) =>
+ val bytes =
+ if (name.length == 0) new Array[Byte](0)
+ else Codec.toUTF8(chrs, name.start, name.length)
+ writeByte(UTF8)
+ writeNat(bytes.length)
+ writeBytes(bytes, bytes.length)
+ case Qualified(qualified, selector) =>
+ writeByte(QUALIFIED)
+ withLength { writeNameRef(qualified); writeNameRef(selector) }
+ case Signed(original, params, result) =>
+ writeByte(SIGNED)
+ withLength(
+ { writeNameRef(original); writeNameRef(result); params.foreach(writeNameRef) },
+ if ((params.length + 2) * maxIndexWidth <= maxNumInByte) 1 else 2)
+ case Expanded(prefix, original) =>
+ writeByte(EXPANDED)
+ withLength { writeNameRef(prefix); writeNameRef(original) }
+ case ModuleClass(module) =>
+ writeByte(OBJECTCLASS)
+ withLength { writeNameRef(module) }
+ case SuperAccessor(accessed) =>
+ writeByte(SUPERACCESSOR)
+ withLength { writeNameRef(accessed) }
+ case DefaultGetter(method, paramNumber) =>
+ writeByte(DEFAULTGETTER)
+ withLength { writeNameRef(method); writeNat(paramNumber) }
+ case Shadowed(original) =>
+ writeByte(SHADOWED)
+ withLength { writeNameRef(original) }
+ }
+
+ override def assemble(): Unit = {
+ var i = 0
+ for ((name, ref) <- nameRefs) {
+ assert(ref.index == i)
+ i += 1
+ pickleName(name)
+ }
+ }
+}
+
+object NameBuffer {
+ private val maxIndexWidth = 3 // allows name indices up to 2^21.
+ private val payloadBitsPerByte = 7 // determined by nat encoding in TastyBuffer
+ private val maxNumInByte = (1 << payloadBitsPerByte) - 1
+}
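
As a small worked example of the two indexing methods above (a sketch, not part of this commit): `nameIndex` deduplicates through the LinkedHashMap, so a repeated name yields the same NameRef, while `fullNameIndex` splits on the last '.' and thus turns a dotted name into nested Qualified entries:

    val names = new NameBuffer
    val r1 = names.nameIndex("foo")   // first occurrence: fresh NameRef
    val r2 = names.nameIndex("foo")   // deduplicated: same NameRef as r1
    assert(r1 == r2)
    // fullNameIndex("a.b.c".toTermName) yields, conceptually,
    // Qualified(Qualified(Simple(a), Simple(b)), Simple(c))
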
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
new file mode 100644
index 000000000..546894a9e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -0,0 +1,79 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast._
+import ast.Trees._
+import ast.Trees.WithLazyField
+import TastyFormat._
+import core._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._
+import collection.mutable
+import TastyBuffer._
+import util.Positions._
+
+class PositionPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Option[Addr]) {
+ val buf = new TastyBuffer(5000)
+ pickler.newSection("Positions", buf)
+ import buf._
+ import ast.tpd._
+
+ private val remainingAddrs = new java.util.IdentityHashMap[Tree, Iterator[Addr]]
+
+ def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean, hasPoint: Boolean) = {
+ def toInt(b: Boolean) = if (b) 1 else 0
+ (addrDelta << 3) | (toInt(hasStartDelta) << 2) | (toInt(hasEndDelta) << 1) | toInt(hasPoint)
+ }
+
+ def picklePositions(roots: List[Tree])(implicit ctx: Context) = {
+ var lastIndex = 0
+ var lastPos = Position(0, 0)
+ def pickleDeltas(index: Int, pos: Position) = {
+ val addrDelta = index - lastIndex
+ val startDelta = pos.start - lastPos.start
+ val endDelta = pos.end - lastPos.end
+ buf.writeInt(header(addrDelta, startDelta != 0, endDelta != 0, !pos.isSynthetic))
+ if (startDelta != 0) buf.writeInt(startDelta)
+ if (endDelta != 0) buf.writeInt(endDelta)
+ if (!pos.isSynthetic) buf.writeInt(pos.pointDelta)
+ lastIndex = index
+ lastPos = pos
+ }
+
+ /** True if x's position cannot be reconstructed automatically from its initialPos
+ */
+ def alwaysNeedsPos(x: Positioned) = x match {
+ case _: WithLazyField[_] // initialPos is inaccurate for trees with lazy field
+ | _: Trees.PackageDef[_] => true // package defs might be split into several Tasty files
+ case _ => false
+ }
+
+ def traverse(x: Any): Unit = x match {
+ case x: Tree @unchecked =>
+ val pos = if (x.isInstanceOf[MemberDef]) x.pos else x.pos.toSynthetic
+ if (pos.exists && (pos != x.initialPos.toSynthetic || alwaysNeedsPos(x))) {
+ addrOfTree(x) match {
+ case Some(addr) =>
+ //println(i"pickling $x with $pos at $addr")
+ pickleDeltas(addr.index, pos)
+ case _ =>
+ //println(i"no address for $x")
+ }
+ }
+ //else if (x.pos.exists) println(i"skipping $x")
+ x match {
+ case x: MemberDef @unchecked =>
+ for (ann <- x.symbol.annotations) traverse(ann.tree)
+ case _ =>
+ }
+ traverse(x.productIterator)
+ case xs: TraversableOnce[_] =>
+ xs.foreach(traverse)
+ case x: Annotation =>
+ traverse(x.tree)
+ case _ =>
+ }
+ traverse(roots)
+ }
+}
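
A worked example of the header packing used above (a sketch, not part of this commit): a node pickled 5 entries after the previous one, with a changed start offset but an unchanged end offset and no point, gets the header (5 << 3) | (1 << 2) = 44, followed by a single Int holding the start delta:

    // header(addrDelta = 5, hasStartDelta = true, hasEndDelta = false, hasPoint = false)
    val packed = (5 << 3) | (1 << 2) | (0 << 1) | 0
    assert(packed == 44)
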
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
new file mode 100644
index 000000000..cbe213d89
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
@@ -0,0 +1,39 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+
+import util.Positions._
+import collection.mutable
+import TastyBuffer.{Addr, NoAddr}
+
+/** Unpickler for tree positions */
+class PositionUnpickler(reader: TastyReader) {
+ import reader._
+
+ private[tasty] lazy val positions = {
+ val positions = new mutable.HashMap[Addr, Position]
+ var curIndex = 0
+ var curStart = 0
+ var curEnd = 0
+ while (!isAtEnd) {
+ val header = readInt()
+ val addrDelta = header >> 3
+ val hasStart = (header & 4) != 0
+ val hasEnd = (header & 2) != 0
+ val hasPoint = (header & 1) != 0
+ curIndex += addrDelta
+ assert(curIndex >= 0)
+ if (hasStart) curStart += readInt()
+ if (hasEnd) curEnd += readInt()
+ positions(Addr(curIndex)) =
+ if (hasPoint) Position(curStart, curEnd, curStart + readInt())
+ else Position(curStart, curEnd)
+ }
+ positions
+ }
+
+ def posAt(addr: Addr) = positions.getOrElse(addr, NoPosition)
+}
+
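
Note that `curStart` and `curEnd` accumulate across entries, so the deltas read here are relative to the previously decoded node rather than absolute offsets. Decoding the example header 44 from the pickler sketch above (again only a sketch):

    val header    = 44
    val addrDelta = header >> 3        // 5: five entries after the previous node
    val hasStart  = (header & 4) != 0  // true: a start delta follows
    val hasEnd    = (header & 2) != 0  // false: end offset unchanged
    val hasPoint  = (header & 1) != 0  // false: no point delta
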
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
new file mode 100644
index 000000000..13bc95028
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.dble
+
+object TastyBuffer {
+
+ /** The number of digits of the natural number `nat`, written in base 128 format. */
+ def natSize(nat: Int): Int =
+ if (nat < 128) 1 else natSize(nat >>> 7) + 1
+
+ /** An address pointing to an index in a Tasty buffer's byte array */
+ case class Addr(index: Int) extends AnyVal {
+ def - (delta: Int): Addr = Addr(this.index - delta)
+ def + (delta: Int): Addr = Addr(this.index + delta)
+
+ def relativeTo(base: Addr): Addr = this - base.index - AddrWidth
+ }
+
+ val NoAddr = Addr(-1)
+
+ /** The maximal number of address bytes.
+ * Since addresses are written as base-128 natural numbers,
+ * the value of 4 gives a maximal array size of 256M.
+ */
+ final val AddrWidth = 4
+}
+import TastyBuffer._
+
+/** A byte array buffer that can be filled with bytes or natural numbers in TASTY format,
+ * and that supports reading and patching addresses represented as natural numbers.
+ */
+class TastyBuffer(initialSize: Int) {
+
+ /** The current byte array, will be expanded as needed */
+ var bytes = new Array[Byte](initialSize)
+
+ /** The number of bytes written */
+ var length = 0
+
+ // -- Output routines --------------------------------------------
+
+ /** Write a byte of data. */
+ def writeByte(b: Int): Unit = {
+ if (length >= bytes.length)
+ bytes = dble(bytes)
+ bytes(length) = b.toByte
+ length += 1
+ }
+
+ /** Write the first `n` bytes of `data`. */
+ def writeBytes(data: Array[Byte], n: Int): Unit = {
+ while (bytes.length < length + n) bytes = dble(bytes)
+ Array.copy(data, 0, bytes, length, n)
+ length += n
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeNat(x: Int): Unit =
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
+
+ /** Write a natural number in 2's complement big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeInt(x: Int): Unit =
+ writeLongInt(x)
+
+ /**
+ * Like writeNat, but for longs. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range 0 to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long): Unit = {
+ def writePrefix(x: Long): Unit = {
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+
+ /** Like writeInt, but for longs */
+ def writeLongInt(x: Long): Unit = {
+ def writePrefix(x: Long): Unit = {
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
+ val y = x >> 7
+ if (y != 0L - ((x >> 6) & 1)) writePrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+
+ /** Write an uncompressed Long stored in 8 bytes in big endian format */
+ def writeUncompressedLong(x: Long): Unit = {
+ var y = x
+ val bytes = new Array[Byte](8)
+ for (i <- 7 to 0 by -1) {
+ bytes(i) = (y & 0xff).toByte
+ y = y >>> 8
+ }
+ writeBytes(bytes, 8)
+ }
+
+ // -- Address handling --------------------------------------------
+
+ /** Write natural number `x` right-adjusted in a field of `width` bytes
+ * starting with address `at`.
+ */
+ def putNat(at: Addr, x: Int, width: Int): Unit = {
+ var y = x
+ var w = width
+ if(at.index + w >= bytes.length)
+ bytes = dble(bytes)
+ var digit = y & 0x7f | 0x80
+ while (w > 0) {
+ w -= 1
+ bytes(at.index + w) = digit.toByte
+ y >>>= 7
+ digit = y & 0x7f
+ }
+ assert(y == 0, s"number $x too large to fit in $width bytes")
+ }
+
+ /** The byte at given address */
+ def getByte(at: Addr): Int = bytes(at.index)
+
+ /** The natural number at address `at` */
+ def getNat(at: Addr): Int = getLongNat(at).toInt
+
+ /** The long natural number at address `at` */
+ def getLongNat(at: Addr): Long = {
+ var b = 0L
+ var x = 0L
+ var idx = at.index
+ do {
+ b = bytes(idx)
+ x = (x << 7) | (b & 0x7f)
+ idx += 1
+ } while ((b & 0x80) == 0)
+ x
+ }
+
+ /** The address (represented as a natural number) at address `at` */
+ def getAddr(at: Addr) = Addr(getNat(at))
+
+ /** The smallest address equal to or following `at` which points to a non-zero byte */
+ final def skipZeroes(at: Addr): Addr =
+ if (getByte(at) != 0) at else skipZeroes(at + 1)
+
+ /** The address after the natural number found at address `at`. */
+ final def skipNat(at: Addr): Addr = {
+ val next = at + 1
+ if ((getByte(at) & 0x80) != 0) next else skipNat(next)
+ }
+
+ /** The address referring to the end of data written so far */
+ def currentAddr: Addr = Addr(length)
+
+ /** Reserve `AddrWidth` bytes to write an address into */
+ def reserveAddr(): Addr = {
+ val result = currentAddr
+ length += AddrWidth
+ result
+ }
+
+ /** Fill reserved space at address `at` with address `target` */
+ def fillAddr(at: Addr, target: Addr) =
+ putNat(at, target.index, AddrWidth)
+
+ /** Write address without leading zeroes */
+ def writeAddr(addr: Addr): Unit = writeNat(addr.index)
+
+ // -- Finalization --------------------------------------------
+
+ /** Hook to be overridden in subclasses.
+ * Perform all actions necessary to assemble the final byte array.
+ * After `assemble` no more output actions to this buffer are permitted.
+ */
+ def assemble(): Unit = ()
+}
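
To make the nat encoding concrete, a worked example (a sketch, not part of this commit; note that, unlike LEB128, the terminator is the byte with bit 0x80 set): 300 = 2 * 128 + 44, so writeNat(300) emits the byte 2 followed by 44 | 0x80 = 172, and getNat reads digits until it hits the byte with the high bit set:

    import TastyBuffer.Addr
    val buf = new TastyBuffer(16)
    buf.writeNat(300)
    assert(buf.length == 2)
    assert((buf.bytes(0) & 0xff) == 2 && (buf.bytes(1) & 0xff) == 172)
    assert(buf.getNat(Addr(0)) == 300)
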
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
new file mode 100644
index 000000000..cb1b56c3c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -0,0 +1,553 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+/************************************************************
+Notation:
+
+We use BNF notation. Terminal symbols start with at least two
+consecutive upper case letters. Each terminal is represented as a
+single byte tag. Non-terminals are mixed case. Prefixes of the form
+lower case letter*_ are for explanation of semantic content only; they
+can be dropped without changing the grammar.
+
+Micro-syntax:
+
+ LongInt = Digit* StopDigit // big endian 2's complement, value fits in a Long w/o overflow
+ Int = LongInt // big endian 2's complement, fits in an Int w/o overflow
+ Nat = LongInt // non-negative value, fits in an Int without overflow
+ Digit = 0 | ... | 127
+ StopDigit = 128 | ... | 255 // value = digit - 128
+
+Macro-format:
+
+ File = Header majorVersion_Nat minorVersion_Nat UUID
+ nameTable_Length Name* Section*
+ Header = 0x5CA1AB1F
+ UUID = Byte*16 // random UUID
+
+ Section = NameRef Length Bytes
+ Length = Nat // length of rest of entry in bytes
+
+ Name = UTF8 Length UTF8-CodePoint*
+ QUALIFIED Length qualified_NameRef selector_NameRef
+ SIGNED Length original_NameRef resultSig_NameRef paramSig_NameRef*
+ EXPANDED Length prefix_NameRef original_NameRef
+ OBJECTCLASS Length module_NameRef
+ SUPERACCESSOR Length accessed_NameRef
+ DEFAULTGETTER Length method_NameRef paramNumber_Nat
+ SHADOWED Length original_NameRef
+ MANGLED Length mangle_NameRef name_NameRef
+ ...
+
+ NameRef = Nat // ordinal number of name in name table, starting from 1.
+
+Note: Unqualified names in the name table are strings. The context decides whether a name is
+a type-name or a term-name. The same string can represent both.
+
+Standard-Section: "ASTs" TopLevelStat*
+
+ TopLevelStat = PACKAGE Length Path TopLevelStat*
+ Stat
+
+ Stat = Term
+ VALDEF Length NameRef Type rhs_Term? Modifier*
+ DEFDEF Length NameRef TypeParam* Params* return_Type rhs_Term?
+ Modifier*
+ TYPEDEF Length NameRef (Type | Template) Modifier*
+ IMPORT Length qual_Term Selector*
+ Selector = IMPORTED name_NameRef
+ RENAMED to_NameRef
+
+ // Imports are for scala.meta, they are not used in the backend
+
+ TypeParam = TYPEPARAM Length NameRef Type Modifier*
+ Params = PARAMS Length Param*
+ Param = PARAM Length NameRef Type rhs_Term? Modifier* // rhs_Term is present in the case of an aliased class parameter
+ Template = TEMPLATE Length TypeParam* Param* Parent* Self? Stat* // Stat* always starts with the primary constructor.
+ Parent = Application
+ Type
+ Self = SELFDEF selfName_NameRef selfType_Type
+
+ Term = Path
+ Application
+ IDENT NameRef Type // used when term ident’s type is not a TermRef
+ SELECT possiblySigned_NameRef qual_Term
+ QUALTHIS typeIdent_Tree
+ NEW cls_Type
+ SUPER Length this_Term mixinTypeIdent_Tree?
+ TYPED Length expr_Term ascription_Type
+ NAMEDARG Length paramName_NameRef arg_Term
+ ASSIGN Length lhs_Term rhs_Term
+ BLOCK Length expr_Term Stat*
+ INLINED Length call_Term expr_Term Stat*
+ LAMBDA Length meth_Term target_Type
+ IF Length cond_Term then_Term else_Term
+ MATCH Length sel_Term CaseDef*
+ TRY Length expr_Term CaseDef* finalizer_Term?
+ RETURN Length meth_ASTRef expr_Term?
+ REPEATED Length elem_Type elem_Term*
+ BIND Length boundName_NameRef patType_Type pat_Term
+ ALTERNATIVE Length alt_Term*
+ UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term*
+ IDENTtpt NameRef Type // used for all type idents
+ SELECTtpt NameRef qual_Term
+ SINGLETONtpt Path
+ REFINEDtpt Length underlying_Term refinement_Stat*
+ APPLIEDtpt Length tycon_Term arg_Term*
+ POLYtpt Length TypeParam* body_Term
+ TYPEBOUNDStpt Length low_Term high_Term
+ ANNOTATEDtpt Length underlying_Term fullAnnotation_Term
+ ANDtpt Length left_Term right_Term
+ ORtpt Length left_Term right_Term
+ BYNAMEtpt underlying_Term
+ EMPTYTREE
+ SHARED term_ASTRef
+ Application = APPLY Length fn_Term arg_Term*
+
+ TYPEAPPLY Length fn_Term arg_Type*
+ CaseDef = CASEDEF Length pat_Term rhs_Tree guard_Tree?
+ ImplicitArg = IMPLICITARG arg_Term
+ ASTRef = Nat // byte position in AST payload
+
+ Path = Constant
+ TERMREFdirect sym_ASTRef
+ TERMREFsymbol sym_ASTRef qual_Type
+ TERMREFpkg fullyQualified_NameRef
+ TERMREF possiblySigned_NameRef qual_Type
+ THIS clsRef_Type
+ RECthis recType_ASTRef
+ SHARED path_ASTRef
+
+ Constant = UNITconst
+ FALSEconst
+ TRUEconst
+ BYTEconst Int
+ SHORTconst Int
+ CHARconst Nat
+ INTconst Int
+ LONGconst LongInt
+ FLOATconst Int
+ DOUBLEconst LongInt
+ STRINGconst NameRef
+ NULLconst
+ CLASSconst Type
+ ENUMconst Path
+
+ Type = Path
+ TYPEREFdirect sym_ASTRef
+ TYPEREFsymbol sym_ASTRef qual_Type
+ TYPEREFpkg fullyQualified_NameRef
+ TYPEREF possiblySigned_NameRef qual_Type
+ RECtype parent_Type
+ SUPERtype Length this_Type underlying_Type
+ REFINEDtype Length underlying_Type refinement_NameRef info_Type
+ APPLIEDtype Length tycon_Type arg_Type*
+ TYPEBOUNDS Length low_Type high_Type
+ TYPEALIAS Length alias_Type (COVARIANT | CONTRAVARIANT)?
+ ANNOTATEDtype Length underlying_Type fullAnnotation_Term
+ ANDtype Length left_Type right_Type
+ ORtype Length left_Type right_Type
+ BIND Length boundName_NameRef bounds_Type
+ // for type-variables defined in a type pattern
+ BYNAMEtype underlying_Type
+ POLYtype Length result_Type NamesTypes // variance encoded in front of name: +/-/=
+ METHODtype Length result_Type NamesTypes // needed for refinements
+ PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements
+ SHARED type_ASTRef
+ NamesTypes = NameType*
+ NameType = paramName_NameRef typeOrBounds_ASTRef
+
+ Modifier = PRIVATE
+ INTERNAL // package private
+ PROTECTED
+ PRIVATEqualified qualifier_Type // will be dropped
+ PROTECTEDqualified qualifier_Type // will be dropped
+ ABSTRACT
+ FINAL
+ SEALED
+ CASE
+ IMPLICIT
+ LAZY
+ OVERRIDE
+ INLINE // macro
+ STATIC // mapped to static Java member
+ OBJECT // an object or its class
+ TRAIT // a trait
+ LOCAL // private[this] or protected[this]
+ SYNTHETIC // generated by Scala compiler
+ ARTIFACT // to be tagged Java Synthetic
+ MUTABLE // a var
+ LABEL // method generated as a label
+ FIELDaccessor // getter or setter
+ CASEaccessor // getter for case class param
+ COVARIANT // type param marked “+”
+ CONTRAVARIANT // type param marked “-”
+ SCALA2X // Imported from Scala2.x
+ DEFAULTparameterized // Method with default params
+ INSUPERCALL // defined in the argument of a constructor supercall
+ STABLE // Method that is assumed to be stable
+ Annotation
+ Annotation = ANNOTATION Length tycon_Type fullAnnotation_Term
+
+Note: Tree tags are grouped into 5 categories that determine what follows, and thus make it possible to compute the size of the tagged tree in a generic way.
+
+ Category 1 (tags 0-63) : tag
+ Category 2 (tags 64-95) : tag Nat
+ Category 3 (tags 96-111) : tag AST
+ Category 4 (tags 112-127): tag Nat AST
+ Category 5 (tags 128-255): tag Length <payload>
+
+Standard Section: "Positions" Assoc*
+
+ Assoc = Header startOffset_Delta? endOffset_Delta? point_Delta?
+ Header = addr_Delta << 3 + // in one Int: difference of address to last recorded node
+ hasStartDiff << 2 + // one bit indicating whether a start offset delta follows
+ hasEndDiff << 1 + // one bit indicating whether an end offset delta follows
+ hasPoint // one bit indicating whether a point delta (wrt the start offset) follows
+ // Nodes which have the same positions as their parents are omitted.
+ // offset_Deltas give the difference of the start/end offset wrt the
+ // same offset in the previously recorded node (or 0 for the first recorded node)
+ Delta = Int // difference between consecutive offsets
+
+**************************************************************************************/
+
+object TastyFormat {
+
+ final val header = Array(0x5C, 0xA1, 0xAB, 0x1F)
+ final val MajorVersion = 0
+ final val MinorVersion = 5
+
+ // Name tags
+
+ final val UTF8 = 1
+ final val QUALIFIED = 2
+ final val SIGNED = 3
+ final val EXPANDED = 4
+ final val OBJECTCLASS = 5
+ final val SUPERACCESSOR = 6
+ final val DEFAULTGETTER = 7
+ final val SHADOWED = 8
+
+ // AST tags
+
+ final val UNITconst = 2
+ final val FALSEconst = 3
+ final val TRUEconst = 4
+ final val NULLconst = 5
+ final val PRIVATE = 6
+ final val INTERNAL = 7
+ final val PROTECTED = 8
+ final val ABSTRACT = 9
+ final val FINAL = 10
+ final val SEALED = 11
+ final val CASE = 12
+ final val IMPLICIT = 13
+ final val LAZY = 14
+ final val OVERRIDE = 15
+ final val INLINE = 16
+ final val STATIC = 17
+ final val OBJECT = 18
+ final val TRAIT = 19
+ final val LOCAL = 20
+ final val SYNTHETIC = 21
+ final val ARTIFACT = 22
+ final val MUTABLE = 23
+ final val LABEL = 24
+ final val FIELDaccessor = 25
+ final val CASEaccessor = 26
+ final val COVARIANT = 27
+ final val CONTRAVARIANT = 28
+ final val SCALA2X = 29
+ final val DEFAULTparameterized = 30
+ final val INSUPERCALL = 31
+ final val STABLE = 32
+
+ final val SHARED = 64
+ final val TERMREFdirect = 65
+ final val TYPEREFdirect = 66
+ final val TERMREFpkg = 67
+ final val TYPEREFpkg = 68
+ final val RECthis = 69
+ final val BYTEconst = 70
+ final val SHORTconst = 71
+ final val CHARconst = 72
+ final val INTconst = 73
+ final val LONGconst = 74
+ final val FLOATconst = 75
+ final val DOUBLEconst = 76
+ final val STRINGconst = 77
+ final val IMPORTED = 78
+ final val RENAMED = 79
+
+ final val THIS = 96
+ final val QUALTHIS = 97
+ final val CLASSconst = 98
+ final val ENUMconst = 99
+ final val BYNAMEtype = 100
+ final val BYNAMEtpt = 101
+ final val NEW = 102
+ final val IMPLICITarg = 103
+ final val PRIVATEqualified = 104
+ final val PROTECTEDqualified = 105
+ final val RECtype = 106
+ final val SINGLETONtpt = 107
+
+ final val IDENT = 112
+ final val IDENTtpt = 113
+ final val SELECT = 114
+ final val SELECTtpt = 115
+ final val TERMREFsymbol = 116
+ final val TERMREF = 117
+ final val TYPEREFsymbol = 118
+ final val TYPEREF = 119
+ final val SELFDEF = 120
+
+ final val PACKAGE = 128
+ final val VALDEF = 129
+ final val DEFDEF = 130
+ final val TYPEDEF = 131
+ final val IMPORT = 132
+ final val TYPEPARAM = 133
+ final val PARAMS = 134
+ final val PARAM = 136
+ final val APPLY = 137
+ final val TYPEAPPLY = 138
+ final val TYPED = 139
+ final val NAMEDARG = 140
+ final val ASSIGN = 141
+ final val BLOCK = 142
+ final val IF = 143
+ final val LAMBDA = 144
+ final val MATCH = 145
+ final val RETURN = 146
+ final val TRY = 147
+ final val INLINED = 148
+ final val REPEATED = 149
+ final val BIND = 150
+ final val ALTERNATIVE = 151
+ final val UNAPPLY = 152
+ final val ANNOTATEDtype = 153
+ final val ANNOTATEDtpt = 154
+ final val CASEDEF = 155
+ final val TEMPLATE = 156
+ final val SUPER = 157
+ final val SUPERtype = 158
+ final val REFINEDtype = 159
+ final val REFINEDtpt = 160
+ final val APPLIEDtype = 161
+ final val APPLIEDtpt = 162
+ final val TYPEBOUNDS = 163
+ final val TYPEBOUNDStpt = 164
+ final val TYPEALIAS = 165
+ final val ANDtype = 166
+ final val ANDtpt = 167
+ final val ORtype = 168
+ final val ORtpt = 169
+ final val METHODtype = 170
+ final val POLYtype = 171
+ final val POLYtpt = 172
+ final val PARAMtype = 173
+ final val ANNOTATION = 174
+
+ final val firstSimpleTreeTag = UNITconst
+ final val firstNatTreeTag = SHARED
+ final val firstASTTreeTag = THIS
+ final val firstNatASTTreeTag = IDENT
+ final val firstLengthTreeTag = PACKAGE
+
+ def isParamTag(tag: Int) = tag == PARAM || tag == TYPEPARAM
+
+ def isModifierTag(tag: Int) = tag match {
+ case PRIVATE
+ | INTERNAL
+ | PROTECTED
+ | ABSTRACT
+ | FINAL
+ | SEALED
+ | CASE
+ | IMPLICIT
+ | LAZY
+ | OVERRIDE
+ | INLINE
+ | STATIC
+ | OBJECT
+ | TRAIT
+ | LOCAL
+ | SYNTHETIC
+ | ARTIFACT
+ | MUTABLE
+ | LABEL
+ | FIELDaccessor
+ | CASEaccessor
+ | COVARIANT
+ | CONTRAVARIANT
+ | SCALA2X
+ | DEFAULTparameterized
+ | INSUPERCALL
+ | STABLE
+ | ANNOTATION
+ | PRIVATEqualified
+ | PROTECTEDqualified => true
+ case _ => false
+ }
+
+ def isTypeTreeTag(tag: Int) = tag match {
+ case IDENTtpt
+ | SELECTtpt
+ | SINGLETONtpt
+ | REFINEDtpt
+ | APPLIEDtpt
+ | POLYtpt
+ | TYPEBOUNDStpt
+ | ANNOTATEDtpt
+ | ANDtpt
+ | ORtpt
+ | BYNAMEtpt => true
+ case _ => false
+ }
+
+ def nameTagToString(tag: Int): String = tag match {
+ case UTF8 => "UTF8"
+ case QUALIFIED => "QUALIFIED"
+ case SIGNED => "SIGNED"
+ case EXPANDED => "EXPANDED"
+ case OBJECTCLASS => "OBJECTCLASS"
+ case SUPERACCESSOR => "SUPERACCESSOR"
+ case DEFAULTGETTER => "DEFAULTGETTER"
+ case SHADOWED => "SHADOWED"
+ }
+
+ def astTagToString(tag: Int): String = tag match {
+ case UNITconst => "UNITconst"
+ case FALSEconst => "FALSEconst"
+ case TRUEconst => "TRUEconst"
+ case NULLconst => "NULLconst"
+ case PRIVATE => "PRIVATE"
+ case INTERNAL => "INTERNAL"
+ case PROTECTED => "PROTECTED"
+ case ABSTRACT => "ABSTRACT"
+ case FINAL => "FINAL"
+ case SEALED => "SEALED"
+ case CASE => "CASE"
+ case IMPLICIT => "IMPLICIT"
+ case LAZY => "LAZY"
+ case OVERRIDE => "OVERRIDE"
+ case INLINE => "INLINE"
+ case STATIC => "STATIC"
+ case OBJECT => "OBJECT"
+ case TRAIT => "TRAIT"
+ case LOCAL => "LOCAL"
+ case SYNTHETIC => "SYNTHETIC"
+ case ARTIFACT => "ARTIFACT"
+ case MUTABLE => "MUTABLE"
+ case LABEL => "LABEL"
+ case FIELDaccessor => "FIELDaccessor"
+ case CASEaccessor => "CASEaccessor"
+ case COVARIANT => "COVARIANT"
+ case CONTRAVARIANT => "CONTRAVARIANT"
+ case SCALA2X => "SCALA2X"
+ case DEFAULTparameterized => "DEFAULTparameterized"
+ case INSUPERCALL => "INSUPERCALL"
+ case STABLE => "STABLE"
+
+ case SHARED => "SHARED"
+ case TERMREFdirect => "TERMREFdirect"
+ case TYPEREFdirect => "TYPEREFdirect"
+ case TERMREFpkg => "TERMREFpkg"
+ case TYPEREFpkg => "TYPEREFpkg"
+ case RECthis => "RECthis"
+ case BYTEconst => "BYTEconst"
+ case SHORTconst => "SHORTconst"
+ case CHARconst => "CHARconst"
+ case INTconst => "INTconst"
+ case LONGconst => "LONGconst"
+ case FLOATconst => "FLOATconst"
+ case DOUBLEconst => "DOUBLEconst"
+ case STRINGconst => "STRINGconst"
+ case RECtype => "RECtype"
+
+ case IDENT => "IDENT"
+ case IDENTtpt => "IDENTtpt"
+ case SELECT => "SELECT"
+ case SELECTtpt => "SELECTtpt"
+ case TERMREFsymbol => "TERMREFsymbol"
+ case TERMREF => "TERMREF"
+ case TYPEREFsymbol => "TYPEREFsymbol"
+ case TYPEREF => "TYPEREF"
+
+ case PACKAGE => "PACKAGE"
+ case VALDEF => "VALDEF"
+ case DEFDEF => "DEFDEF"
+ case TYPEDEF => "TYPEDEF"
+ case IMPORT => "IMPORT"
+ case TYPEPARAM => "TYPEPARAM"
+ case PARAMS => "PARAMS"
+ case PARAM => "PARAM"
+ case IMPORTED => "IMPORTED"
+ case RENAMED => "RENAMED"
+ case APPLY => "APPLY"
+ case TYPEAPPLY => "TYPEAPPLY"
+ case NEW => "NEW"
+ case TYPED => "TYPED"
+ case NAMEDARG => "NAMEDARG"
+ case ASSIGN => "ASSIGN"
+ case BLOCK => "BLOCK"
+ case IF => "IF"
+ case LAMBDA => "LAMBDA"
+ case MATCH => "MATCH"
+ case RETURN => "RETURN"
+ case INLINED => "INLINED"
+ case TRY => "TRY"
+ case REPEATED => "REPEATED"
+ case BIND => "BIND"
+ case ALTERNATIVE => "ALTERNATIVE"
+ case UNAPPLY => "UNAPPLY"
+ case ANNOTATEDtype => "ANNOTATEDtype"
+ case ANNOTATEDtpt => "ANNOTATEDtpt"
+ case CASEDEF => "CASEDEF"
+ case IMPLICITarg => "IMPLICITarg"
+ case TEMPLATE => "TEMPLATE"
+ case SELFDEF => "SELFDEF"
+ case THIS => "THIS"
+ case QUALTHIS => "QUALTHIS"
+ case SUPER => "SUPER"
+ case CLASSconst => "CLASSconst"
+ case ENUMconst => "ENUMconst"
+ case SINGLETONtpt => "SINGLETONtpt"
+ case SUPERtype => "SUPERtype"
+ case REFINEDtype => "REFINEDtype"
+ case REFINEDtpt => "REFINEDtpt"
+ case APPLIEDtype => "APPLIEDtype"
+ case APPLIEDtpt => "APPLIEDtpt"
+ case TYPEBOUNDS => "TYPEBOUNDS"
+ case TYPEBOUNDStpt => "TYPEBOUNDStpt"
+ case TYPEALIAS => "TYPEALIAS"
+ case ANDtype => "ANDtype"
+ case ANDtpt => "ANDtpt"
+ case ORtype => "ORtype"
+ case ORtpt => "ORtpt"
+ case BYNAMEtype => "BYNAMEtype"
+ case BYNAMEtpt => "BYNAMEtpt"
+ case POLYtype => "POLYtype"
+ case POLYtpt => "POLYtpt"
+ case METHODtype => "METHODtype"
+ case PARAMtype => "PARAMtype"
+ case ANNOTATION => "ANNOTATION"
+ case PRIVATEqualified => "PRIVATEqualified"
+ case PROTECTEDqualified => "PROTECTEDqualified"
+ }
+
+ /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry.
+ * If negative, minus the number of leading non-reference trees.
+ */
+ def numRefs(tag: Int) = tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | RETURN | BIND |
+ SELFDEF | REFINEDtype => 1
+ case RENAMED | PARAMtype => 2
+ case POLYtype | METHODtype => -1
+ case _ => 0
+ }
+
+ /** Map between variances and name prefixes */
+ val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+')
+ val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1)
+}
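
The category boundaries described in the format comment line up with the first...TreeTag constants above; a hedged helper (not part of this commit) that makes the grouping explicit:

    // category of an AST tag, mirroring the table in the format comment
    // (assumes the constants above, i.e. TastyFormat._, are in scope)
    def astCategory(tag: Int): Int =
      if (tag < firstNatTreeTag) 1          // tag
      else if (tag < firstASTTreeTag) 2     // tag Nat
      else if (tag < firstNatASTTreeTag) 3  // tag AST
      else if (tag < firstLengthTreeTag) 4  // tag Nat AST
      else 5                                // tag Length <payload>
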
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala
new file mode 100644
index 000000000..26807115c
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyName.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import core.Names.TermName
+import collection.mutable
+
+abstract class TastyName
+
+object TastyName {
+
+ case class NameRef(index: Int) extends AnyVal
+
+ case class Simple(name: TermName) extends TastyName
+ case class Qualified(qualified: NameRef, selector: NameRef) extends TastyName
+ case class Signed(original: NameRef, params: List[NameRef], result: NameRef) extends TastyName
+ case class Expanded(prefix: NameRef, original: NameRef) extends TastyName
+ case class ModuleClass(module: NameRef) extends TastyName
+ case class SuperAccessor(accessed: NameRef) extends TastyName
+ case class DefaultGetter(method: NameRef, num: Int) extends TastyName
+ case class Shadowed(original: NameRef) extends TastyName
+
+ class Table extends (NameRef => TastyName) {
+ private val names = new mutable.ArrayBuffer[TastyName]
+ def add(name: TastyName) = names += name
+ def apply(ref: NameRef) = names(ref.index)
+ def contents: Iterable[TastyName] = names
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
new file mode 100644
index 000000000..c844d522e
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -0,0 +1,71 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyFormat._
+import collection.mutable
+import TastyBuffer._
+import java.util.UUID
+import core.Symbols.Symbol
+import ast.tpd
+
+class TastyPickler {
+
+ private val sections = new mutable.ArrayBuffer[(TastyName.NameRef, TastyBuffer)]
+ val uuid = UUID.randomUUID()
+
+ private val headerBuffer = {
+ val buf = new TastyBuffer(24)
+ for (ch <- header) buf.writeByte(ch.toByte)
+ buf.writeNat(MajorVersion)
+ buf.writeNat(MinorVersion)
+ buf.writeUncompressedLong(uuid.getMostSignificantBits)
+ buf.writeUncompressedLong(uuid.getLeastSignificantBits)
+ buf
+ }
+
+ val nameBuffer = new NameBuffer
+
+ def newSection(name: String, buf: TastyBuffer) =
+ sections += ((nameBuffer.nameIndex(name), buf))
+
+ def assembleParts(): Array[Byte] = {
+ def lengthWithLength(buf: TastyBuffer) = {
+ buf.assemble()
+ buf.length + natSize(buf.length)
+ }
+ val totalSize =
+ headerBuffer.length +
+ lengthWithLength(nameBuffer) + {
+ for ((nameRef, buf) <- sections) yield
+ natSize(nameRef.index) + lengthWithLength(buf)
+ }.sum
+ val all = new TastyBuffer(totalSize)
+ all.writeBytes(headerBuffer.bytes, headerBuffer.length)
+ all.writeNat(nameBuffer.length)
+ all.writeBytes(nameBuffer.bytes, nameBuffer.length)
+ for ((nameRef, buf) <- sections) {
+ all.writeNat(nameRef.index)
+ all.writeNat(buf.length)
+ all.writeBytes(buf.bytes, buf.length)
+ }
+ assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}")
+ all.bytes
+ }
+
+ /** The address in the TASTY file of a given tree, or None if unknown.
+ * Note that trees are looked up by reference equality,
+ * so one can reliably use this function only directly after `pickler`.
+ */
+ var addrOfTree: tpd.Tree => Option[Addr] = (_ => None)
+
+ /**
+ * Addresses in the TASTY file of symbols, stored by pickling.
+ * Note that symbols are looked up by reference equality,
+ * so one can reliably use this function only directly after `pickler`.
+ */
+ var addrOfSym: Symbol => Option[Addr] = (_ => None)
+
+ val treePkl = new TreePickler(this)
+}
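
A rough sketch of how the pieces fit together (hedged: the real driver lives in the Pickler phase, not in this file, and the treePkl entry point is assumed): the tree pickler fills the "ASTs" section, the position pickler adds a "Positions" section using the tree addresses recorded while pickling, and assembleParts concatenates header, name table and sections:

    // hypothetical wiring, assuming `tree: tpd.Tree` and an implicit Context
    val pickler = new TastyPickler
    pickler.treePkl.pickle(tree :: Nil)                    // assumed TreePickler entry point
    new PositionPickler(pickler, pickler.treePkl.buf.addrOfTree)
      .picklePositions(tree :: Nil)
    val tastyBytes: Array[Byte] = pickler.assembleParts()
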
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
new file mode 100644
index 000000000..0dc8d8fea
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -0,0 +1,122 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import Contexts._, Decorators._
+import printing.Texts._
+import TastyName._
+import StdNames._
+import TastyUnpickler._
+import TastyBuffer.Addr
+import util.Positions.{Position, offsetToInt}
+import collection.mutable
+
+class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
+
+ val unpickler = new TastyUnpickler(bytes)
+ import unpickler.{tastyName, unpickle}
+
+ def nameToString(name: TastyName): String = name match {
+ case Simple(name) => name.toString
+ case Qualified(qual, name) => nameRefToString(qual) + "." + nameRefToString(name)
+ case Signed(original, params, result) =>
+ i"${nameRefToString(original)}@${params.map(nameRefToString)}%,%:${nameRefToString(result)}"
+ case Expanded(prefix, original) => s"$prefix${nme.EXPAND_SEPARATOR}$original"
+ case ModuleClass(original) => nameRefToString(original) + "/MODULECLASS"
+ case SuperAccessor(accessed) => nameRefToString(accessed) + "/SUPERACCESSOR"
+ case DefaultGetter(meth, num) => nameRefToString(meth) + "/DEFAULTGETTER" + num
+ case Shadowed(original) => nameRefToString(original) + "/SHADOWED"
+ }
+
+ def nameRefToString(ref: NameRef): String = nameToString(tastyName(ref))
+
+ def printNames() =
+ for ((name, idx) <- tastyName.contents.zipWithIndex)
+ println(f"$idx%4d: " + nameToString(name))
+
+ def printContents(): Unit = {
+ println("Names:")
+ printNames()
+ println("Trees:")
+ unpickle(new TreeSectionUnpickler)
+ unpickle(new PositionSectionUnpickler)
+ }
+
+ class TreeSectionUnpickler extends SectionUnpickler[Unit]("ASTs") {
+ import TastyFormat._
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+ import reader._
+ var indent = 0
+ def newLine() = print(f"\n ${index(currentAddr) - index(startAddr)}%5d:" + " " * indent)
+ def printNat() = print(" " + readNat())
+ def printName() = {
+ val idx = readNat()
+ print(" ") ;print(idx); print("["); print(nameRefToString(NameRef(idx))); print("]")
+ }
+ def printTree(): Unit = {
+ newLine()
+ val tag = readByte()
+ print(" ");print(astTagToString(tag))
+ indent += 2
+ if (tag >= firstLengthTreeTag) {
+ val len = readNat()
+ print(s"($len)")
+ val end = currentAddr + len
+ def printTrees() = until(end)(printTree())
+ tag match {
+ case RENAMED =>
+ printName(); printName()
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND =>
+ printName(); printTrees()
+ case REFINEDtype =>
+ printName(); printTree(); printTrees()
+ case RETURN =>
+ printNat(); printTrees()
+ case METHODtype | POLYtype =>
+ printTree()
+ until(end) { printName(); printTree() }
+ case PARAMtype =>
+ printNat(); printNat()
+ case _ =>
+ printTrees()
+ }
+ if (currentAddr != end) {
+ println(s"incomplete read, current = $currentAddr, end = $end")
+ goto(end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) {
+ tag match {
+ case IDENT | SELECT | TERMREF | TYPEREF | SELFDEF => printName()
+ case _ => printNat()
+ }
+ printTree()
+ }
+ else if (tag >= firstASTTreeTag)
+ printTree()
+ else if (tag >= firstNatTreeTag)
+ tag match {
+ case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName()
+ case _ => printNat()
+ }
+ indent -= 2
+ }
+ println(i"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr")
+ println(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr")
+ while (!isAtEnd) {
+ printTree()
+ newLine()
+ }
+ }
+ }
+
+ class PositionSectionUnpickler extends SectionUnpickler[Unit]("Positions") {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
+ print(s"${reader.endAddr.index - reader.currentAddr.index}")
+ val positions = new PositionUnpickler(reader).positions
+ println(s" position bytes:")
+ val sorted = positions.toSeq.sortBy(_._1.index)
+ for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}")
+ }
+ }
+}
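
Usage is a one-liner (a sketch): given the raw bytes of a TASTY attribute and an implicit Context, the printer dumps the name table, the AST section and the position section in readable form:

    // assuming `bytes: Array[Byte]` holds a TASTY attribute
    new TastyPrinter(bytes).printContents()
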
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala
new file mode 100644
index 000000000..e583c4793
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyReader.scala
@@ -0,0 +1,141 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import TastyBuffer._
+import TastyName.NameRef
+import collection.mutable
+
+/** A byte array reader that can read bytes and natural numbers in TASTY format,
+ * and that supports reading addresses represented as natural numbers.
+ *
+ * @param bytes The array containing data
+ * @param start The position from which to read
+ * @param end The position one greater than the last byte to be read
+ * @param base The index referenced by the logical zero address Addr(0)
+ */
+class TastyReader(val bytes: Array[Byte], start: Int, end: Int, val base: Int = 0) {
+
+ def this(bytes: Array[Byte]) = this(bytes, 0, bytes.length)
+
+ private var bp: Int = start
+
+ def addr(idx: Int) = Addr(idx - base)
+ def index(addr: Addr) = addr.index + base
+
+ /** The address of the first byte to read */
+ def startAddr: Addr = addr(start)
+
+ /** The address of the next byte to read */
+ def currentAddr: Addr = addr(bp)
+
+ /** The address one greater than the last byte to read */
+ def endAddr: Addr = addr(end)
+
+ /** Have all bytes been read? */
+ def isAtEnd: Boolean = bp == end
+
+ /** A new reader over the same array with the same address base, but with
+ * specified start and end positions
+ */
+ def subReader(start: Addr, end: Addr): TastyReader =
+ new TastyReader(bytes, index(start), index(end), base)
+
+ /** Read a byte of data. */
+ def readByte(): Int = {
+ val result = bytes(bp) & 0xff
+ bp += 1
+ result
+ }
+
+ /** Returns the next byte of data as a natural number without advancing the read position */
+ def nextByte: Int = bytes(bp) & 0xff
+
+ /** Read the next `n` bytes. */
+ def readBytes(n: Int): Array[Byte] = {
+ val result = new Array[Byte](n)
+ Array.copy(bytes, bp, result, 0, n)
+ bp += n
+ result
+ }
+
+ /** Read a natural number fitting in an Int in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def readNat(): Int = readLongNat.toInt
+
+ /** Read an integer number in 2's complement big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def readInt(): Int = readLongInt.toInt
+
+ /** Read a natural number fitting in a Long in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def readLongNat(): Long = {
+ var b = 0L
+ var x = 0L
+ do {
+ b = bytes(bp)
+ x = (x << 7) | (b & 0x7f)
+ bp += 1
+ } while ((b & 0x80) == 0)
+ x
+ }
+
+ /** Read a long integer number in 2's complement big endian format, base 128. */
+ def readLongInt(): Long = {
+ var b = bytes(bp)
+ var x: Long = (b << 1).toByte >> 1 // sign extend with bit 6.
+ bp += 1
+ while ((b & 0x80) == 0) {
+ b = bytes(bp)
+ x = (x << 7) | (b & 0x7f)
+ bp += 1
+ }
+ x
+ }
+
+ /** Read an uncompressed Long stored in 8 bytes in big endian format */
+ def readUncompressedLong(): Long = {
+ var x: Long = 0
+ for (i <- 0 to 7)
+ x = (x << 8) | (readByte() & 0xff)
+ x
+ }
+
+ /** Read a natural number and return as a NameRef */
+ def readNameRef() = NameRef(readNat())
+
+ /** Read a natural number and return as an address */
+ def readAddr() = Addr(readNat())
+
+ /** Read a length number and return the absolute end address implied by it,
+ * given as <address following length field> + <length-value-read>.
+ */
+ def readEnd(): Addr = addr(readNat() + bp)
+
+ /** Set read position to the one pointed to by `addr` */
+ def goto(addr: Addr): Unit =
+ bp = index(addr)
+
+ /** Perform `op` until `end` address is reached and collect results in a list. */
+ def until[T](end: Addr)(op: => T): List[T] = {
+ val buf = new mutable.ListBuffer[T]
+ while (bp < index(end)) buf += op
+ assert(bp == index(end))
+ buf.toList
+ }
+
+ /** If before given `end` address, the result of `op`, otherwise `default` */
+ def ifBefore[T](end: Addr)(op: => T, default: T): T =
+ if (bp < index(end)) op else default
+
+ /** Perform `op` while condition `cond` holds and collect results in a list. */
+ def collectWhile[T](cond: => Boolean)(op: => T): List[T] = {
+ val buf = new mutable.ListBuffer[T]
+ while (cond) buf += op
+ buf.toList
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
new file mode 100644
index 000000000..8a1f58acd
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala
@@ -0,0 +1,95 @@
+package dotty.tools.dotc
+package core
+package tasty
+
+import scala.collection.mutable
+import TastyFormat._
+import Names.{Name, termName}
+import java.util.UUID
+
+object TastyUnpickler {
+ class UnpickleException(msg: String) extends Exception(msg)
+
+ abstract class SectionUnpickler[R](val name: String) {
+ def unpickle(reader: TastyReader, tastyName: TastyName.Table): R
+ }
+}
+
+import TastyUnpickler._
+
+class TastyUnpickler(reader: TastyReader) {
+ import reader._
+
+ def this(bytes: Array[Byte]) = this(new TastyReader(bytes))
+
+ private val sectionReader = new mutable.HashMap[String, TastyReader]
+ val tastyName = new TastyName.Table
+
+ def check(cond: Boolean, msg: => String) =
+ if (!cond) throw new UnpickleException(msg)
+
+ def readString(): String = {
+ val TastyName.Simple(name) = tastyName(readNameRef())
+ name.toString
+ }
+
+ def readName(): TastyName = {
+ import TastyName._
+ val tag = readByte()
+ val length = readNat()
+ val start = currentAddr
+ val end = start + length
+ val result = tag match {
+ case UTF8 =>
+ goto(end)
+ Simple(termName(bytes, start.index, length))
+ case QUALIFIED =>
+ Qualified(readNameRef(), readNameRef())
+ case SIGNED =>
+ val original = readNameRef()
+ val result = readNameRef()
+ val params = until(end)(readNameRef())
+ Signed(original, params, result)
+ case EXPANDED =>
+ Expanded(readNameRef(), readNameRef())
+ case OBJECTCLASS =>
+ ModuleClass(readNameRef())
+ case SUPERACCESSOR =>
+ SuperAccessor(readNameRef())
+ case DEFAULTGETTER =>
+ DefaultGetter(readNameRef(), readNat())
+ case SHADOWED =>
+ Shadowed(readNameRef())
+ }
+ assert(currentAddr == end, s"bad name $result $start $currentAddr $end")
+ result
+ }
+
+ private def readHeader(): UUID = {
+ for (i <- 0 until header.length)
+ check(readByte() == header(i), "not a TASTy file")
+ val major = readNat()
+ val minor = readNat()
+ check(major == MajorVersion && minor <= MinorVersion,
+ s"""TASTy signature has wrong version.
+ | expected: $MajorVersion.$MinorVersion
+ | found : $major.$minor""".stripMargin)
+ new UUID(readUncompressedLong(), readUncompressedLong())
+ }
+
+ val uuid = readHeader()
+
+ locally {
+ until(readEnd()) { tastyName.add(readName()) }
+ while (!isAtEnd) {
+ val secName = readString()
+ val secEnd = readEnd()
+ sectionReader(secName) = new TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index)
+ goto(secEnd)
+ }
+ }
+
+ def unpickle[R](sec: SectionUnpickler[R]): Option[R] =
+ for (reader <- sectionReader.get(sec.name)) yield
+ sec.unpickle(reader, tastyName)
+}
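
A minimal sketch of a client-defined section unpickler (the TreeSectionUnpickler and PositionsSectionUnpickler actually used by DottyUnpickler are defined in its companion object):

    // hypothetical section unpickler that just reports the size of the "ASTs" section,
    // assuming `bytes: Array[Byte]` holds a TASTY attribute
    class SizeSectionUnpickler extends TastyUnpickler.SectionUnpickler[Int]("ASTs") {
      def unpickle(reader: TastyReader, tastyName: TastyName.Table): Int =
        reader.endAddr.index - reader.startAddr.index
    }
    val astSize: Option[Int] = new TastyUnpickler(bytes).unpickle(new SizeSectionUnpickler)
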
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
new file mode 100644
index 000000000..6c7982d78
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala
@@ -0,0 +1,188 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import util.Util.{bestFit, dble}
+import TastyBuffer.{Addr, AddrWidth}
+import config.Printers.pickling
+import ast.untpd.Tree
+
+class TreeBuffer extends TastyBuffer(50000) {
+
+ private final val ItemsOverOffsets = 2
+ private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets)
+ private var offsets = new Array[Int](initialOffsetSize)
+ private var isRelative = new Array[Boolean](initialOffsetSize)
+ private var delta: Array[Int] = _
+ private var numOffsets = 0
+
+ /** A map from trees to the address at which a tree is pickled. */
+ private val treeAddrs = new java.util.IdentityHashMap[Tree, Any] // really: Addr | Null
+
+ def registerTreeAddr(tree: Tree): Addr = treeAddrs.get(tree) match {
+ case null => treeAddrs.put(tree, currentAddr); currentAddr
+ case addr: Addr => addr
+ }
+
+ def addrOfTree(tree: Tree): Option[Addr] = treeAddrs.get(tree) match {
+ case null => None
+ case addr: Addr => Some(addr)
+ }
+
+ private def offset(i: Int): Addr = Addr(offsets(i))
+
+ private def keepOffset(relative: Boolean): Unit = {
+ if (numOffsets == offsets.length) {
+ offsets = dble(offsets)
+ isRelative = dble(isRelative)
+ }
+ offsets(numOffsets) = length
+ isRelative(numOffsets) = relative
+ numOffsets += 1
+ }
+
+ /** Reserve space for a reference, to be adjusted later */
+ def reserveRef(relative: Boolean): Addr = {
+ val addr = currentAddr
+ keepOffset(relative)
+ reserveAddr()
+ addr
+ }
+
+ /** Write reference right adjusted into freshly reserved field. */
+ def writeRef(target: Addr) = {
+ keepOffset(relative = false)
+ fillAddr(reserveAddr(), target)
+ }
+
+ /** Fill previously reserved field with a reference */
+ def fillRef(at: Addr, target: Addr, relative: Boolean) = {
+ val addr = if (relative) target.relativeTo(at) else target
+ fillAddr(at, addr)
+ }
+
+ /** The amount by which the bytes at the given address are shifted under compression */
+ def deltaAt(at: Addr): Int = {
+ val idx = bestFit(offsets, numOffsets, at.index - 1)
+ if (idx < 0) 0 else delta(idx)
+ }
+
+ /** The address to which `x` is translated under compression */
+ def adjusted(x: Addr): Addr = x - deltaAt(x)
+
+ /** Compute all shift-deltas */
+ private def computeDeltas() = {
+ delta = new Array[Int](numOffsets)
+ var lastDelta = 0
+ var i = 0
+ while (i < numOffsets) {
+ val off = offset(i)
+ val skippedOff = skipZeroes(off)
+ val skippedCount = skippedOff.index - off.index
+ assert(skippedCount < AddrWidth, s"unset field at position $off")
+ lastDelta += skippedCount
+ delta(i) = lastDelta
+ i += 1
+ }
+ }
+
+ /** The absolute or relative adjusted address at index `i` of `offsets` array*/
+ private def adjustedOffset(i: Int): Addr = {
+ val at = offset(i)
+ val original = getAddr(at)
+ if (isRelative(i)) {
+ val start = skipNat(at)
+ val len1 = original + delta(i) - deltaAt(original + start.index)
+ val len2 = adjusted(original + start.index) - adjusted(start).index
+ assert(len1 == len2,
+ s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2")
+ len1
+ } else adjusted(original)
+ }
+
+ /** Adjust all offsets according to previously computed deltas */
+ private def adjustOffsets(): Unit = {
+ for (i <- 0 until numOffsets) {
+ val corrected = adjustedOffset(i)
+ fillAddr(offset(i), corrected)
+ }
+ }
+
+ /** Adjust deltas to also take into account references that will shrink (and thereby
+ * generate additional zeroes that can be skipped) due to previously
+ * computed adjustments.
+ */
+ private def adjustDeltas(): Int = {
+ val delta1 = new Array[Int](delta.length)
+ var lastDelta = 0
+ var i = 0
+ while (i < numOffsets) {
+ val corrected = adjustedOffset(i)
+ lastDelta += AddrWidth - TastyBuffer.natSize(corrected.index)
+ delta1(i) = lastDelta
+ i += 1
+ }
+ val saved =
+ if (numOffsets == 0) 0
+ else delta1(numOffsets - 1) - delta(numOffsets - 1)
+ delta = delta1
+ saved
+ }
+
+ /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */
+ private def compress(): Int = {
+ var lastDelta = 0
+ var start = 0
+ var i = 0
+ var wasted = 0
+ def shift(end: Int) =
+ Array.copy(bytes, start, bytes, start - lastDelta, end - start)
+ while (i < numOffsets) {
+ val next = offsets(i)
+ shift(next)
+ start = next + delta(i) - lastDelta
+ val pastZeroes = skipZeroes(Addr(next)).index
+ assert(pastZeroes >= start, s"something's wrong: eliminated non-zero")
+ wasted += (pastZeroes - start)
+ lastDelta = delta(i)
+ i += 1
+ }
+ shift(length)
+ length -= lastDelta
+ wasted
+ }
+
+ def adjustTreeAddrs(): Unit = {
+ val it = treeAddrs.keySet.iterator
+ while (it.hasNext) {
+ val tree = it.next
+ treeAddrs.get(tree) match {
+ case addr: Addr => treeAddrs.put(tree, adjusted(addr))
+ case addrs: List[Addr] => treeAddrs.put(tree, addrs.map(adjusted))
+ }
+ }
+ }
+
+ /** Final assembly, involving the following steps:
+ * - compute deltas
+ * - adjust deltas until additional savings are < 1% of total
+ * - adjust offsets according to the adjusted deltas
+ * - shrink buffer, skipping zeroes.
+ */
+ def compactify(): Unit = {
+ val origLength = length
+ computeDeltas()
+ //println(s"offsets: ${offsets.take(numOffsets).deep}")
+ //println(s"deltas: ${delta.take(numOffsets).deep}")
+ var saved = 0
+ do {
+ saved = adjustDeltas()
+ pickling.println(s"adjusting deltas, saved = $saved")
+ } while (saved > 0 && length / saved < 100)
+ adjustOffsets()
+ adjustTreeAddrs()
+ val wasted = compress()
+ pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now.
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
new file mode 100644
index 000000000..80270aa25
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -0,0 +1,641 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import ast.Trees._
+import ast.untpd
+import TastyFormat._
+import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._
+import collection.mutable
+import typer.Inliner
+import NameOps._
+import StdNames.nme
+import TastyBuffer._
+import TypeApplications._
+
+class TreePickler(pickler: TastyPickler) {
+ val buf = new TreeBuffer
+ pickler.newSection("ASTs", buf)
+ import buf._
+ import pickler.nameBuffer.{nameIndex, fullNameIndex}
+ import ast.tpd._
+
+ private val symRefs = new mutable.HashMap[Symbol, Addr]
+ private val forwardSymRefs = new mutable.HashMap[Symbol, List[Addr]]
+ private val pickledTypes = new java.util.IdentityHashMap[Type, Any] // Value type is really Addr, but that's not compatible with null
+
+ private def withLength(op: => Unit) = {
+ val lengthAddr = reserveRef(relative = true)
+ op
+ fillRef(lengthAddr, currentAddr, relative = true)
+ }
+
+ def addrOfSym(sym: Symbol): Option[Addr] = {
+ symRefs.get(sym)
+ }
+
+ def preRegister(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case tree: MemberDef =>
+ if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr
+ case _ =>
+ }
+
+ def registerDef(sym: Symbol): Unit = {
+ symRefs(sym) = currentAddr
+ forwardSymRefs.get(sym) match {
+ case Some(refs) =>
+ refs.foreach(fillRef(_, currentAddr, relative = false))
+ forwardSymRefs -= sym
+ case None =>
+ }
+ }
+
+ private def pickleName(name: Name): Unit = writeNat(nameIndex(name).index)
+ private def pickleName(name: TastyName): Unit = writeNat(nameIndex(name).index)
+ private def pickleNameAndSig(name: Name, sig: Signature) = {
+ val Signature(params, result) = sig
+ pickleName(TastyName.Signed(nameIndex(name), params.map(fullNameIndex), fullNameIndex(result)))
+ }
+
+ private def pickleName(sym: Symbol)(implicit ctx: Context): Unit = {
+ def encodeSuper(name: Name): TastyName.NameRef =
+ if (sym is Flags.SuperAccessor) {
+ val SuperAccessorName(n) = name
+ nameIndex(TastyName.SuperAccessor(nameIndex(n)))
+ }
+ else nameIndex(name)
+ val nameRef =
+ if (sym is Flags.ExpandedName)
+ nameIndex(
+ TastyName.Expanded(
+ nameIndex(sym.name.expandedPrefix),
+ encodeSuper(sym.name.unexpandedName)))
+ else encodeSuper(sym.name)
+ writeNat(nameRef.index)
+ }
+
+ private def pickleSymRef(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+ case Some(label) =>
+ if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
+ case None =>
+ // See pos/t1957.scala for an example where this can happen.
+ // I believe it's a bug in typer: the type of an implicit argument refers
+ // to a closure parameter outside the closure itself. TODO: track this down, so that we
+ // can eliminate this case.
+ ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
+ pickleForwardSymRef(sym)
+ }
+
+ private def pickleForwardSymRef(sym: Symbol)(implicit ctx: Context) = {
+ val ref = reserveRef(relative = false)
+ assert(!sym.is(Flags.Package), sym)
+ forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil)
+ }
+
+ private def isLocallyDefined(sym: Symbol)(implicit ctx: Context) = symRefs.get(sym) match {
+ case Some(label) => assert(sym.exists); label != NoAddr
+ case None => false
+ }
+
+ def pickleConstant(c: Constant)(implicit ctx: Context): Unit = c.tag match {
+ case UnitTag =>
+ writeByte(UNITconst)
+ case BooleanTag =>
+ writeByte(if (c.booleanValue) TRUEconst else FALSEconst)
+ case ByteTag =>
+ writeByte(BYTEconst)
+ writeInt(c.byteValue)
+ case ShortTag =>
+ writeByte(SHORTconst)
+ writeInt(c.shortValue)
+ case CharTag =>
+ writeByte(CHARconst)
+ writeNat(c.charValue)
+ case IntTag =>
+ writeByte(INTconst)
+ writeInt(c.intValue)
+ case LongTag =>
+ writeByte(LONGconst)
+ writeLongInt(c.longValue)
+ case FloatTag =>
+ writeByte(FLOATconst)
+ writeInt(java.lang.Float.floatToRawIntBits(c.floatValue))
+ case DoubleTag =>
+ writeByte(DOUBLEconst)
+ writeLongInt(java.lang.Double.doubleToRawLongBits(c.doubleValue))
+ case StringTag =>
+ writeByte(STRINGconst)
+ writeNat(nameIndex(c.stringValue).index)
+ case NullTag =>
+ writeByte(NULLconst)
+ case ClazzTag =>
+ writeByte(CLASSconst)
+ pickleType(c.typeValue)
+ case EnumTag =>
+ writeByte(ENUMconst)
+ pickleType(c.symbolValue.termRef)
+ }
+
+ def pickleType(tpe0: Type, richTypes: Boolean = false)(implicit ctx: Context): Unit = try {
+ val tpe = tpe0.stripTypeVar
+ val prev = pickledTypes.get(tpe)
+ if (prev == null) {
+ pickledTypes.put(tpe, currentAddr)
+ pickleNewType(tpe, richTypes)
+ }
+ else {
+ writeByte(SHARED)
+ writeRef(prev.asInstanceOf[Addr])
+ }
+ } catch {
+ case ex: AssertionError =>
+ println(i"error when pickling type $tpe0")
+ throw ex
+ }
+
+ private def pickleNewType(tpe: Type, richTypes: Boolean)(implicit ctx: Context): Unit = try { tpe match {
+ case AppliedType(tycon, args) =>
+ writeByte(APPLIEDtype)
+ withLength { pickleType(tycon); args.foreach(pickleType(_)) }
+ case ConstantType(value) =>
+ pickleConstant(value)
+ case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
+ pickleType(tpe.superType)
+ case tpe: WithFixedSym =>
+ val sym = tpe.symbol
+ def pickleRef() =
+ if (tpe.prefix == NoPrefix) {
+ writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect)
+ pickleSymRef(sym)
+ }
+ else {
+ assert(tpe.symbol.isClass)
+ assert(tpe.symbol.is(Flags.Scala2x), tpe.symbol.showLocated)
+ writeByte(TYPEREF) // should be changed to a new entry that keeps track of prefix, symbol & owner
+ pickleName(tpe.name)
+ pickleType(tpe.prefix)
+ }
+ if (sym.is(Flags.Package)) {
+ writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg)
+ pickleName(qualifiedName(sym))
+ }
+ else if (sym is Flags.BindDefinedType) {
+ registerDef(sym)
+ writeByte(BIND)
+ withLength {
+ pickleName(sym.name)
+ pickleType(sym.info)
+ pickleRef()
+ }
+ }
+ else pickleRef()
+ case tpe: TermRefWithSignature =>
+ if (tpe.symbol.is(Flags.Package)) picklePackageRef(tpe.symbol)
+ else {
+ writeByte(TERMREF)
+ pickleNameAndSig(tpe.name, tpe.signature); pickleType(tpe.prefix)
+ }
+ case tpe: NamedType =>
+ if (isLocallyDefined(tpe.symbol)) {
+ writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol)
+ pickleSymRef(tpe.symbol); pickleType(tpe.prefix)
+ } else {
+ writeByte(if (tpe.isType) TYPEREF else TERMREF)
+ pickleName(tpe.name); pickleType(tpe.prefix)
+ }
+ case tpe: ThisType =>
+ if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot)
+ picklePackageRef(tpe.cls)
+ else {
+ writeByte(THIS)
+ pickleType(tpe.tref)
+ }
+ case tpe: SuperType =>
+ writeByte(SUPERtype)
+ withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)}
+ case tpe: RecThis =>
+ writeByte(RECthis)
+ val binderAddr = pickledTypes.get(tpe.binder)
+ assert(binderAddr != null, tpe.binder)
+ writeRef(binderAddr.asInstanceOf[Addr])
+ case tpe: SkolemType =>
+ pickleType(tpe.info)
+ case tpe: RefinedType =>
+ writeByte(REFINEDtype)
+ withLength {
+ pickleName(tpe.refinedName)
+ pickleType(tpe.parent)
+ pickleType(tpe.refinedInfo, richTypes = true)
+ }
+ case tpe: RecType =>
+ writeByte(RECtype)
+ pickleType(tpe.parent)
+ case tpe: TypeAlias =>
+ writeByte(TYPEALIAS)
+ withLength {
+ pickleType(tpe.alias, richTypes)
+ tpe.variance match {
+ case 1 => writeByte(COVARIANT)
+ case -1 => writeByte(CONTRAVARIANT)
+ case 0 =>
+ }
+ }
+ case tpe: TypeBounds =>
+ writeByte(TYPEBOUNDS)
+ withLength { pickleType(tpe.lo, richTypes); pickleType(tpe.hi, richTypes) }
+ case tpe: AnnotatedType =>
+ writeByte(ANNOTATEDtype)
+ withLength { pickleType(tpe.tpe, richTypes); pickleTree(tpe.annot.tree) }
+ case tpe: AndOrType =>
+ writeByte(if (tpe.isAnd) ANDtype else ORtype)
+ withLength { pickleType(tpe.tp1, richTypes); pickleType(tpe.tp2, richTypes) }
+ case tpe: ExprType =>
+ writeByte(BYNAMEtype)
+ pickleType(tpe.underlying)
+ case tpe: PolyType =>
+ writeByte(POLYtype)
+ val paramNames = tpe.typeParams.map(tparam =>
+ varianceToPrefix(tparam.paramVariance) +: tparam.paramName)
+ pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds)
+ case tpe: MethodType if richTypes =>
+ writeByte(METHODtype)
+ pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes)
+ case tpe: PolyParam =>
+ if (!pickleParamType(tpe))
+ // TODO figure out why this case arises in e.g. pickling AbstractFileReader.
+ ctx.typerState.constraint.entry(tpe) match {
+ case TypeBounds(lo, hi) if lo eq hi => pickleNewType(lo, richTypes)
+ case _ => assert(false, s"orphan poly parameter: $tpe")
+ }
+ case tpe: MethodParam =>
+ assert(pickleParamType(tpe), s"orphan method parameter: $tpe")
+ case tpe: LazyRef =>
+ pickleType(tpe.ref)
+ }} catch {
+ case ex: AssertionError =>
+ println(i"error while pickling type $tpe")
+ throw ex
+ }
+
+ def picklePackageRef(pkg: Symbol)(implicit ctx: Context): Unit = {
+ writeByte(TERMREFpkg)
+ pickleName(qualifiedName(pkg))
+ }
+
+ def pickleMethodic(result: Type, names: List[Name], types: List[Type])(implicit ctx: Context) =
+ withLength {
+ pickleType(result, richTypes = true)
+ (names, types).zipped.foreach { (name, tpe) =>
+ pickleName(name); pickleType(tpe)
+ }
+ }
+
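+ /** Pickle `tpe` as a PARAMtype node that refers back to the address of its binder
+ * (the enclosing PolyType or MethodType), followed by the parameter index.
+ * Returns false if the binder has not been pickled yet, in which case the caller
+ * has to fall back to another representation.
+ */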
+ def pickleParamType(tpe: ParamType)(implicit ctx: Context): Boolean = {
+ val binder = pickledTypes.get(tpe.binder)
+ val pickled = binder != null
+ if (pickled) {
+ writeByte(PARAMtype)
+ withLength { writeRef(binder.asInstanceOf[Addr]); writeNat(tpe.paramNum) }
+ }
+ pickled
+ }
+
+ def pickleTpt(tpt: Tree)(implicit ctx: Context): Unit =
+ pickleTree(tpt)
+
+ def pickleTreeUnlessEmpty(tree: Tree)(implicit ctx: Context): Unit =
+ if (!tree.isEmpty) pickleTree(tree)
+
+ def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(implicit ctx: Context) = {
+ assert(symRefs(sym) == NoAddr, sym)
+ registerDef(sym)
+ writeByte(tag)
+ withLength {
+ pickleName(sym)
+ pickleParams
+ tpt match {
+ case templ: Template => pickleTree(tpt)
+ case _ if tpt.isType => pickleTpt(tpt)
+ }
+ pickleTreeUnlessEmpty(rhs)
+ pickleModifiers(sym)
+ }
+ }
+
+ def pickleParam(tree: Tree)(implicit ctx: Context): Unit = {
+ registerTreeAddr(tree)
+ tree match {
+ case tree: ValDef => pickleDef(PARAM, tree.symbol, tree.tpt)
+ case tree: DefDef => pickleDef(PARAM, tree.symbol, tree.tpt, tree.rhs)
+ case tree: TypeDef => pickleDef(TYPEPARAM, tree.symbol, tree.rhs)
+ }
+ }
+
+ def pickleParams(trees: List[Tree])(implicit ctx: Context): Unit = {
+ trees.foreach(preRegister)
+ trees.foreach(pickleParam)
+ }
+
+ def pickleStats(stats: List[Tree])(implicit ctx: Context) = {
+ stats.foreach(preRegister)
+ stats.foreach(stat => if (!stat.isEmpty) pickleTree(stat))
+ }
+
+ def pickleTree(tree: Tree)(implicit ctx: Context): Unit = {
+ val addr = registerTreeAddr(tree)
+ if (addr != currentAddr) {
+ writeByte(SHARED)
+ writeRef(addr)
+ }
+ else
+ try tree match {
+ case Ident(name) =>
+ tree.tpe match {
+ case tp: TermRef if name != nme.WILDCARD =>
+ // wildcards are pattern bound, need to be preserved as ids.
+ pickleType(tp)
+ case _ =>
+ writeByte(if (tree.isType) IDENTtpt else IDENT)
+ pickleName(name)
+ pickleType(tree.tpe)
+ }
+ case This(qual) =>
+ if (qual.isEmpty) pickleType(tree.tpe)
+ else {
+ writeByte(QUALTHIS)
+ val ThisType(tref) = tree.tpe
+ pickleTree(qual.withType(tref))
+ }
+ case Select(qual, name) =>
+ writeByte(if (name.isTypeName) SELECTtpt else SELECT)
+ val realName = tree.tpe match {
+ case tp: NamedType if tp.name.isShadowedName => tp.name
+ case _ => name
+ }
+ val sig = tree.tpe.signature
+ if (sig == Signature.NotAMethod) pickleName(realName)
+ else pickleNameAndSig(realName, sig)
+ pickleTree(qual)
+ case Apply(fun, args) =>
+ writeByte(APPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTree)
+ }
+ case TypeApply(fun, args) =>
+ writeByte(TYPEAPPLY)
+ withLength {
+ pickleTree(fun)
+ args.foreach(pickleTpt)
+ }
+ case Literal(const1) =>
+ pickleConstant {
+ tree.tpe match {
+ case ConstantType(const2) => const2
+ case _ => const1
+ }
+ }
+ case Super(qual, mix) =>
+ writeByte(SUPER)
+ withLength {
+ pickleTree(qual);
+ if (!mix.isEmpty) {
+ val SuperType(_, mixinType: TypeRef) = tree.tpe
+ pickleTree(mix.withType(mixinType))
+ }
+ }
+ case New(tpt) =>
+ writeByte(NEW)
+ pickleTpt(tpt)
+ case Typed(expr, tpt) =>
+ writeByte(TYPED)
+ withLength { pickleTree(expr); pickleTpt(tpt) }
+ case NamedArg(name, arg) =>
+ writeByte(NAMEDARG)
+ withLength { pickleName(name); pickleTree(arg) }
+ case Assign(lhs, rhs) =>
+ writeByte(ASSIGN)
+ withLength { pickleTree(lhs); pickleTree(rhs) }
+ case Block(stats, expr) =>
+ writeByte(BLOCK)
+ stats.foreach(preRegister)
+ withLength { pickleTree(expr); stats.foreach(pickleTree) }
+ case If(cond, thenp, elsep) =>
+ writeByte(IF)
+ withLength { pickleTree(cond); pickleTree(thenp); pickleTree(elsep) }
+ case Closure(env, meth, tpt) =>
+ writeByte(LAMBDA)
+ assert(env.isEmpty)
+ withLength {
+ pickleTree(meth)
+ if (tpt.tpe.exists) pickleTpt(tpt)
+ }
+ case Match(selector, cases) =>
+ writeByte(MATCH)
+ withLength { pickleTree(selector); cases.foreach(pickleTree) }
+ case CaseDef(pat, guard, rhs) =>
+ writeByte(CASEDEF)
+ withLength { pickleTree(pat); pickleTree(rhs); pickleTreeUnlessEmpty(guard) }
+ case Return(expr, from) =>
+ writeByte(RETURN)
+ withLength { pickleSymRef(from.symbol); pickleTreeUnlessEmpty(expr) }
+ case Try(block, cases, finalizer) =>
+ writeByte(TRY)
+ withLength { pickleTree(block); cases.foreach(pickleTree); pickleTreeUnlessEmpty(finalizer) }
+ case SeqLiteral(elems, elemtpt) =>
+ writeByte(REPEATED)
+ withLength { pickleTree(elemtpt); elems.foreach(pickleTree) }
+ case Inlined(call, bindings, expansion) =>
+ writeByte(INLINED)
+ bindings.foreach(preRegister)
+ withLength { pickleTree(call); pickleTree(expansion); bindings.foreach(pickleTree) }
+ case Bind(name, body) =>
+ registerDef(tree.symbol)
+ writeByte(BIND)
+ withLength { pickleName(name); pickleType(tree.symbol.info); pickleTree(body) }
+ case Alternative(alts) =>
+ writeByte(ALTERNATIVE)
+ withLength { alts.foreach(pickleTree) }
+ case UnApply(fun, implicits, patterns) =>
+ writeByte(UNAPPLY)
+ withLength {
+ pickleTree(fun)
+ for (implicitArg <- implicits) {
+ writeByte(IMPLICITarg)
+ pickleTree(implicitArg)
+ }
+ pickleType(tree.tpe)
+ patterns.foreach(pickleTree)
+ }
+ case tree: ValDef =>
+ pickleDef(VALDEF, tree.symbol, tree.tpt, tree.rhs)
+ case tree: DefDef =>
+ def pickleAllParams = {
+ pickleParams(tree.tparams)
+ for (vparams <- tree.vparamss) {
+ writeByte(PARAMS)
+ withLength { pickleParams(vparams) }
+ }
+ }
+ pickleDef(DEFDEF, tree.symbol, tree.tpt, tree.rhs, pickleAllParams)
+ case tree: TypeDef =>
+ pickleDef(TYPEDEF, tree.symbol, tree.rhs)
+ case tree: Template =>
+ registerDef(tree.symbol)
+ writeByte(TEMPLATE)
+ val (params, rest) = tree.body partition {
+ case stat: TypeDef => stat.symbol is Flags.Param
+ case stat: ValOrDefDef =>
+ stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter
+ case _ => false
+ }
+ withLength {
+ pickleParams(params)
+ tree.parents.foreach(pickleTree)
+ val cinfo @ ClassInfo(_, _, _, _, selfInfo) = tree.symbol.owner.info
+ if ((selfInfo ne NoType) || !tree.self.isEmpty) {
+ writeByte(SELFDEF)
+ pickleName(tree.self.name)
+
+ if (!tree.self.tpt.isEmpty) pickleTree(tree.self.tpt)
+ else {
+ if (!tree.self.isEmpty) registerTreeAddr(tree.self)
+ pickleType {
+ cinfo.selfInfo match {
+ case sym: Symbol => sym.info
+ case tp: Type => tp
+ }
+ }
+ }
+ }
+ pickleStats(tree.constr :: rest)
+ }
+ case Import(expr, selectors) =>
+ writeByte(IMPORT)
+ withLength {
+ pickleTree(expr)
+ selectors foreach {
+ case Thicket((from @ Ident(_)) :: (to @ Ident(_)) :: Nil) =>
+ pickleSelector(IMPORTED, from)
+ pickleSelector(RENAMED, to)
+ case id @ Ident(_) =>
+ pickleSelector(IMPORTED, id)
+ }
+ }
+ case PackageDef(pid, stats) =>
+ writeByte(PACKAGE)
+ withLength { pickleType(pid.tpe); pickleStats(stats) }
+ case tree: TypeTree =>
+ pickleType(tree.tpe)
+ case SingletonTypeTree(ref) =>
+ writeByte(SINGLETONtpt)
+ pickleTree(ref)
+ case RefinedTypeTree(parent, refinements) =>
+ if (refinements.isEmpty) pickleTree(parent)
+ else {
+ val refineCls = refinements.head.symbol.owner.asClass
+ pickledTypes.put(refineCls.typeRef, currentAddr)
+ writeByte(REFINEDtpt)
+ refinements.foreach(preRegister)
+ withLength { pickleTree(parent); refinements.foreach(pickleTree) }
+ }
+ case AppliedTypeTree(tycon, args) =>
+ writeByte(APPLIEDtpt)
+ withLength { pickleTree(tycon); args.foreach(pickleTree) }
+ case AndTypeTree(tp1, tp2) =>
+ writeByte(ANDtpt)
+ withLength { pickleTree(tp1); pickleTree(tp2) }
+ case OrTypeTree(tp1, tp2) =>
+ writeByte(ORtpt)
+ withLength { pickleTree(tp1); pickleTree(tp2) }
+ case ByNameTypeTree(tp) =>
+ writeByte(BYNAMEtpt)
+ pickleTree(tp)
+ case Annotated(tree, annot) =>
+ writeByte(ANNOTATEDtpt)
+ withLength { pickleTree(tree); pickleTree(annot.tree) }
+ case PolyTypeTree(tparams, body) =>
+ writeByte(POLYtpt)
+ withLength { pickleParams(tparams); pickleTree(body) }
+ case TypeBoundsTree(lo, hi) =>
+ writeByte(TYPEBOUNDStpt)
+ withLength { pickleTree(lo); pickleTree(hi) }
+ }
+ catch {
+ case ex: AssertionError =>
+ println(i"error when pickling tree $tree")
+ throw ex
+ }
+ }
+
+ def pickleSelector(tag: Int, id: untpd.Ident)(implicit ctx: Context): Unit = {
+ registerTreeAddr(id)
+ writeByte(tag)
+ pickleName(id.name)
+ }
+
+ def qualifiedName(sym: Symbol)(implicit ctx: Context): TastyName =
+ if (sym.isRoot || sym.owner.isRoot) TastyName.Simple(sym.name.toTermName)
+ else TastyName.Qualified(nameIndex(qualifiedName(sym.owner)), nameIndex(sym.name))
+
+ def pickleModifiers(sym: Symbol)(implicit ctx: Context): Unit = {
+ import Flags._
+ val flags = sym.flags
+ val privateWithin = sym.privateWithin
+ if (privateWithin.exists) {
+ writeByte(if (flags is Protected) PROTECTEDqualified else PRIVATEqualified)
+ pickleType(privateWithin.typeRef)
+ }
+ if (flags is Private) writeByte(PRIVATE)
+ if (flags is Protected) if (!privateWithin.exists) writeByte(PROTECTED)
+ if ((flags is Final) && !(sym is Module)) writeByte(FINAL)
+ if (flags is Case) writeByte(CASE)
+ if (flags is Override) writeByte(OVERRIDE)
+ if (flags is Inline) writeByte(INLINE)
+ if (flags is JavaStatic) writeByte(STATIC)
+ if (flags is Module) writeByte(OBJECT)
+ if (flags is Local) writeByte(LOCAL)
+ if (flags is Synthetic) writeByte(SYNTHETIC)
+ if (flags is Artifact) writeByte(ARTIFACT)
+ if (flags is Scala2x) writeByte(SCALA2X)
+ if (flags is InSuperCall) writeByte(INSUPERCALL)
+ if (sym.isTerm) {
+ if (flags is Implicit) writeByte(IMPLICIT)
+ if ((flags is Lazy) && !(sym is Module)) writeByte(LAZY)
+ if (flags is AbsOverride) { writeByte(ABSTRACT); writeByte(OVERRIDE) }
+ if (flags is Mutable) writeByte(MUTABLE)
+ if (flags is Accessor) writeByte(FIELDaccessor)
+ if (flags is CaseAccessor) writeByte(CASEaccessor)
+ if (flags is DefaultParameterized) writeByte(DEFAULTparameterized)
+ if (flags is Stable) writeByte(STABLE)
+ } else {
+ if (flags is Sealed) writeByte(SEALED)
+ if (flags is Abstract) writeByte(ABSTRACT)
+ if (flags is Trait) writeByte(TRAIT)
+ if (flags is Covariant) writeByte(COVARIANT)
+ if (flags is Contravariant) writeByte(CONTRAVARIANT)
+ }
+ sym.annotations.foreach(pickleAnnotation)
+ }
+
+ def pickleAnnotation(ann: Annotation)(implicit ctx: Context) =
+ if (ann.symbol != defn.BodyAnnot) { // inline bodies are reconstituted automatically when unpickling
+ writeByte(ANNOTATION)
+ withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
+ }
+
+ def pickle(trees: List[Tree])(implicit ctx: Context) = {
+ trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree))
+ assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, % when pickling ${ctx.source}")
+ }
+
+ def compactify() = {
+ buf.compactify()
+
+ def updateMapWithDeltas[T](mp: collection.mutable.Map[T, Addr]) =
+ for (key <- mp.keysIterator.toBuffer[T]) mp(key) = adjusted(mp(key))
+
+ updateMapWithDeltas(symRefs)
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
new file mode 100644
index 000000000..eba9ab533
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -0,0 +1,1161 @@
+package dotty.tools
+package dotc
+package core
+package tasty
+
+import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
+import StdNames._, Denotations._, Flags._, Constants._, Annotations._
+import util.Positions._
+import ast.{tpd, Trees, untpd}
+import Trees._
+import Decorators._
+import TastyUnpickler._, TastyBuffer._
+import scala.annotation.{tailrec, switch}
+import scala.collection.mutable.ListBuffer
+import scala.collection.{ mutable, immutable }
+import config.Printers.pickling
+
+/** Unpickler for typed trees
+ * @param reader the reader from which to unpickle
+ * @param tastyName the name table
+ * @param posUnpicklerOpt the unpickler for positions, if one exists
+ */
+class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table, posUnpicklerOpt: Option[PositionUnpickler]) {
+ import TastyFormat._
+ import TastyName._
+ import TreeUnpickler._
+ import tpd._
+
+ /** A map from addresses of definition entries to the symbols they define */
+ private val symAtAddr = new mutable.HashMap[Addr, Symbol]
+
+ /** A temporary map from addresses of definition entries to the trees they define.
+ * Used to remember trees of symbols that are created by a completion. Emptied
+ * once the tree is inlined into a larger tree.
+ */
+ private val treeAtAddr = new mutable.HashMap[Addr, Tree]
+
+ /** A map from addresses of type entries to the types they define.
+ * Currently only populated for types that might be recursively referenced
+ * from within themselves (i.e. RefinedTypes, PolyTypes, MethodTypes).
+ */
+ private val typeAtAddr = new mutable.HashMap[Addr, Type]
+
+ /** The root symbol denotations that are defined by the Tasty file associated with this
+ * TreeUnpickler. Set by `enterTopLevel`.
+ */
+ private var roots: Set[SymDenotation] = null
+
+ /** The root symbols that are defined in this Tasty file. This
+ * is a subset of `roots.map(_.symbol)`.
+ */
+ private var seenRoots: Set[Symbol] = Set()
+
+ /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */
+ private var ownerTree: OwnerTree = _
+
+ private def registerSym(addr: Addr, sym: Symbol) =
+ symAtAddr(addr) = sym
+
+ /** Enter all toplevel classes and objects into their scopes
+ * @param roots a set of SymDenotations that should be overwritten by unpickling
+ */
+ def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = {
+ this.roots = roots
+ var rdr = new TreeReader(reader).fork
+ ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr)
+ rdr.indexStats(reader.endAddr)
+ }
+
+ /** The unpickled trees */
+ def unpickle()(implicit ctx: Context): List[Tree] = {
+ assert(roots != null, "unpickle without previous enterTopLevel")
+ new TreeReader(reader).readTopLevel()(ctx.addMode(Mode.AllowDependentFunctions))
+ }
+
+ def toTermName(tname: TastyName): TermName = tname match {
+ case Simple(name) => name
+ case Qualified(qual, name) => toTermName(qual) ++ "." ++ toTermName(name)
+ case Signed(original, params, result) => toTermName(original)
+ case Shadowed(original) => toTermName(original).shadowedName
+ case Expanded(prefix, original) => toTermName(original).expandedName(toTermName(prefix))
+ case ModuleClass(original) => toTermName(original).moduleClassName.toTermName
+ case SuperAccessor(accessed) => toTermName(accessed).superName
+ case DefaultGetter(meth, num) => ???
+ }
+
+ def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref))
+ def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName
+
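+ /** A lazy type that, when completed, reads the definition the symbol was created from
+ * and stores the resulting tree in `treeAtAddr`, so that the enclosing statement
+ * sequence can pick it up later via `readIndexedDef`.
+ */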
+ class Completer(owner: Symbol, reader: TastyReader) extends LazyType {
+ import reader._
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
+ treeAtAddr(currentAddr) =
+ new TreeReader(reader).readIndexedDef()(
+ ctx.withPhaseNoLater(ctx.picklerPhase).withOwner(owner))
+ }
+ }
+
+ class TreeReader(val reader: TastyReader) {
+ import reader._
+
+ def forkAt(start: Addr) = new TreeReader(subReader(start, endAddr))
+ def fork = forkAt(currentAddr)
+
+ def skipTree(tag: Int): Unit =
+ if (tag >= firstLengthTreeTag) goto(readEnd())
+ else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() }
+ else if (tag >= firstASTTreeTag) skipTree()
+ else if (tag >= firstNatTreeTag) readNat()
+ def skipTree(): Unit = skipTree(readByte())
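+ // The tag ranges above mirror the TASTy format: length-prefixed trees are skipped by
+ // jumping to their end address, trees with a Nat argument and/or one subtree skip those
+ // parts, plain Nat trees just consume their number, and bare tags carry no payload.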
+
+ def skipParams(): Unit =
+ while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
+
+ /** Record all directly nested definitions and templates in current tree
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTree(buf: ListBuffer[OwnerTree], mode: MemberDefMode = AllDefs): Unit = {
+ val start = currentAddr
+ val tag = readByte()
+ tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | TEMPLATE =>
+ val end = readEnd()
+ for (i <- 0 until numRefs(tag)) readNat()
+ if (tag == TEMPLATE) scanTrees(buf, end, MemberDefsOnly)
+ if (mode != NoMemberDefs) buf += new OwnerTree(start, tag, fork, end)
+ goto(end)
+ case tag =>
+ if (mode == MemberDefsOnly) skipTree(tag)
+ else if (tag >= firstLengthTreeTag) {
+ val end = readEnd()
+ var nrefs = numRefs(tag)
+ if (nrefs < 0) {
+ for (i <- nrefs until 0) scanTree(buf)
+ goto(end)
+ }
+ else {
+ for (i <- 0 until nrefs) readNat()
+ scanTrees(buf, end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) { readNat(); scanTree(buf) }
+ else if (tag >= firstASTTreeTag) scanTree(buf)
+ else if (tag >= firstNatTreeTag) readNat()
+ }
+ }
+
+ /** Record all directly nested definitions and templates between current address and `end`
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTrees(buf: ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode = AllDefs): Unit = {
+ while (currentAddr.index < end.index) scanTree(buf, mode)
+ assert(currentAddr.index == end.index)
+ }
+
+ /** The next tag, following through SHARED tags */
+ def nextUnsharedTag: Int = {
+ val tag = nextByte
+ if (tag == SHARED) {
+ val lookAhead = fork
+ lookAhead.reader.readByte()
+ forkAt(lookAhead.reader.readAddr()).nextUnsharedTag
+ }
+ else tag
+ }
+
+ def readName(): TermName = toTermName(readNameRef())
+
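+ /** Read a name reference; if the pickled name carries a signature, return the pair
+ * of (original term name, signature), otherwise just the term name.
+ */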
+ def readNameSplitSig()(implicit ctx: Context): Any /* TermName | (TermName, Signature) */ =
+ tastyName(readNameRef()) match {
+ case Signed(original, params, result) =>
+ var sig = Signature(params map toTypeName, toTypeName(result))
+ if (sig == Signature.NotAMethod) sig = Signature.NotAMethod
+ (toTermName(original), sig)
+ case name =>
+ toTermName(name)
+ }
+
+// ------ Reading types -----------------------------------------------------
+
+ /** Read names in an interleaved sequence of (parameter) names and types/bounds */
+ def readParamNames(end: Addr): List[Name] =
+ until(end) {
+ val name = readName()
+ skipTree()
+ name
+ }
+
+ /** Read types or bounds in an interleaved sequence of (parameter) names and types/bounds */
+ def readParamTypes[T <: Type](end: Addr)(implicit ctx: Context): List[T] =
+ until(end) { readNat(); readType().asInstanceOf[T] }
+
+ /** Read a reference to a definition and return the symbol created at that definition */
+ def readSymRef()(implicit ctx: Context): Symbol = symbolAt(readAddr())
+
+ /** The symbol at given address; create a new one if none exists yet */
+ def symbolAt(addr: Addr)(implicit ctx: Context): Symbol = symAtAddr.get(addr) match {
+ case Some(sym) =>
+ sym
+ case None =>
+ val sym = forkAt(addr).createSymbol()(ctx.withOwner(ownerTree.findOwner(addr)))
+ ctx.log(i"forward reference to $sym")
+ sym
+ }
+
+ /** The symbol defined by current definition */
+ def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match {
+ case Some(sym) =>
+ assert(ctx.owner == sym.owner, i"owner discrepancy for $sym, expected: ${ctx.owner}, found: ${sym.owner}")
+ sym
+ case None =>
+ createSymbol()
+ }
+
+ /** Read a type */
+ def readType()(implicit ctx: Context): Type = {
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading type ${astTagToString(tag)} at $start")
+
+ def registeringType[T](tp: Type, op: => T): T = {
+ typeAtAddr(start) = tp
+ op
+ }
+
+ def readLengthType(): Type = {
+ val end = readEnd()
+
+ def readNamesSkipParams: (List[Name], TreeReader) = {
+ val nameReader = fork
+ nameReader.skipTree() // skip result
+ val paramReader = nameReader.fork
+ (nameReader.readParamNames(end), paramReader)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPERtype =>
+ SuperType(readType(), readType())
+ case REFINEDtype =>
+ var name: Name = readName()
+ val parent = readType()
+ val ttag = nextUnsharedTag
+ if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
+ RefinedType(parent, name, readType())
+ // Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
+ // Eta expansion of the latter puts readType() out of the expression.
+ case APPLIEDtype =>
+ readType().appliedTo(until(end)(readType()))
+ case TYPEBOUNDS =>
+ TypeBounds(readType(), readType())
+ case TYPEALIAS =>
+ val alias = readType()
+ val variance =
+ if (nextByte == COVARIANT) { readByte(); 1 }
+ else if (nextByte == CONTRAVARIANT) { readByte(); -1 }
+ else 0
+ TypeAlias(alias, variance)
+ case ANNOTATEDtype =>
+ AnnotatedType(readType(), Annotation(readTerm()))
+ case ANDtype =>
+ AndType(readType(), readType())
+ case ORtype =>
+ OrType(readType(), readType())
+ case BIND =>
+ val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
+ registerSym(start, sym)
+ TypeRef.withFixedSym(NoPrefix, sym.name, sym)
+ case POLYtype =>
+ val (rawNames, paramReader) = readNamesSkipParams
+ val (variances, paramNames) = rawNames
+ .map(name => (prefixToVariance(name.head), name.tail.toTypeName)).unzip
+ val result = PolyType(paramNames, variances)(
+ pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
+ pt => readType())
+ goto(end)
+ result
+ case METHODtype =>
+ val (names, paramReader) = readNamesSkipParams
+ val result = MethodType(names.map(_.toTermName), paramReader.readParamTypes[Type](end))(
+ mt => registeringType(mt, readType()))
+ goto(end)
+ result
+ case PARAMtype =>
+ readTypeRef() match {
+ case binder: PolyType => PolyParam(binder, readNat())
+ case binder: MethodType => MethodParam(binder, readNat())
+ }
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case ENUMconst =>
+ ConstantType(Constant(readTermRef().termSymbol))
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ def readSimpleType(): Type = (tag: @switch) match {
+ case TYPEREFdirect | TERMREFdirect =>
+ NamedType.withFixedSym(NoPrefix, readSymRef())
+ case TYPEREFsymbol | TERMREFsymbol =>
+ readSymNameRef()
+ case TYPEREFpkg =>
+ readPackageRef().moduleClass.typeRef
+ case TERMREFpkg =>
+ readPackageRef().termRef
+ case TYPEREF =>
+ val name = readName().toTypeName
+ TypeRef(readType(), name)
+ case TERMREF =>
+ readNameSplitSig() match {
+ case name: TermName => TermRef.all(readType(), name)
+ case (name: TermName, sig: Signature) => TermRef.withSig(readType(), name, sig)
+ }
+ case THIS =>
+ ThisType.raw(readType().asInstanceOf[TypeRef])
+ case RECtype =>
+ RecType(rt => registeringType(rt, readType()))
+ case RECthis =>
+ RecThis(readTypeRef().asInstanceOf[RecType])
+ case SHARED =>
+ val ref = readAddr()
+ typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
+ case UNITconst =>
+ ConstantType(Constant(()))
+ case TRUEconst =>
+ ConstantType(Constant(true))
+ case FALSEconst =>
+ ConstantType(Constant(false))
+ case BYTEconst =>
+ ConstantType(Constant(readInt().toByte))
+ case SHORTconst =>
+ ConstantType(Constant(readInt().toShort))
+ case CHARconst =>
+ ConstantType(Constant(readNat().toChar))
+ case INTconst =>
+ ConstantType(Constant(readInt()))
+ case LONGconst =>
+ ConstantType(Constant(readLongInt()))
+ case FLOATconst =>
+ ConstantType(Constant(java.lang.Float.intBitsToFloat(readInt())))
+ case DOUBLEconst =>
+ ConstantType(Constant(java.lang.Double.longBitsToDouble(readLongInt())))
+ case STRINGconst =>
+ ConstantType(Constant(readName().toString))
+ case NULLconst =>
+ ConstantType(Constant(null))
+ case CLASSconst =>
+ ConstantType(Constant(readType()))
+ case BYNAMEtype =>
+ ExprType(readType())
+ }
+
+ if (tag < firstLengthTreeTag) readSimpleType() else readLengthType()
+ }
+
+ private def readSymNameRef()(implicit ctx: Context): Type = {
+ val sym = readSymRef()
+ val prefix = readType()
+ val res = NamedType.withSymAndName(prefix, sym, sym.name)
+ prefix match {
+ case prefix: ThisType if prefix.cls eq sym.owner => res.withDenot(sym.denot)
+ // without this precaution we get an infinite cycle when unpickling pos/extmethods.scala
+ // the problem arises when a self type of a trait is a type parameter of the same trait.
+ case _ => res
+ }
+ }
+
+ private def readPackageRef()(implicit ctx: Context): TermSymbol = {
+ val name = readName()
+ if (name == nme.ROOT || name == nme.ROOTPKG) defn.RootPackage
+ else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal
+ else ctx.requiredPackage(name)
+ }
+
+ def readTypeRef(): Type =
+ typeAtAddr(readAddr())
+
+ def readTermRef()(implicit ctx: Context): TermRef =
+ readType().asInstanceOf[TermRef]
+
+// ------ Reading definitions -----------------------------------------------------
+
+ private def noRhs(end: Addr): Boolean =
+ currentAddr == end || isModifierTag(nextByte)
+
+ private def localContext(owner: Symbol)(implicit ctx: Context) = {
+ val lctx = ctx.fresh.setOwner(owner)
+ if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope
+ }
+
+ private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbsType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = {
+ val lacksDefinition =
+ rhsIsEmpty &&
+ name.isTermName && !name.isConstructorName && !givenFlags.is(ParamOrAccessor) ||
+ isAbsType
+ var flags = givenFlags
+ if (lacksDefinition && tag != PARAM) flags |= Deferred
+ if (tag == DEFDEF) flags |= Method
+ if (givenFlags is Module)
+ flags = flags | (if (tag == VALDEF) ModuleCreationFlags else ModuleClassCreationFlags)
+ if (ctx.owner.isClass) {
+ if (tag == TYPEPARAM) flags |= Param
+ else if (tag == PARAM) flags |= ParamAccessor
+ }
+ else if (isParamTag(tag)) flags |= Param
+ flags
+ }
+
+ def isAbstractType(ttag: Int)(implicit ctx: Context): Boolean = nextUnsharedTag match {
+ case POLYtpt =>
+ val rdr = fork
+ rdr.reader.readByte() // tag
+ rdr.reader.readNat() // length
+ rdr.skipParams() // tparams
+ rdr.isAbstractType(rdr.nextUnsharedTag)
+ case TYPEBOUNDS | TYPEBOUNDStpt => true
+ case _ => false
+ }
+
+ /** Create symbol of definition node and enter in symAtAddr map
+ * @return the created symbol
+ */
+ def createSymbol()(implicit ctx: Context): Symbol = nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createMemberSymbol()
+ case TEMPLATE =>
+ val localDummy = ctx.newLocalDummy(ctx.owner)
+ registerSym(currentAddr, localDummy)
+ localDummy
+ case tag =>
+ throw new Error(s"illegal createSymbol at $currentAddr, tag = $tag")
+ }
+
+ /** Create symbol of member definition or parameter node and enter in symAtAddr map
+ * @return the created symbol
+ */
+ def createMemberSymbol()(implicit ctx: Context): Symbol = {
+ val start = currentAddr
+ val tag = readByte()
+ val end = readEnd()
+ val rawName = tastyName(readNameRef())
+ var name: Name = toTermName(rawName)
+ if (tag == TYPEDEF || tag == TYPEPARAM) name = name.toTypeName
+ skipParams()
+ val ttag = nextUnsharedTag
+ val isAbsType = isAbstractType(ttag)
+ val isClass = ttag == TEMPLATE
+ val templateStart = currentAddr
+ skipTree() // tpt
+ val rhsStart = currentAddr
+ val rhsIsEmpty = noRhs(end)
+ if (!rhsIsEmpty) skipTree()
+ val (givenFlags, annots, privateWithin) = readModifiers(end)
+ def nameFlags(tname: TastyName): FlagSet = tname match {
+ case TastyName.Expanded(_, original) => ExpandedName | nameFlags(tastyName(original))
+ case TastyName.SuperAccessor(_) => Flags.SuperAccessor
+ case _ => EmptyFlags
+ }
+ pickling.println(i"creating symbol $name at $start with flags $givenFlags")
+ val flags = normalizeFlags(tag, givenFlags | nameFlags(rawName), name, isAbsType, rhsIsEmpty)
+ def adjustIfModule(completer: LazyType) =
+ if (flags is Module) ctx.adjustModuleCompleter(completer, name) else completer
+ val sym =
+ roots.find(root => (root.owner eq ctx.owner) && root.name == name) match {
+ case Some(rootd) =>
+ pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}")
+ rootd.info = adjustIfModule(
+ new Completer(ctx.owner, subReader(start, end)) with SymbolLoaders.SecondCompleter)
+ rootd.flags = flags &~ Touched // allow one more completion
+ rootd.privateWithin = privateWithin
+ seenRoots += rootd.symbol
+ rootd.symbol
+ case _ =>
+ val completer = adjustIfModule(new Completer(ctx.owner, subReader(start, end)))
+ if (isClass)
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord = start.index)
+ else
+ ctx.newSymbol(ctx.owner, name, flags, completer, privateWithin, coord = start.index)
+ } // TODO set position somehow (but take care not to upset Symbol#isDefinedInCurrentRun)
+ sym.annotations = annots
+ ctx.enter(sym)
+ registerSym(start, sym)
+ if (isClass) {
+ sym.completer.withDecls(newScope)
+ forkAt(templateStart).indexTemplateParams()(localContext(sym))
+ }
+ else if (sym.isInlineMethod)
+ sym.addAnnotation(LazyBodyAnnotation { ctx0 =>
+ implicit val ctx: Context = localContext(sym)(ctx0).addMode(Mode.ReadPositions)
+ // avoids space leaks by not capturing the current context
+ forkAt(rhsStart).readTerm()
+ })
+ goto(start)
+ sym
+ }
+
+ /** Read modifier list into triplet of flags, annotations and a privateWithin
+ * boundary symbol.
+ */
+ def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
+ var flags: FlagSet = EmptyFlags
+ var annots = new mutable.ListBuffer[Annotation]
+ var privateWithin: Symbol = NoSymbol
+ while (currentAddr.index != end.index) {
+ def addFlag(flag: FlagSet) = {
+ flags |= flag
+ readByte()
+ }
+ nextByte match {
+ case PRIVATE => addFlag(Private)
+ case INTERNAL => ??? // addFlag(Internal)
+ case PROTECTED => addFlag(Protected)
+ case ABSTRACT =>
+ readByte()
+ nextByte match {
+ case OVERRIDE => addFlag(AbsOverride)
+ case _ => flags |= Abstract
+ }
+ case FINAL => addFlag(Final)
+ case SEALED => addFlag(Sealed)
+ case CASE => addFlag(Case)
+ case IMPLICIT => addFlag(Implicit)
+ case LAZY => addFlag(Lazy)
+ case OVERRIDE => addFlag(Override)
+ case INLINE => addFlag(Inline)
+ case STATIC => addFlag(JavaStatic)
+ case OBJECT => addFlag(Module)
+ case TRAIT => addFlag(Trait)
+ case LOCAL => addFlag(Local)
+ case SYNTHETIC => addFlag(Synthetic)
+ case ARTIFACT => addFlag(Artifact)
+ case MUTABLE => addFlag(Mutable)
+ case LABEL => addFlag(Label)
+ case FIELDaccessor => addFlag(Accessor)
+ case CASEaccessor => addFlag(CaseAccessor)
+ case COVARIANT => addFlag(Covariant)
+ case CONTRAVARIANT => addFlag(Contravariant)
+ case SCALA2X => addFlag(Scala2x)
+ case DEFAULTparameterized => addFlag(DefaultParameterized)
+ case INSUPERCALL => addFlag(InSuperCall)
+ case STABLE => addFlag(Stable)
+ case PRIVATEqualified =>
+ readByte()
+ privateWithin = readType().typeSymbol
+ case PROTECTEDqualified =>
+ addFlag(Protected)
+ privateWithin = readType().typeSymbol
+ case ANNOTATION =>
+ readByte()
+ val end = readEnd()
+ val sym = readType().typeSymbol
+ val lazyAnnotTree = readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+ annots += Annotation.deferred(sym, _ => lazyAnnotTree.complete)
+ case _ =>
+ assert(false, s"illegal modifier tag at $currentAddr")
+ }
+ }
+ (flags, annots.toList, privateWithin)
+ }
+
+ /** Create symbols for the definitions in the statement sequence between
+ * current address and `end`.
+ * @return the largest subset of {NoInits, PureInterface} that a
+ * trait owning the indexed statements can have as flags.
+ */
+ def indexStats(end: Addr)(implicit ctx: Context): FlagSet = {
+ var initsFlags = NoInitsInterface
+ while (currentAddr.index < end.index) {
+ nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ val sym = symbolAtCurrent()
+ skipTree()
+ if (sym.isTerm && !sym.is(MethodOrLazyOrDeferred))
+ initsFlags = EmptyFlags
+ else if (sym.isClass ||
+ sym.is(Method, butNot = Deferred) && !sym.isConstructor)
+ initsFlags &= NoInits
+ case IMPORT =>
+ skipTree()
+ case PACKAGE =>
+ processPackage { (pid, end) => implicit ctx => indexStats(end) }
+ case _ =>
+ skipTree()
+ initsFlags = EmptyFlags
+ }
+ }
+ assert(currentAddr.index == end.index)
+ initsFlags
+ }
+
+ /** Process package with given operation `op`. The operation takes as arguments
+ * - a `RefTree` representing the `pid` of the package,
+ * - an end address,
+ * - a context which has the processed package as owner
+ */
+ def processPackage[T](op: (RefTree, Addr) => Context => T)(implicit ctx: Context): T = {
+ readByte()
+ val end = readEnd()
+ val pid = ref(readTermRef()).asInstanceOf[RefTree]
+ op(pid, end)(localContext(pid.symbol.moduleClass))
+ }
+
+ /** Create symbols for the longest consecutive sequence of parameters with given
+ * `tag` starting at current address.
+ */
+ def indexParams(tag: Int)(implicit ctx: Context) =
+ while (nextByte == tag) {
+ symbolAtCurrent()
+ skipTree()
+ }
+
+ /** Create symbols for all type and value parameters of template starting
+ * at current address.
+ */
+ def indexTemplateParams()(implicit ctx: Context) = {
+ assert(readByte() == TEMPLATE)
+ readEnd()
+ indexParams(TYPEPARAM)
+ indexParams(PARAM)
+ }
+
+ /** If definition was already read by a completer, return the previously read tree
+ * or else read definition.
+ */
+ def readIndexedDef()(implicit ctx: Context): Tree = treeAtAddr.remove(currentAddr) match {
+ case Some(tree) => skipTree(); tree
+ case none => readNewDef()
+ }
+
+ private def readNewDef()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ val sym = symAtAddr(start)
+ val tag = readByte()
+ val end = readEnd()
+
+ def readParamss(implicit ctx: Context): List[List[ValDef]] = {
+ collectWhile(nextByte == PARAMS) {
+ readByte()
+ readEnd()
+ readParams[ValDef](PARAM)
+ }
+ }
+
+ def readRhs(implicit ctx: Context) =
+ if (noRhs(end)) EmptyTree
+ else readLater(end, rdr => ctx => rdr.readTerm()(ctx))
+
+ def localCtx = localContext(sym)
+
+ def ValDef(tpt: Tree) =
+ ta.assignType(untpd.ValDef(sym.name.asTermName, tpt, readRhs(localCtx)), sym)
+
+ def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) =
+ ta.assignType(
+ untpd.DefDef(
+ sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)),
+ sym)
+
+ def TypeDef(rhs: Tree) =
+ ta.assignType(untpd.TypeDef(sym.name.asTypeName, rhs), sym)
+
+ def ta = ctx.typeAssigner
+
+ val name = readName()
+ pickling.println(s"reading def of $name at $start")
+ val tree: MemberDef = tag match {
+ case DEFDEF =>
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val vparamss = readParamss(localCtx)
+ val tpt = readTpt()
+ val typeParams = tparams.map(_.symbol)
+ val valueParamss = ctx.normalizeIfConstructor(
+ vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
+ val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe)
+ sym.info = ctx.methodType(typeParams, valueParamss, resType)
+ if (sym.isSetter && sym.accessedFieldOrGetter.is(ParamAccessor)) {
+ // reconstitute ParamAccessor flag of setters for var parameters, which is not pickled
+ sym.setFlag(ParamAccessor)
+ sym.resetFlag(Deferred)
+ }
+ DefDef(tparams, vparamss, tpt)
+ case VALDEF =>
+ val tpt = readTpt()
+ sym.info = tpt.tpe
+ ValDef(tpt)
+ case TYPEDEF | TYPEPARAM =>
+ if (sym.isClass) {
+ val companion = sym.scalacLinkedClass
+
+ // Is the companion defined in the same Tasty file as `sym`?
+ // The only case to check here is if `sym` is a root. In this case
+ // `companion` might have been entered by the environment but it might
+ // be missing from the Tasty file. So we check explicitly for that.
+ def isCodefined =
+ roots.contains(companion.denot) == seenRoots.contains(companion)
+ if (companion.exists && isCodefined) {
+ import transform.SymUtils._
+ if (sym is Flags.ModuleClass) sym.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, companion)
+ else sym.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, companion)
+ }
+ TypeDef(readTemplate(localCtx))
+ } else {
+ val rhs = readTpt()
+ sym.info = rhs.tpe match {
+ case _: TypeBounds | _: ClassInfo => rhs.tpe
+ case _ => TypeAlias(rhs.tpe, sym.variance)
+ }
+ TypeDef(rhs)
+ }
+ case PARAM =>
+ val tpt = readTpt()
+ if (noRhs(end)) {
+ sym.info = tpt.tpe
+ ValDef(tpt)
+ }
+ else {
+ sym.setFlag(Method)
+ sym.info = ExprType(tpt.tpe)
+ pickling.println(i"reading param alias $name -> $currentAddr")
+ DefDef(Nil, Nil, tpt)
+ }
+ }
+ val mods =
+ if (sym.annotations.isEmpty) untpd.EmptyModifiers
+ else untpd.Modifiers(annotations = sym.annotations.map(_.tree))
+ tree.withMods(mods)
+ // record annotations in tree so that tree positions can be filled in.
+ // Note: Once the inline PR with its changes to positions is in, this should
+ // no longer be necessary.
+ goto(end)
+ setPos(start, tree)
+ }
+
+ private def readTemplate(implicit ctx: Context): Template = {
+ val start = currentAddr
+ val cls = ctx.owner.asClass
+ def setClsInfo(parents: List[TypeRef], selfType: Type) =
+ cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
+ val assumedSelfType =
+ if (cls.is(Module) && cls.owner.isClass)
+ TermRef.withSig(cls.owner.thisType, cls.name.sourceModuleName, Signature.NotAMethod)
+ else NoType
+ setClsInfo(Nil, assumedSelfType)
+ val localDummy = symbolAtCurrent()
+ assert(readByte() == TEMPLATE)
+ val end = readEnd()
+ val tparams = readIndexedParams[TypeDef](TYPEPARAM)
+ val vparams = readIndexedParams[ValDef](PARAM)
+ val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) {
+ nextByte match {
+ case APPLY | TYPEAPPLY => readTerm()
+ case _ => readTpt()
+ }
+ }
+ val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
+ val self =
+ if (nextByte == SELFDEF) {
+ readByte()
+ untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
+ }
+ else EmptyValDef
+ setClsInfo(parentRefs, if (self.isEmpty) NoType else self.tpt.tpe)
+ cls.setApplicableFlags(fork.indexStats(end))
+ val constr = readIndexedDef().asInstanceOf[DefDef]
+
+ def mergeTypeParamsAndAliases(tparams: List[TypeDef], stats: List[Tree]): (List[Tree], List[Tree]) =
+ (tparams, stats) match {
+ case (tparam :: tparams1, (alias: TypeDef) :: stats1)
+ if tparam.name == alias.name.expandedName(cls) =>
+ val (tas, stats2) = mergeTypeParamsAndAliases(tparams1, stats1)
+ (tparam :: alias :: tas, stats2)
+ case _ =>
+ (tparams, stats)
+ }
+
+ val lazyStats = readLater(end, rdr => implicit ctx => {
+ val stats0 = rdr.readIndexedStats(localDummy, end)
+ val (tparamsAndAliases, stats) = mergeTypeParamsAndAliases(tparams, stats0)
+ tparamsAndAliases ++ vparams ++ stats
+ })
+ setPos(start,
+ untpd.Template(constr, parents, self, lazyStats)
+ .withType(localDummy.nonMemberTermRef))
+ }
+
+ def skipToplevel()(implicit ctx: Context): Unit = {
+ if (!isAtEnd)
+ nextByte match {
+ case IMPORT | PACKAGE =>
+ skipTree()
+ skipToplevel()
+ case _ =>
+ }
+ }
+
+ def readTopLevel()(implicit ctx: Context): List[Tree] = {
+ @tailrec def read(acc: ListBuffer[Tree]): List[Tree] = nextByte match {
+ case IMPORT | PACKAGE =>
+ acc += readIndexedStat(NoSymbol)
+ if (!isAtEnd) read(acc) else acc.toList
+ case _ => // top-level trees which are not imports or packages are not part of tree
+ acc.toList
+ }
+ read(new ListBuffer[tpd.Tree])
+ }
+
+ def readIndexedStat(exprOwner: Symbol)(implicit ctx: Context): Tree = nextByte match {
+ case TYPEDEF | VALDEF | DEFDEF =>
+ readIndexedDef()
+ case IMPORT =>
+ readImport()
+ case PACKAGE =>
+ val start = currentAddr
+ processPackage { (pid, end) => implicit ctx =>
+ setPos(start, PackageDef(pid, readIndexedStats(exprOwner, end)(ctx)))
+ }
+ case _ =>
+ readTerm()(ctx.withOwner(exprOwner))
+ }
+
+ def readImport()(implicit ctx: Context): Tree = {
+ val start = currentAddr
+ readByte()
+ readEnd()
+ val expr = readTerm()
+ def readSelectors(): List[untpd.Tree] = nextByte match {
+ case IMPORTED =>
+ val start = currentAddr
+ readByte()
+ val from = setPos(start, untpd.Ident(readName()))
+ nextByte match {
+ case RENAMED =>
+ val start2 = currentAddr
+ readByte()
+ val to = setPos(start2, untpd.Ident(readName()))
+ untpd.Thicket(from, to) :: readSelectors()
+ case _ =>
+ from :: readSelectors()
+ }
+ case _ =>
+ Nil
+ }
+ setPos(start, Import(expr, readSelectors()))
+ }
+
+ def readIndexedStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] =
+ until(end)(readIndexedStat(exprOwner))
+
+ def readStats(exprOwner: Symbol, end: Addr)(implicit ctx: Context): List[Tree] = {
+ fork.indexStats(end)
+ readIndexedStats(exprOwner, end)
+ }
+
+ def readIndexedParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] =
+ collectWhile(nextByte == tag) { readIndexedDef().asInstanceOf[T] }
+
+ def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = {
+ fork.indexParams(tag)
+ readIndexedParams(tag)
+ }
+
+// ------ Reading trees -----------------------------------------------------
+
+ def readTerm()(implicit ctx: Context): Tree = { // TODO: rename to readTree
+ val start = currentAddr
+ val tag = readByte()
+ pickling.println(s"reading term ${astTagToString(tag)} at $start")
+
+ def readPathTerm(): Tree = {
+ goto(start)
+ readType() match {
+ case path: TypeRef => TypeTree(path)
+ case path: TermRef => ref(path)
+ case path: ThisType => This(path.cls)
+ case path: ConstantType => Literal(path.value)
+ }
+ }
+
+ def completeSelect(name: Name, tpf: Type => Type): Select = {
+ val localCtx =
+ if (name == nme.CONSTRUCTOR) ctx.addMode(Mode.InSuperCall) else ctx
+ val qual = readTerm()(localCtx)
+ val unshadowed = if (name.isShadowedName) name.revertShadowed else name
+ untpd.Select(qual, unshadowed).withType(tpf(qual.tpe.widenIfUnstable))
+ }
+
+ def readQualId(): (untpd.Ident, TypeRef) = {
+ val qual = readTerm().asInstanceOf[untpd.Ident]
+ (untpd.Ident(qual.name).withPos(qual.pos), qual.tpe.asInstanceOf[TypeRef])
+ }
+
+ def readSimpleTerm(): Tree = tag match {
+ case SHARED =>
+ forkAt(readAddr()).readTerm()
+ case IDENT =>
+ untpd.Ident(readName()).withType(readType())
+ case IDENTtpt =>
+ untpd.Ident(readName().toTypeName).withType(readType())
+ case SELECT =>
+ def readRest(name: Name, sig: Signature) =
+ completeSelect(name, TermRef.withSig(_, name.asTermName, sig))
+ readNameSplitSig match {
+ case name: Name => readRest(name, Signature.NotAMethod)
+ case (name: Name, sig: Signature) => readRest(name, sig)
+ }
+ case SELECTtpt =>
+ val name = readName().toTypeName
+ completeSelect(name, TypeRef(_, name))
+ case QUALTHIS =>
+ val (qual, tref) = readQualId()
+ untpd.This(qual).withType(ThisType.raw(tref))
+ case NEW =>
+ New(readTpt())
+ case SINGLETONtpt =>
+ SingletonTypeTree(readTerm())
+ case BYNAMEtpt =>
+ ByNameTypeTree(readTpt())
+ case _ =>
+ readPathTerm()
+ }
+
+ def readLengthTerm(): Tree = {
+ val end = readEnd()
+
+ def localNonClassCtx = {
+ val ctx1 = ctx.fresh.setNewScope
+ if (ctx.owner.isClass) ctx1.setOwner(ctx1.newLocalDummy(ctx.owner)) else ctx1
+ }
+
+ def readBlock(mkTree: (List[Tree], Tree) => Tree): Tree = {
+ val exprReader = fork
+ skipTree()
+ val localCtx = localNonClassCtx
+ val stats = readStats(ctx.owner, end)(localCtx)
+ val expr = exprReader.readTerm()(localCtx)
+ mkTree(stats, expr)
+ }
+
+ val result =
+ (tag: @switch) match {
+ case SUPER =>
+ val qual = readTerm()
+ val (mixId, mixTpe) = ifBefore(end)(readQualId(), (untpd.EmptyTypeIdent, NoType))
+ tpd.Super(qual, mixId, ctx.mode.is(Mode.InSuperCall), mixTpe.typeSymbol)
+ case APPLY =>
+ val fn = readTerm()
+ val isJava = fn.symbol.is(JavaDefined)
+ def readArg() = readTerm() match {
+ case SeqLiteral(elems, elemtpt) if isJava =>
+ JavaSeqLiteral(elems, elemtpt)
+ case arg => arg
+ }
+ tpd.Apply(fn, until(end)(readArg()))
+ case TYPEAPPLY =>
+ tpd.TypeApply(readTerm(), until(end)(readTpt()))
+ case TYPED =>
+ val expr = readTerm()
+ val tpt = readTpt()
+ val expr1 = expr match {
+ case SeqLiteral(elems, elemtpt) if tpt.tpe.isRef(defn.ArrayClass) =>
+ JavaSeqLiteral(elems, elemtpt)
+ case expr => expr
+ }
+ Typed(expr1, tpt)
+ case NAMEDARG =>
+ NamedArg(readName(), readTerm())
+ case ASSIGN =>
+ Assign(readTerm(), readTerm())
+ case BLOCK =>
+ readBlock(Block)
+ case INLINED =>
+ val call = readTerm()
+ readBlock((defs, expr) => Inlined(call, defs.asInstanceOf[List[MemberDef]], expr))
+ case IF =>
+ If(readTerm(), readTerm(), readTerm())
+ case LAMBDA =>
+ val meth = readTerm()
+ val tpt = ifBefore(end)(readTpt(), EmptyTree)
+ Closure(Nil, meth, tpt)
+ case MATCH =>
+ Match(readTerm(), readCases(end))
+ case RETURN =>
+ val from = readSymRef()
+ val expr = ifBefore(end)(readTerm(), EmptyTree)
+ Return(expr, Ident(from.termRef))
+ case TRY =>
+ Try(readTerm(), readCases(end), ifBefore(end)(readTerm(), EmptyTree))
+ case REPEATED =>
+ val elemtpt = readTpt()
+ SeqLiteral(until(end)(readTerm()), elemtpt)
+ case BIND =>
+ val name = readName()
+ val info = readType()
+ val sym = ctx.newSymbol(ctx.owner, name, EmptyFlags, info)
+ registerSym(start, sym)
+ Bind(sym, readTerm())
+ case ALTERNATIVE =>
+ Alternative(until(end)(readTerm()))
+ case UNAPPLY =>
+ val fn = readTerm()
+ val implicitArgs =
+ collectWhile(nextByte == IMPLICITarg) {
+ readByte()
+ readTerm()
+ }
+ val patType = readType()
+ val argPats = until(end)(readTerm())
+ UnApply(fn, implicitArgs, argPats, patType)
+ case REFINEDtpt =>
+ val refineCls = ctx.newCompleteClassSymbol(
+ ctx.owner, tpnme.REFINE_CLASS, Fresh, parents = Nil)
+ typeAtAddr(start) = refineCls.typeRef
+ val parent = readTpt()
+ val refinements = readStats(refineCls, end)(localContext(refineCls))
+ RefinedTypeTree(parent, refinements, refineCls)
+ case APPLIEDtpt =>
+ AppliedTypeTree(readTpt(), until(end)(readTpt()))
+ case ANDtpt =>
+ AndTypeTree(readTpt(), readTpt())
+ case ORtpt =>
+ OrTypeTree(readTpt(), readTpt())
+ case ANNOTATEDtpt =>
+ Annotated(readTpt(), readTerm())
+ case POLYtpt =>
+ val localCtx = localNonClassCtx
+ val tparams = readParams[TypeDef](TYPEPARAM)(localCtx)
+ val body = readTpt()(localCtx)
+ PolyTypeTree(tparams, body)
+ case TYPEBOUNDStpt =>
+ TypeBoundsTree(readTpt(), readTpt())
+ case _ =>
+ readPathTerm()
+ }
+ assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}")
+ result
+ }
+
+ val tree = if (tag < firstLengthTreeTag) readSimpleTerm() else readLengthTerm()
+ tree.overwriteType(tree.tpe.simplified)
+ setPos(start, tree)
+ }
+
+ def readTpt()(implicit ctx: Context) =
+ if (isTypeTreeTag(nextUnsharedTag)) readTerm()
+ else {
+ val start = currentAddr
+ val tp = readType()
+ if (tp.exists) setPos(start, TypeTree(tp)) else EmptyTree
+ }
+
+ def readCases(end: Addr)(implicit ctx: Context): List[CaseDef] =
+ collectWhile(nextByte == CASEDEF && currentAddr != end) { readCase()(ctx.fresh.setNewScope) }
+
+ def readCase()(implicit ctx: Context): CaseDef = {
+ val start = currentAddr
+ readByte()
+ val end = readEnd()
+ val pat = readTerm()
+ val rhs = readTerm()
+ val guard = ifBefore(end)(readTerm(), EmptyTree)
+ setPos(start, CaseDef(pat, guard, rhs))
+ }
+
+ def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context => T): Trees.Lazy[T] = {
+ val localReader = fork
+ goto(end)
+ new LazyReader(localReader, op)
+ }
+
+// ------ Setting positions ------------------------------------------------
+
+ /** Set position of `tree` at given `addr`. */
+ def setPos[T <: untpd.Tree](addr: Addr, tree: T)(implicit ctx: Context): tree.type =
+ if (ctx.mode.is(Mode.ReadPositions)) {
+ posUnpicklerOpt match {
+ case Some(posUnpickler) =>
+ //println(i"setPos $tree / ${tree.getClass} at $addr to ${posUnpickler.posAt(addr)}")
+ val pos = posUnpickler.posAt(addr)
+ if (pos.exists) tree.setPosUnchecked(pos)
+ tree
+ case _ =>
+ //println(i"no pos $tree")
+ tree
+ }
+ }
+ else tree
+ }
+
+ class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] {
+ def complete(implicit ctx: Context): T = {
+ pickling.println(i"starting to read at ${reader.reader.currentAddr}")
+ op(reader)(ctx.addMode(Mode.AllowDependentFunctions).withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader) extends LazyAnnotation(sym) {
+ def complete(implicit ctx: Context) = {
+ reader.readTerm()(ctx.withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+ /** A lazy data structure that records how definitions are nested in TASTY data.
+ * The structure is lazy because it needs to be computed only for forward references
+ * to symbols that happen before the referenced symbol is created (see `symbolAt`).
+ * Such forward references are rare.
+ *
+ * @param addr The address of tree representing an owning definition, NoAddr for root tree
+ * @param tag The tag at `addr`. Used to determine which subtrees to scan for children
+ * (i.e. if `tag` is template, don't scan member defs, as these already belong
+ * to the enclosing class).
+ * @param reader The reader to be used for scanning for children
+ * @param end The end of the owning definition
+ */
+ class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr) {
+
+ /** All definitions that have the definition at `addr` as closest enclosing definition */
+ lazy val children: List[OwnerTree] = {
+ val buf = new ListBuffer[OwnerTree]
+ reader.scanTrees(buf, end, if (tag == TEMPLATE) NoMemberDefs else AllDefs)
+ buf.toList
+ }
+
+ /** Find the owner of definition at `addr` */
+ def findOwner(addr: Addr)(implicit ctx: Context): Symbol = {
+ def search(cs: List[OwnerTree], current: Symbol): Symbol =
+ try cs match {
+ case ot :: cs1 =>
+ if (ot.addr.index == addr.index)
+ current
+ else if (ot.addr.index < addr.index && addr.index < ot.end.index)
+ search(ot.children, reader.symbolAt(ot.addr))
+ else
+ search(cs1, current)
+ case Nil =>
+ throw new TreeWithoutOwner
+ }
+ catch {
+ case ex: TreeWithoutOwner =>
+ println(i"no owner for $addr among $cs") // DEBUG
+ throw ex
+ }
+ search(children, NoSymbol)
+ }
+
+ override def toString = s"OwnerTree(${addr.index}, ${end.index})"
+ }
+}
+
+object TreeUnpickler {
+
+ /** An enumeration indicating which subtrees should be added to an OwnerTree. */
+ type MemberDefMode = Int
+ final val MemberDefsOnly = 0 // add only member defs; skip other statements
+ final val NoMemberDefs = 1 // add only statements that are not member defs
+ final val AllDefs = 2 // add everything
+
+ class TreeWithoutOwner extends Exception
+}
+
+
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
new file mode 100644
index 000000000..17fef3852
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala
@@ -0,0 +1,299 @@
+package dotty.tools
+package dotc
+package core
+package unpickleScala2
+
+import Flags._
+
+/** Variable length byte arrays, with methods for basic pickling and unpickling.
+ *
+ * @param data The initial buffer
+ * @param from The first index where defined data are found
+ * @param to The first index where new data can be written
+ */
+class PickleBuffer(data: Array[Byte], from: Int, to: Int) {
+
+ var bytes = data
+ var readIndex = from
+ var writeIndex = to
+
+ /** Double bytes array */
+ private def dble(): Unit = {
+ val bytes1 = new Array[Byte](bytes.length * 2)
+ Array.copy(bytes, 0, bytes1, 0, writeIndex)
+ bytes = bytes1
+ }
+
+ def ensureCapacity(capacity: Int) =
+ while (bytes.length < writeIndex + capacity) dble()
+
+ // -- Basic output routines --------------------------------------------
+
+ /** Write a byte of data */
+ def writeByte(b: Int): Unit = {
+ if (writeIndex == bytes.length) dble()
+ bytes(writeIndex) = b.toByte
+ writeIndex += 1
+ }
+
+ /** Write a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set.
+ */
+ def writeNat(x: Int): Unit =
+ writeLongNat(x.toLong & 0x00000000FFFFFFFFL)
+
+ /**
+ * Like writeNat, but for longs. This is not the same as
+ * writeLong, which writes in base 256. Note that the
+ * binary representation of LongNat is identical to Nat
+ * if the long value is in the range Int.MIN_VALUE to
+ * Int.MAX_VALUE.
+ */
+ def writeLongNat(x: Long): Unit = {
+ def writeNatPrefix(x: Long): Unit = {
+ val y = x >>> 7
+ if (y != 0L) writeNatPrefix(y)
+ writeByte(((x & 0x7f) | 0x80).toInt)
+ }
+ val y = x >>> 7
+ if (y != 0L) writeNatPrefix(y)
+ writeByte((x & 0x7f).toInt)
+ }
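+ // For example, writeNat(300) emits the two bytes 0x82 0x2C:
+ // 300 = 2 * 128 + 44, and every digit except the last carries the 0x80 continuation bit.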
+
+ /** Write a natural number <code>x</code> at position <code>pos</code>.
+ * If number is more than one byte, shift rest of array to make space.
+ *
+ * @param pos ...
+ * @param x ...
+ */
+ def patchNat(pos: Int, x: Int): Unit = {
+ def patchNatPrefix(x: Int): Unit = {
+ writeByte(0)
+ Array.copy(bytes, pos, bytes, pos + 1, writeIndex - (pos + 1))
+ bytes(pos) = ((x & 0x7f) | 0x80).toByte
+ val y = x >>> 7
+ if (y != 0) patchNatPrefix(y)
+ }
+ bytes(pos) = (x & 0x7f).toByte
+ val y = x >>> 7
+ if (y != 0) patchNatPrefix(y)
+ }
+
+ /** Write a long number <code>x</code> in signed big endian format, base 256.
+ *
+ * @param x The long number to be written.
+ */
+ def writeLong(x: Long): Unit = {
+ val y = x >> 8
+ val z = x & 0xff
+ if (-y != (z >> 7)) writeLong(y)
+ writeByte(z.toInt)
+ }
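+ // For example, writeLong(-2) emits the single byte 0xFE, while writeLong(128) needs the
+ // two bytes 0x00 0x80 so that readLong does not sign-extend the result to a negative value.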
+
+ // -- Basic input routines --------------------------------------------
+
+ /** Peek at the current byte without moving the read index */
+ def peekByte(): Int = bytes(readIndex)
+
+ /** Read a byte */
+ def readByte(): Int = {
+ val x = bytes(readIndex); readIndex += 1; x
+ }
+
+ /** Read a natural number in big endian format, base 128.
+ * All but the last digits have bit 0x80 set. */
+ def readNat(): Int = readLongNat().toInt
+
+ def readLongNat(): Long = {
+ var b = 0L
+ var x = 0L
+ do {
+ b = readByte()
+ x = (x << 7) + (b & 0x7f)
+ } while ((b & 0x80) != 0L)
+ x
+ }
+
+ /** Read a long number in signed big endian format, base 256. */
+ def readLong(len: Int): Long = {
+ var x = 0L
+ var i = 0
+ while (i < len) {
+ x = (x << 8) + (readByte() & 0xff)
+ i += 1
+ }
+ val leading = 64 - (len << 3)
+ x << leading >> leading
+ }
+
+ /** Returns the buffer as a sequence of (Int, Array[Byte]) representing
+ * (tag, data) of the individual entries. Saves and restores buffer state.
+ */
+
+ def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = {
+ val saved = readIndex
+ readIndex = 0
+ readNat() ; readNat() // discarding version
+ val result = new Array[(Int, Array[Byte])](readNat())
+
+ result.indices foreach { index =>
+ val tag = readNat()
+ val len = readNat()
+ val bytes = data.slice(readIndex, len + readIndex)
+ readIndex += len
+
+ result(index) = tag -> bytes
+ }
+
+ readIndex = saved
+ result.toIndexedSeq
+ }
+
+ /** Perform operation <code>op</code> until the condition
+ * <code>readIndex == end</code> is satisfied.
+ * Concatenate results into a list.
+ *
+ * @param end ...
+ * @param op ...
+ * @return ...
+ */
+ def until[T](end: Int, op: () => T): List[T] =
+ if (readIndex == end) List() else op() :: until(end, op)
+
+ /** Perform operation <code>op</code> the number of
+ * times specified. Concatenate the results into a list.
+ */
+ def times[T](n: Int, op: ()=>T): List[T] =
+ if (n == 0) List() else op() :: times(n-1, op)
+
+ /** Pickle = majorVersion_Nat minorVersion_Nat nbEntries_Nat {Entry}
+ * Entry = type_Nat length_Nat [actual entries]
+ *
+ * Assumes that the ..Version_Nat are already consumed.
+ *
+ * @return an array mapping entry numbers to locations in
+ * the byte array where the entries start.
+ */
+ def createIndex: Array[Int] = {
+ val index = new Array[Int](readNat()) // nbEntries_Nat
+ for (i <- 0 until index.length) {
+ index(i) = readIndex
+ readByte() // skip type_Nat
+ readIndex = readNat() + readIndex // read length_Nat, jump to next entry
+ }
+ index
+ }
+}
+
+object PickleBuffer {
+
+ private final val ScalaFlagEnd = 48
+ private final val ChunkBits = 8
+ private final val ChunkSize = 1 << ChunkBits
+ private type FlagMap = Array[Array[Long]]
+
+ private val (scalaTermFlagMap, scalaTypeFlagMap) = {
+ import scala.reflect.internal.Flags._
+
+ // The following vals are copy-pasted from reflect.internal.Flags.
+ // They are unfortunately private there, so we cannot get at them directly.
+ // Using the public method pickledToRawFlags instead looks unattractive
+ // because of performance.
+ val IMPLICIT_PKL = (1 << 0)
+ val FINAL_PKL = (1 << 1)
+ val PRIVATE_PKL = (1 << 2)
+ val PROTECTED_PKL = (1 << 3)
+ val SEALED_PKL = (1 << 4)
+ val OVERRIDE_PKL = (1 << 5)
+ val CASE_PKL = (1 << 6)
+ val ABSTRACT_PKL = (1 << 7)
+ val DEFERRED_PKL = (1 << 8)
+ val METHOD_PKL = (1 << 9)
+ val MODULE_PKL = (1 << 10)
+ val INTERFACE_PKL = (1 << 11)
+
+ val corr = Map(
+ PROTECTED_PKL -> Protected,
+ OVERRIDE_PKL -> Override,
+ PRIVATE_PKL -> Private,
+ ABSTRACT_PKL -> Abstract,
+ DEFERRED_PKL -> Deferred,
+ FINAL_PKL -> Final,
+ METHOD_PKL -> Method,
+ INTERFACE_PKL -> NoInitsInterface,
+ MODULE_PKL -> (Module | Lazy, Module),
+ IMPLICIT_PKL -> Implicit,
+ SEALED_PKL -> Sealed,
+ CASE_PKL -> Case,
+ MUTABLE -> Mutable,
+ PARAM -> Param,
+ PACKAGE -> Package,
+ MACRO -> Macro,
+ BYNAMEPARAM -> (Method, Covariant),
+ LABEL -> (Label, Contravariant),
+ ABSOVERRIDE -> AbsOverride,
+ LOCAL -> Local,
+ JAVA -> JavaDefined,
+ SYNTHETIC -> Synthetic,
+ STABLE -> Stable,
+ STATIC -> JavaStatic,
+ CASEACCESSOR -> CaseAccessor,
+ DEFAULTPARAM -> (DefaultParameterized, Trait),
+ BRIDGE -> Bridge,
+ ACCESSOR -> Accessor,
+ SUPERACCESSOR -> SuperAccessor,
+ PARAMACCESSOR -> ParamAccessor,
+ MODULEVAR -> Scala2ModuleVar,
+ LAZY -> Lazy,
+ MIXEDIN -> (MixedIn, Scala2Existential),
+ EXPANDEDNAME -> ExpandedName,
+ IMPLCLASS -> (Scala2PreSuper, ImplClass),
+ SPECIALIZED -> Specialized,
+ VBRIDGE -> VBridge,
+ VARARGS -> JavaVarargs,
+ ENUM -> Enum)
+
+ // generate initial maps from Scala flags to Dotty flags
+ val termMap, typeMap = new Array[Long](64)
+ for (idx <- 0 until ScalaFlagEnd)
+ corr get (1L << idx) match {
+ case Some((termFlag: FlagSet, typeFlag: FlagSet)) =>
+ termMap(idx) |= termFlag.bits
+ typeMap(idx) |= typeFlag.bits
+ case Some(commonFlag: FlagSet) =>
+ termMap(idx) |= commonFlag.toTermFlags.bits
+ typeMap(idx) |= commonFlag.toTypeFlags.bits
+ case _ =>
+ }
+
+ // Convert map so that it maps chunks of ChunkBits size at once
+ // instead of single bits.
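+    // For chunk index i and byte value j, chunked(i)(j) is the union of
+    // xs(i * ChunkBits + k) over all bit positions k that are set in j.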
+ def chunkMap(xs: Array[Long]): FlagMap = {
+ val chunked = Array.ofDim[Long](
+ (xs.length + ChunkBits - 1) / ChunkBits, ChunkSize)
+ for (i <- 0 until chunked.length)
+ for (j <- 0 until ChunkSize)
+ for (k <- 0 until ChunkBits)
+ if ((j & (1 << k)) != 0)
+ chunked(i)(j) |= xs(i * ChunkBits + k)
+ chunked
+ }
+
+ (chunkMap(termMap), chunkMap(typeMap))
+ }
+
+ def unpickleScalaFlags(sflags: Long, isType: Boolean): FlagSet = {
+ val map: FlagMap = if (isType) scalaTypeFlagMap else scalaTermFlagMap
+ val shift = ChunkBits
+ val mask = ChunkSize - 1
+ assert(6 * ChunkBits == ScalaFlagEnd)
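+    // Translate the 48 Scala flag bits eight at a time: each 8-bit slice of
+    // sflags indexes its chunk map, which already holds the union of the
+    // corresponding Dotty flag bits.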
+ FlagSet(
+ map(0)((sflags >>> (shift * 0)).toInt & mask) |
+ map(1)((sflags >>> (shift * 1)).toInt & mask) |
+ map(2)((sflags >>> (shift * 2)).toInt & mask) |
+ map(3)((sflags >>> (shift * 3)).toInt & mask) |
+ map(4)((sflags >>> (shift * 4)).toInt & mask) |
+ map(5)((sflags >>> (shift * 5)).toInt & mask)
+ )
+ }
+}
diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
new file mode 100644
index 000000000..b01f6cc6a
--- /dev/null
+++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -0,0 +1,1260 @@
+package dotty.tools
+package dotc
+package core
+package unpickleScala2
+
+import java.io.IOException
+import java.lang.Float.intBitsToFloat
+import java.lang.Double.longBitsToDouble
+
+import Contexts._, Symbols._, Types._, Scopes._, SymDenotations._, Names._, NameOps._
+import StdNames._, Denotations._, NameOps._, Flags._, Constants._, Annotations._
+import dotty.tools.dotc.typer.ProtoTypes.{FunProtoTyped, FunProto}
+import util.Positions._
+import dotty.tools.dotc.ast.{tpd, Trees, untpd}, ast.tpd._
+import ast.untpd.Modifiers
+import printing.Texts._
+import printing.Printer
+import io.AbstractFile
+import util.common._
+import typer.Checking.checkNonCyclic
+import PickleBuffer._
+import scala.reflect.internal.pickling.PickleFormat._
+import Decorators._
+import TypeApplications._
+import classfile.ClassfileParser
+import scala.collection.{ mutable, immutable }
+import scala.collection.mutable.ListBuffer
+import scala.annotation.switch
+
+object Scala2Unpickler {
+
+ /** Exception thrown if classfile is corrupted */
+ class BadSignature(msg: String) extends RuntimeException(msg)
+
+ case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType {
+ override def fallbackToText(printer: Printer): Text =
+ "[" ~ printer.dclsText(tparams, ", ") ~ "]" ~ printer.toText(tpe)
+ }
+
+ /** Temporary type for classinfos, will be decomposed on completion of the class */
+ case class TempClassInfoType(parentTypes: List[Type], decls: Scope, clazz: Symbol) extends UncachedGroundType
+
+ /** Convert temp poly type to poly type and leave other types alone. */
+ def translateTempPoly(tp: Type)(implicit ctx: Context): Type = tp match {
+ case TempPolyType(tparams, restpe) => restpe.LambdaAbstract(tparams)
+ case tp => tp
+ }
+
+ def addConstructorTypeParams(denot: SymDenotation)(implicit ctx: Context) = {
+ assert(denot.isConstructor)
+ denot.info = denot.info.LambdaAbstract(denot.owner.typeParams)
+ }
+
+ /** Convert array parameters denoting a repeated parameter of a Java method
+ * to `RepeatedParamClass` types.
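+   *  For instance, a Java method typed `(String, Object[])Unit` is re-typed
+   *  as `(String, Object*)Unit`; only the last parameter is rewritten.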
+ */
+ def arrayToRepeated(tp: Type)(implicit ctx: Context): Type = tp match {
+ case tp @ MethodType(paramNames, paramTypes) =>
+ val lastArg = paramTypes.last
+ assert(lastArg isRef defn.ArrayClass)
+ val elemtp0 :: Nil = lastArg.baseArgInfos(defn.ArrayClass)
+ val elemtp = elemtp0 match {
+ case AndType(t1, t2) if t1.typeSymbol.isAbstractType && (t2 isRef defn.ObjectClass) =>
+ t1 // drop intersection with Object for abstract types in varargs. UnCurry can handle them.
+ case _ =>
+ elemtp0
+ }
+ tp.derivedMethodType(
+ paramNames,
+ paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp),
+ tp.resultType)
+ case tp: PolyType =>
+ tp.derivedPolyType(tp.paramNames, tp.paramBounds, arrayToRepeated(tp.resultType))
+ }
+
+ def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context) =
+ if (scope.lookup(nme.CONSTRUCTOR) == NoSymbol) {
+ val constr = ctx.newDefaultConstructor(cls)
+ addConstructorTypeParams(constr)
+ cls.enter(constr, scope)
+ }
+
+ def setClassInfo(denot: ClassDenotation, info: Type, selfInfo: Type = NoType)(implicit ctx: Context): Unit = {
+ val cls = denot.classSymbol
+ val (tparams, TempClassInfoType(parents, decls, clazz)) = info match {
+ case TempPolyType(tps, cinfo) => (tps, cinfo)
+ case cinfo => (Nil, cinfo)
+ }
+ val ost =
+ if ((selfInfo eq NoType) && (denot is ModuleClass) && denot.sourceModule.exists)
+ // it seems sometimes the source module does not exist for a module class.
+        // An example is `scala.reflect.internal.Trees.Template$`. Without the
+ // `denot.sourceModule.exists` provision i859.scala crashes in the backend.
+ denot.owner.thisType select denot.sourceModule
+ else selfInfo
+ val tempInfo = new TempClassInfo(denot.owner.thisType, denot.classSymbol, decls, ost)
+ denot.info = tempInfo // first rough info to avoid CyclicReferences
+ var parentRefs = ctx.normalizeToClassRefs(parents, cls, decls)
+ if (parentRefs.isEmpty) parentRefs = defn.ObjectType :: Nil
+ for (tparam <- tparams) {
+ val tsym = decls.lookup(tparam.name)
+ if (tsym.exists) tsym.setFlag(TypeParam)
+ else denot.enter(tparam, decls)
+ }
+ if (!(denot.flagsUNSAFE is JavaModule)) ensureConstructor(denot.symbol.asClass, decls)
+
+ val scalacCompanion = denot.classSymbol.scalacLinkedClass
+
+ def registerCompanionPair(module: Symbol, claz: Symbol) = {
+ import transform.SymUtils._
+ module.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, claz)
+ if (claz.isClass) {
+ claz.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, module)
+ }
+ }
+
+ if (denot.flagsUNSAFE is Module) {
+ registerCompanionPair(denot.classSymbol, scalacCompanion)
+ } else {
+ registerCompanionPair(scalacCompanion, denot.classSymbol)
+ }
+
+ tempInfo.finalize(denot, parentRefs) // install final info, except possibly for typeparams ordering
+ denot.ensureTypeParamsInCorrectOrder()
+ }
+}
+
+/** Unpickle symbol table information descending from a class and/or module root
+ * from an array of bytes.
+ *  @param bytes           the byte array from which we unpickle
+ *  @param classRoot       the denotation of the top-level class being unpickled
+ *  @param moduleClassRoot the denotation of the top-level module class being unpickled
+ */
+class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot: ClassDenotation)(ictx: Context)
+ extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded {
+
+ def showPickled() = {
+ atReadPos(0, () => {
+ println(s"classRoot = ${classRoot.debugString}, moduleClassRoot = ${moduleClassRoot.debugString}")
+ util.ShowPickled.printFile(this)
+ })
+ }
+
+ // print("unpickling "); showPickled() // !!! DEBUG
+
+ import Scala2Unpickler._
+
+ val moduleRoot = moduleClassRoot.sourceModule(ictx).denot(ictx)
+ assert(moduleRoot.isTerm)
+
+ checkVersion(ictx)
+
+ private val loadingMirror = defn(ictx) // was: mirrorThatLoaded(classRoot)
+
+ /** A map from entry numbers to array offsets */
+ private val index = createIndex
+
+ /** A map from entry numbers to symbols, types, or annotations */
+ private val entries = new Array[AnyRef](index.length)
+
+ /** A map from symbols to their associated `decls` scopes */
+ private val symScopes = mutable.AnyRefMap[Symbol, Scope]()
+
+ protected def errorBadSignature(msg: String, original: Option[RuntimeException] = None)(implicit ctx: Context) = {
+ val ex = new BadSignature(
+ i"""error reading Scala signature of $classRoot from $source:
+ |error occurred at position $readIndex: $msg""")
+ if (ctx.debug || true) original.getOrElse(ex).printStackTrace() // temporarily enable printing of original failure signature to debug failing builds
+ throw ex
+ }
+
+ protected def handleRuntimeException(ex: RuntimeException)(implicit ctx: Context) = ex match {
+ case ex: BadSignature => throw ex
+ case _ => errorBadSignature(s"a runtime exception occurred: $ex", Some(ex))
+ }
+
+ def run()(implicit ctx: Context) =
+ try {
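+      // First pass: materialize all symbol entries so that owners and their
+      // scopes exist before annotations and children (second pass below) are read.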
+ var i = 0
+ while (i < index.length) {
+ if (entries(i) == null && isSymbolEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ val sym = readSymbol()
+ entries(i) = sym
+ sym.infoOrCompleter match {
+ case info: ClassUnpickler => info.init()
+ case _ =>
+ }
+ readIndex = savedIndex
+ }
+ i += 1
+ }
+ // read children last, fix for #3951
+ i = 0
+ while (i < index.length) {
+ if (entries(i) == null) {
+ if (isSymbolAnnotationEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ readSymbolAnnotation()
+ readIndex = savedIndex
+ } else if (isChildrenEntry(i)) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ readChildren()
+ readIndex = savedIndex
+ }
+ }
+ i += 1
+ }
+ } catch {
+ case ex: RuntimeException => handleRuntimeException(ex)
+ }
+
+ def source(implicit ctx: Context): AbstractFile = {
+ val f = classRoot.symbol.associatedFile
+ if (f != null) f else moduleClassRoot.symbol.associatedFile
+ }
+
+ private def checkVersion(implicit ctx: Context): Unit = {
+ val major = readNat()
+ val minor = readNat()
+ if (major != MajorVersion || minor > MinorVersion)
+ throw new IOException("Scala signature " + classRoot.fullName.decode +
+ " has wrong version\n expected: " +
+ MajorVersion + "." + MinorVersion +
+ "\n found: " + major + "." + minor +
+ " in " + source)
+ }
+
+ /** The `decls` scope associated with given symbol */
+ protected def symScope(sym: Symbol) = symScopes.getOrElseUpdate(sym, newScope)
+
+ /** Does entry represent an (internal) symbol */
+ protected def isSymbolEntry(i: Int)(implicit ctx: Context): Boolean = {
+ val tag = bytes(index(i)).toInt
+ (firstSymTag <= tag && tag <= lastSymTag &&
+ (tag != CLASSsym || !isRefinementSymbolEntry(i)))
+ }
+
+ /** Does entry represent an (internal or external) symbol */
+ protected def isSymbolRef(i: Int): Boolean = {
+ val tag = bytes(index(i))
+ (firstSymTag <= tag && tag <= lastExtSymTag)
+ }
+
+ /** Does entry represent a name? */
+ protected def isNameEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == TERMname || tag == TYPEname
+ }
+
+ /** Does entry represent a symbol annotation? */
+ protected def isSymbolAnnotationEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == SYMANNOT
+ }
+
+ /** Does the entry represent children of a symbol? */
+ protected def isChildrenEntry(i: Int): Boolean = {
+ val tag = bytes(index(i)).toInt
+ tag == CHILDREN
+ }
+
+ /** Does entry represent a refinement symbol?
+ * pre: Entry is a class symbol
+ */
+ protected def isRefinementSymbolEntry(i: Int)(implicit ctx: Context): Boolean = {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ val tag = readByte().toInt
+ assert(tag == CLASSsym)
+
+ readNat(); // read length
+ val result = readNameRef() == tpnme.REFINE_CLASS
+ readIndex = savedIndex
+ result
+ }
+
+ protected def isRefinementClass(sym: Symbol)(implicit ctx: Context): Boolean =
+ sym.name == tpnme.REFINE_CLASS
+
+ protected def isLocal(sym: Symbol)(implicit ctx: Context) = isUnpickleRoot(sym.topLevelClass)
+
+ protected def isUnpickleRoot(sym: Symbol)(implicit ctx: Context) = {
+ val d = sym.denot
+ d == moduleRoot || d == moduleClassRoot || d == classRoot
+ }
+
+ /** If entry at <code>i</code> is undefined, define it by performing
+   *  operation <code>op</code> with <code>readIndex</code> at the start of the i'th
+   *  entry. Restore <code>readIndex</code> afterwards.
+ */
+ protected def at[T <: AnyRef](i: Int, op: () => T): T = {
+ var r = entries(i)
+ if (r eq null) {
+ r = atReadPos(index(i), op)
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ }
+ r.asInstanceOf[T]
+ }
+
+ protected def atReadPos[T](start: Int, op: () => T): T = {
+ val savedIndex = readIndex
+ readIndex = start
+ try op()
+ finally readIndex = savedIndex
+ }
+
+ /** Read a name */
+ protected def readName()(implicit ctx: Context): Name = {
+ val tag = readByte()
+ val len = readNat()
+ tag match {
+ case TERMname => termName(bytes, readIndex, len)
+ case TYPEname => typeName(bytes, readIndex, len)
+ case _ => errorBadSignature("bad name tag: " + tag)
+ }
+ }
+ protected def readTermName()(implicit ctx: Context): TermName = readName().toTermName
+ protected def readTypeName()(implicit ctx: Context): TypeName = readName().toTypeName
+
+ /** Read a symbol */
+ protected def readSymbol()(implicit ctx: Context): Symbol = readDisambiguatedSymbol(alwaysTrue)()
+
+ /** Read a symbol, with possible disambiguation */
+ protected def readDisambiguatedSymbol(p: Symbol => Boolean)()(implicit ctx: Context): Symbol = {
+ val start = indexCoord(readIndex)
+ val tag = readByte()
+ val end = readNat() + readIndex
+ def atEnd = readIndex == end
+
+ def readExtSymbol(): Symbol = {
+ val name = readNameRef()
+ val owner = if (atEnd) loadingMirror.RootClass else readSymbolRef()
+
+ def adjust(denot: Denotation) = {
+ val denot1 = denot.disambiguate(d => p(d.symbol))
+ val sym = denot1.symbol
+ if (denot.exists && !denot1.exists) { // !!!DEBUG
+ val alts = denot.alternatives map (d => d + ":" + d.info + "/" + d.signature)
+ System.err.println(s"!!! disambiguation failure: $alts")
+ val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => d + ":" + d.info + "/" + d.signature)
+ System.err.println(s"!!! all members: $members")
+ }
+ if (tag == EXTref) sym else sym.moduleClass
+ }
+
+ def fromName(name: Name): Symbol = name.toTermName match {
+ case nme.ROOT => loadingMirror.RootClass
+ case nme.ROOTPKG => loadingMirror.RootPackage
+ case _ =>
+ def declIn(owner: Symbol) = adjust(owner.info.decl(name))
+ val sym = declIn(owner)
+ if (sym.exists || owner.ne(defn.ObjectClass)) sym else declIn(defn.AnyClass)
+ }
+
+ def slowSearch(name: Name): Symbol =
+ owner.info.decls.find(_.name == name).getOrElse(NoSymbol)
+
+ def nestedObjectSymbol: Symbol = {
+ // If the owner is overloaded (i.e. a method), it's not possible to select the
+ // right member, so return NoSymbol. This can only happen when unpickling a tree.
+ // the "case Apply" in readTree() takes care of selecting the correct alternative
+ // after parsing the arguments.
+ //if (owner.isOverloaded)
+ // return NoSymbol
+
+ if (tag == EXTMODCLASSref) {
+ val module = owner.info.decl(name.toTermName).suchThat(_ is Module)
+ module.info // force it, as completer does not yet point to module class.
+ module.symbol.moduleClass
+
+ /* was:
+ val moduleVar = owner.info.decl(name.toTermName.moduleVarName).symbol
+ if (moduleVar.isLazyAccessor)
+ return moduleVar.lazyAccessor.lazyAccessor
+ */
+ } else NoSymbol
+ }
+
+ // println(s"read ext symbol $name from ${owner.denot.debugString} in ${classRoot.debugString}") // !!! DEBUG
+
+ // (1) Try name.
+ fromName(name) orElse {
+ // (2) Try with expanded name. Can happen if references to private
+ // symbols are read from outside: for instance when checking the children
+ // of a class. See #1722.
+ fromName(name.toTermName.expandedName(owner)) orElse {
+ // (3) Try as a nested object symbol.
+ nestedObjectSymbol orElse {
+ // (4) Call the mirror's "missing" hook.
+ adjust(ctx.base.missingHook(owner, name)) orElse {
+ // println(owner.info.decls.toList.map(_.debugString).mkString("\n ")) // !!! DEBUG
+ // }
+ // (5) Create a stub symbol to defer hard failure a little longer.
+ System.err.println(i"***** missing reference, looking for $name in $owner")
+ System.err.println(i"decls = ${owner.info.decls}")
+ owner.info.decls.checkConsistent()
+ if (slowSearch(name).exists)
+ System.err.println(i"**** slow search found: ${slowSearch(name)}")
+ if (ctx.debug) Thread.dumpStack()
+ ctx.newStubSymbol(owner, name, source)
+ }
+ }
+ }
+ }
+ }
+
+ tag match {
+ case NONEsym => return NoSymbol
+ case EXTref | EXTMODCLASSref => return readExtSymbol()
+ case _ =>
+ }
+
+ // symbols that were pickled with Pickler.writeSymInfo
+ val nameref = readNat()
+ val name0 = at(nameref, readName)
+ val owner = readSymbolRef()
+
+ var flags = unpickleScalaFlags(readLongNat(), name0.isTypeName)
+ if (flags is DefaultParameter) {
+ // DefaultParameterized flag now on method, not parameter
+ //assert(flags is Param, s"$name0 in $owner")
+ flags = flags &~ DefaultParameterized
+ owner.setFlag(DefaultParameterized)
+ }
+
+ val name1 = name0.adjustIfModuleClass(flags)
+ val name = if (name1 == nme.TRAIT_CONSTRUCTOR) nme.CONSTRUCTOR else name1
+
+ def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner) && !(flags is ModuleClass)
+ def isModuleClassRoot = (name == moduleClassRoot.name) && (owner == moduleClassRoot.owner) && (flags is Module)
+ def isModuleRoot = (name == moduleClassRoot.name.sourceModuleName) && (owner == moduleClassRoot.owner) && (flags is Module)
+
+ //if (isClassRoot) println(s"classRoot of $classRoot found at $readIndex, flags = $flags") // !!! DEBUG
+ //if (isModuleRoot) println(s"moduleRoot of $moduleRoot found at $readIndex, flags = $flags") // !!! DEBUG
+ //if (isModuleClassRoot) println(s"moduleClassRoot of $moduleClassRoot found at $readIndex, flags = $flags") // !!! DEBUG
+
+ def completeRoot(denot: ClassDenotation, completer: LazyType): Symbol = {
+ denot.setFlag(flags)
+ denot.resetFlag(Touched) // allow one more completion
+ denot.info = completer
+ denot.symbol
+ }
+
+ def finishSym(sym: Symbol): Symbol = {
+ if (sym.isClass) sym.setFlag(Scala2x)
+ val owner = sym.owner
+ if (owner.isClass &&
+ !( isUnpickleRoot(sym)
+ || (sym is Scala2Existential)
+ || isRefinementClass(sym)
+ )
+ )
+ owner.asClass.enter(sym, symScope(owner))
+ else if (isRefinementClass(owner))
+ symScope(owner).openForMutations.enter(sym)
+ sym
+ }
+
+ finishSym(tag match {
+ case TYPEsym | ALIASsym =>
+ var name1 = name.asTypeName
+ var flags1 = flags
+ if (flags is TypeParam) {
+ name1 = name1.expandedName(owner)
+ flags1 |= owner.typeParamCreationFlags | ExpandedName
+ }
+ ctx.newSymbol(owner, name1, flags1, localMemberUnpickler, coord = start)
+ case CLASSsym =>
+ var infoRef = readNat()
+ if (isSymbolRef(infoRef)) infoRef = readNat()
+ if (isClassRoot)
+ completeRoot(
+ classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol, infoRef))
+ else if (isModuleClassRoot)
+ completeRoot(
+ moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef))
+ else if (name == tpnme.REFINE_CLASS)
+ // create a type alias instead
+ ctx.newSymbol(owner, name, flags, localMemberUnpickler, coord = start)
+ else {
+ def completer(cls: Symbol) = {
+ val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls)
+ if (flags is ModuleClass)
+ unpickler withSourceModule (implicit ctx =>
+ cls.owner.info.decls.lookup(cls.name.sourceModuleName)
+ .suchThat(_ is Module).symbol)
+ else unpickler
+ }
+ ctx.newClassSymbol(owner, name.asTypeName, flags, completer, coord = start)
+ }
+ case VALsym =>
+ ctx.newSymbol(owner, name.asTermName, flags, localMemberUnpickler, coord = start)
+ case MODULEsym =>
+ if (isModuleRoot) {
+ moduleRoot setFlag flags
+ moduleRoot.symbol
+ } else ctx.newSymbol(owner, name.asTermName, flags,
+ new LocalUnpickler() withModuleClass(implicit ctx =>
+ owner.info.decls.lookup(name.moduleClassName)
+ .suchThat(_ is Module).symbol)
+ , coord = start)
+ case _ =>
+ errorBadSignature("bad symbol tag: " + tag)
+ })
+ }
+
+ class LocalUnpickler extends LazyType {
+ def startCoord(denot: SymDenotation): Coord = denot.symbol.coord
+ def complete(denot: SymDenotation)(implicit ctx: Context): Unit = try {
+ def parseToCompletion(denot: SymDenotation)(implicit ctx: Context) = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ def atEnd = readIndex == end
+ val unusedNameref = readNat()
+ val unusedOwnerref = readNat()
+ val unusedFlags = readLongNat()
+ var inforef = readNat()
+ denot.privateWithin =
+ if (!isSymbolRef(inforef)) NoSymbol
+ else {
+ val pw = at(inforef, readSymbol)
+ inforef = readNat()
+ pw
+ }
+ // println("reading type for " + denot) // !!! DEBUG
+ val tp = at(inforef, readType)
+ denot match {
+ case denot: ClassDenotation =>
+ val selfInfo = if (atEnd) NoType else readTypeRef()
+ setClassInfo(denot, tp, selfInfo)
+ case denot =>
+ val tp1 = translateTempPoly(tp)
+ denot.info =
+ if (tag == ALIASsym) TypeAlias(tp1)
+ else if (denot.isType) checkNonCyclic(denot.symbol, tp1, reportErrors = false)
+ // we need the checkNonCyclic call to insert LazyRefs for F-bounded cycles
+ else if (!denot.is(Param)) tp1.underlyingIfRepeated(isJava = false)
+ else tp1
+ if (denot.isConstructor) addConstructorTypeParams(denot)
+ if (atEnd) {
+ assert(!(denot is SuperAccessor), denot)
+ } else {
+ assert(denot is (SuperAccessor | ParamAccessor), denot)
+ def disambiguate(alt: Symbol) = { // !!! DEBUG
+ ctx.debugTraceIndented(s"disambiguating ${denot.info} =:= ${denot.owner.thisType.memberInfo(alt)} ${denot.owner}") {
+ denot.info matches denot.owner.thisType.memberInfo(alt)
+ }
+ }
+ val alias = readDisambiguatedSymbolRef(disambiguate).asTerm
+ denot.addAnnotation(Annotation.makeAlias(alias))
+ }
+ }
+ // println(s"unpickled ${denot.debugString}, info = ${denot.info}") !!! DEBUG
+ }
+ atReadPos(startCoord(denot).toIndex,
+ () => parseToCompletion(denot)(
+ ctx.addMode(Mode.Scala2Unpickling).withPhaseNoLater(ctx.picklerPhase)))
+ } catch {
+ case ex: RuntimeException => handleRuntimeException(ex)
+ }
+ }
+
+ object localMemberUnpickler extends LocalUnpickler
+
+ class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter {
+ private def readTypeParams()(implicit ctx: Context): List[TypeSymbol] = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ if (tag == POLYtpe) {
+ val unusedRestpeRef = readNat()
+ until(end, readSymbolRef).asInstanceOf[List[TypeSymbol]]
+ } else Nil
+ }
+ private def loadTypeParams(implicit ctx: Context) =
+ atReadPos(index(infoRef), readTypeParams)
+
+ /** Force reading type params early, we need them in setClassInfo of subclasses. */
+ def init()(implicit ctx: Context) = loadTypeParams
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] =
+ loadTypeParams
+ }
+
+ def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int) =
+ (new ClassUnpickler(infoRef) with SymbolLoaders.SecondCompleter {
+ override def startCoord(denot: SymDenotation): Coord = start
+ }) withDecls symScope(cls) withSourceModule (_ => module)
+
+ /** Convert
+ * tp { type name = sym } forSome { sym >: L <: H }
+ * to
+ * tp { name >: L <: H }
+ * and
+ * tp { name: sym } forSome { sym <: T with Singleton }
+ * to
+ * tp { name: T }
+ */
+ def elimExistentials(boundSyms: List[Symbol], tp: Type)(implicit ctx: Context): Type = {
+ // Need to be careful not to run into cyclic references here (observed when
+    // compiling t247.scala). That's why we avoid taking `symbol` of a TypeRef
+ // unless names match up.
+ val isBound = (tp: Type) => {
+ def refersTo(tp: Type, sym: Symbol): Boolean = tp match {
+ case tp @ TypeRef(_, name) => sym.name == name && sym == tp.symbol
+ case tp: TypeVar => refersTo(tp.underlying, sym)
+ case tp : LazyRef => refersTo(tp.ref, sym)
+ case _ => false
+ }
+ boundSyms.exists(refersTo(tp, _))
+ }
+ // Cannot use standard `existsPart` method because it calls `lookupRefined`
+ // which can cause CyclicReference errors.
+ val isBoundAccumulator = new ExistsAccumulator(isBound) {
+ override def foldOver(x: Boolean, tp: Type): Boolean = tp match {
+ case tp: TypeRef => applyToPrefix(x, tp)
+ case _ => super.foldOver(x, tp)
+ }
+ }
+ def removeSingleton(tp: Type): Type =
+ if (tp isRef defn.SingletonClass) defn.AnyType else tp
+ def elim(tp: Type): Type = tp match {
+ case tp @ RefinedType(parent, name, rinfo) =>
+ val parent1 = elim(tp.parent)
+ rinfo match {
+ case TypeAlias(info: TypeRef) if isBound(info) =>
+ RefinedType(parent1, name, info.symbol.info)
+ case info: TypeRef if isBound(info) =>
+ val info1 = info.symbol.info
+ assert(info1.derivesFrom(defn.SingletonClass))
+ RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _))
+ case info =>
+ tp.derivedRefinedType(parent1, name, info)
+ }
+ case tp @ HKApply(tycon, args) =>
+ val tycon1 = tycon.safeDealias
+ def mapArg(arg: Type) = arg match {
+ case arg: TypeRef if isBound(arg) => arg.symbol.info
+ case _ => arg
+ }
+ if (tycon1 ne tycon) elim(tycon1.appliedTo(args))
+ else tp.derivedAppliedType(tycon, args.map(mapArg))
+ case _ =>
+ tp
+ }
+ val tp1 = elim(tp)
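+    // If bound symbols still occur after `elim`, approximate them by their upper
+    // bounds and then by Any (the second substitution catches occurrences introduced
+    // by the bounds themselves), and warn that precision was lost.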
+ if (isBoundAccumulator(false, tp1)) {
+ val anyTypes = boundSyms map (_ => defn.AnyType)
+ val boundBounds = boundSyms map (_.info.bounds.hi)
+ val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes)
+ ctx.warning(s"""failure to eliminate existential
+ |original type : $tp forSome {${ctx.dclsText(boundSyms, "; ").show}
+ |reduces to : $tp1
+ |type used instead: $tp2
+ |proceed at own risk.""".stripMargin)
+ tp2
+ } else tp1
+ }
+
+ /** Read a type
+ *
+ * @param forceProperType is used to ease the transition to NullaryMethodTypes (commentmarker: NMT_TRANSITION)
+   *        the flag says that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe))
+ * (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor)
+ */
+ protected def readType()(implicit ctx: Context): Type = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ (tag: @switch) match {
+ case NOtpe =>
+ NoType
+ case NOPREFIXtpe =>
+ NoPrefix
+ case THIStpe =>
+ readSymbolRef().thisType
+ case SINGLEtpe =>
+ val pre = readTypeRef()
+ val sym = readDisambiguatedSymbolRef(_.info.isParameterless)
+ if (isLocal(sym) || (pre == NoPrefix)) pre select sym
+ else TermRef.withSig(pre, sym.name.asTermName, Signature.NotAMethod) // !!! should become redundant
+ case SUPERtpe =>
+ val thistpe = readTypeRef()
+ val supertpe = readTypeRef()
+ SuperType(thistpe, supertpe)
+ case CONSTANTtpe =>
+ ConstantType(readConstantRef())
+ case TYPEREFtpe =>
+ var pre = readTypeRef()
+ val sym = readSymbolRef()
+ pre match {
+ case thispre: ThisType =>
+ // The problem is that class references super.C get pickled as
+ // this.C. Dereferencing the member might then get an overriding class
+ // instance. The problem arises for instance for LinkedHashMap#MapValues
+ // and also for the inner Transform class in all views. We fix it by
+ // replacing the this with the appropriate super.
+ if (sym.owner != thispre.cls) {
+ val overriding = thispre.cls.info.decls.lookup(sym.name)
+ if (overriding.exists && overriding != sym) {
+ val base = pre.baseTypeWithArgs(sym.owner)
+ assert(base.exists)
+ pre = SuperType(thispre, base)
+ }
+ }
+ case _ =>
+ }
+ val tycon =
+ if (sym.isClass && sym.is(Scala2x) && !sym.owner.is(Package))
+ // used fixed sym for Scala 2 inner classes, because they might be shadowed
+ TypeRef.withFixedSym(pre, sym.name.asTypeName, sym.asType)
+ else if (isLocal(sym) || pre == NoPrefix) {
+ val pre1 = if ((pre eq NoPrefix) && (sym is TypeParam)) sym.owner.thisType else pre
+ pre1 select sym
+ }
+ else TypeRef(pre, sym.name.asTypeName)
+ val args = until(end, readTypeRef)
+ if (sym == defn.ByNameParamClass2x) ExprType(args.head)
+ else if (args.nonEmpty) tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args))
+ else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams)
+ else tycon
+ case TYPEBOUNDStpe =>
+ TypeBounds(readTypeRef(), readTypeRef())
+ case REFINEDtpe =>
+ val clazz = readSymbolRef()
+ val decls = symScope(clazz)
+ symScopes(clazz) = EmptyScope // prevent further additions
+ val parents = until(end, readTypeRef)
+ val parent = parents.reduceLeft(AndType(_, _))
+ if (decls.isEmpty) parent
+ else {
+ def subst(info: Type, rt: RecType) =
+ if (clazz.isClass) info.substThis(clazz.asClass, RecThis(rt))
+ else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
+ def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info)
+ val refined = (parent /: decls.toList)(addRefinement)
+ RecType.closeOver(rt => subst(refined, rt))
+ }
+ case CLASSINFOtpe =>
+ val clazz = readSymbolRef()
+ TempClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
+ case METHODtpe | IMPLICITMETHODtpe =>
+ val restpe = readTypeRef()
+ val params = until(end, readSymbolRef)
+ def isImplicit =
+ tag == IMPLICITMETHODtpe ||
+ params.nonEmpty && (params.head is Implicit)
+ val maker = if (isImplicit) ImplicitMethodType else MethodType
+ maker.fromSymbols(params, restpe)
+ case POLYtpe =>
+ val restpe = readTypeRef()
+ val typeParams = until(end, readSymbolRef)
+ if (typeParams.nonEmpty) TempPolyType(typeParams.asInstanceOf[List[TypeSymbol]], restpe.widenExpr)
+ else ExprType(restpe)
+ case EXISTENTIALtpe =>
+ val restpe = readTypeRef()
+ val boundSyms = until(end, readSymbolRef)
+ elimExistentials(boundSyms, restpe)
+ case ANNOTATEDtpe =>
+ AnnotatedType.make(readTypeRef(), until(end, readAnnotationRef))
+ case _ =>
+ noSuchTypeTag(tag, end)
+ }
+ }
+
+ def readTypeParams()(implicit ctx: Context): List[Symbol] = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ if (tag == POLYtpe) {
+      val unusedRestpeRef = readNat()
+ until(end, readSymbolRef)
+ } else Nil
+ }
+
+ def noSuchTypeTag(tag: Int, end: Int)(implicit ctx: Context): Type =
+ errorBadSignature("bad type tag: " + tag)
+
+ /** Read a constant */
+ protected def readConstant()(implicit ctx: Context): Constant = {
+ val tag = readByte().toInt
+ val len = readNat()
+ (tag: @switch) match {
+ case LITERALunit => Constant(())
+ case LITERALboolean => Constant(readLong(len) != 0L)
+ case LITERALbyte => Constant(readLong(len).toByte)
+ case LITERALshort => Constant(readLong(len).toShort)
+ case LITERALchar => Constant(readLong(len).toChar)
+ case LITERALint => Constant(readLong(len).toInt)
+ case LITERALlong => Constant(readLong(len))
+ case LITERALfloat => Constant(intBitsToFloat(readLong(len).toInt))
+ case LITERALdouble => Constant(longBitsToDouble(readLong(len)))
+ case LITERALstring => Constant(readNameRef().toString)
+ case LITERALnull => Constant(null)
+ case LITERALclass => Constant(readTypeRef())
+ case LITERALenum => Constant(readSymbolRef())
+ case _ => noSuchConstantTag(tag, len)
+ }
+ }
+
+ def noSuchConstantTag(tag: Int, len: Int)(implicit ctx: Context): Constant =
+ errorBadSignature("bad constant tag: " + tag)
+
+ /** Read children and store them into the corresponding symbol.
+ */
+ protected def readChildren()(implicit ctx: Context): Unit = {
+ val tag = readByte()
+ assert(tag == CHILDREN)
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ while (readIndex != end)
+ target.addAnnotation(Annotation.makeChild(readSymbolRef()))
+ }
+
+ /* Read a reference to a pickled item */
+ protected def readSymbolRef()(implicit ctx: Context): Symbol = { //OPT inlined from: at(readNat(), readSymbol) to save on closure creation
+ val i = readNat()
+ var r = entries(i)
+ if (r eq null) {
+ val savedIndex = readIndex
+ readIndex = index(i)
+ r = readSymbol()
+ assert(entries(i) eq null, entries(i))
+ entries(i) = r
+ readIndex = savedIndex
+ }
+ r.asInstanceOf[Symbol]
+ }
+
+ protected def readDisambiguatedSymbolRef(p: Symbol => Boolean)(implicit ctx: Context): Symbol =
+ at(readNat(), readDisambiguatedSymbol(p))
+
+ protected def readNameRef()(implicit ctx: Context): Name = at(readNat(), readName)
+ protected def readTypeRef()(implicit ctx: Context): Type = at(readNat(), () => readType()) // after the NMT_TRANSITION period, we can leave off the () => ... ()
+ protected def readConstantRef()(implicit ctx: Context): Constant = at(readNat(), readConstant)
+
+ protected def readTypeNameRef()(implicit ctx: Context): TypeName = readNameRef().toTypeName
+ protected def readTermNameRef()(implicit ctx: Context): TermName = readNameRef().toTermName
+
+ protected def readAnnotationRef()(implicit ctx: Context): Annotation = at(readNat(), readAnnotation)
+
+ protected def readModifiersRef(isType: Boolean)(implicit ctx: Context): Modifiers = at(readNat(), () => readModifiers(isType))
+ protected def readTreeRef()(implicit ctx: Context): Tree = at(readNat(), readTree)
+
+ /** Read an annotation argument, which is pickled either
+ * as a Constant or a Tree.
+ */
+ protected def readAnnotArg(i: Int)(implicit ctx: Context): Tree = bytes(index(i)) match {
+ case TREE => at(i, readTree)
+ case _ => Literal(at(i, readConstant))
+ }
+
+ /** Read a ClassfileAnnotArg (argument to a classfile annotation)
+ */
+ private def readArrayAnnotArg()(implicit ctx: Context): Tree = {
+ readByte() // skip the `annotargarray` tag
+ val end = readNat() + readIndex
+ // array elements are trees representing instances of scala.annotation.Annotation
+ SeqLiteral(
+ until(end, () => readClassfileAnnotArg(readNat())),
+ TypeTree(defn.AnnotationType))
+ }
+
+ private def readAnnotInfoArg()(implicit ctx: Context): Tree = {
+ readByte() // skip the `annotinfo` tag
+ val end = readNat() + readIndex
+ readAnnotationContents(end)
+ }
+
+ protected def readClassfileAnnotArg(i: Int)(implicit ctx: Context): Tree = bytes(index(i)) match {
+ case ANNOTINFO => at(i, readAnnotInfoArg)
+ case ANNOTARGARRAY => at(i, readArrayAnnotArg)
+ case _ => readAnnotArg(i)
+ }
+
+ /** Read an annotation's contents. Not to be called directly, use
+ * readAnnotation, readSymbolAnnotation, or readAnnotInfoArg
+ */
+ protected def readAnnotationContents(end: Int)(implicit ctx: Context): Tree = {
+ val atp = readTypeRef()
+ val args = {
+ val t = new ListBuffer[Tree]
+
+ while (readIndex != end) {
+ val argref = readNat()
+ t += {
+ if (isNameEntry(argref)) {
+ val name = at(argref, readName)
+ val arg = readClassfileAnnotArg(readNat())
+ NamedArg(name.asTermName, arg)
+ } else readAnnotArg(argref)
+ }
+ }
+ t.toList
+ }
+ // println(atp)
+ val targs = atp.argTypes
+
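+    // Rebuild the annotation as a constructor call of `atp`; applyOverloaded picks
+    // the matching alternative among overloaded constructors.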
+ tpd.applyOverloaded(tpd.New(atp withoutArgs targs), nme.CONSTRUCTOR, args, targs, atp)
+  }
+
+ /** Read an annotation and as a side effect store it into
+ * the symbol it requests. Called at top-level, for all
+ * (symbol, annotInfo) entries.
+ */
+ protected def readSymbolAnnotation()(implicit ctx: Context): Unit = {
+ val tag = readByte()
+ if (tag != SYMANNOT)
+ errorBadSignature("symbol annotation expected (" + tag + ")")
+ val end = readNat() + readIndex
+ val target = readSymbolRef()
+ target.addAnnotation(deferredAnnot(end))
+ }
+
+ /** Read an annotation and return it. Used when unpickling
+ * an ANNOTATED(WSELF)tpe or a NestedAnnotArg
+ */
+ protected def readAnnotation()(implicit ctx: Context): Annotation = {
+ val tag = readByte()
+ if (tag != ANNOTINFO)
+ errorBadSignature("annotation expected (" + tag + ")")
+ val end = readNat() + readIndex
+ deferredAnnot(end)
+ }
+
+ /** A deferred annotation that can be completed by reading
+ * the bytes between `readIndex` and `end`.
+ */
+ protected def deferredAnnot(end: Int)(implicit ctx: Context): Annotation = {
+ val start = readIndex
+ val atp = readTypeRef()
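+    // The annotation's type is read eagerly; its arguments are unpickled lazily,
+    // by re-reading from `start` when the annotation is first forced.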
+ Annotation.deferred(
+ atp.typeSymbol, implicit ctx1 =>
+ atReadPos(start, () => readAnnotationContents(end)(ctx1.withPhase(ctx.phase))))
+ }
+
+ /* Read an abstract syntax tree */
+ protected def readTree()(implicit ctx: Context): Tree = {
+ val outerTag = readByte()
+ if (outerTag != TREE)
+ errorBadSignature("tree expected (" + outerTag + ")")
+ val end = readNat() + readIndex
+ val tag = readByte()
+ val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+
+ // Set by the three functions to follow. If symbol is non-null
+ // after the new tree 't' has been created, t has its Symbol
+ // set to symbol; and it always has its Type set to tpe.
+ var symbol: Symbol = null
+ var mods: Modifiers = null
+ var name: Name = null
+
+ /** Read a Symbol, Modifiers, and a Name */
+ def setSymModsName(): Unit = {
+ symbol = readSymbolRef()
+ mods = readModifiersRef(symbol.isType)
+ name = readNameRef()
+ }
+ /** Read a Symbol and a Name */
+ def setSymName(): Unit = {
+ symbol = readSymbolRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol */
+ def setSym(): Unit = {
+ symbol = readSymbolRef()
+ }
+
+ implicit val pos: Position = NoPosition
+
+ tag match {
+ case EMPTYtree =>
+ EmptyTree
+
+ case PACKAGEtree =>
+ setSym()
+ val pid = readTreeRef().asInstanceOf[RefTree]
+ val stats = until(end, readTreeRef)
+ PackageDef(pid, stats)
+
+ case CLASStree =>
+ setSymModsName()
+ val impl = readTemplateRef()
+ val tparams = until(end, readTypeDefRef)
+ val cls = symbol.asClass
+ val ((constr: DefDef) :: Nil, stats) =
+ impl.body.partition(_.symbol == cls.primaryConstructor)
+ ClassDef(cls, constr, tparams ++ stats)
+
+ case MODULEtree =>
+ setSymModsName()
+ ModuleDef(symbol.asTerm, readTemplateRef().body)
+
+ case VALDEFtree =>
+ setSymModsName()
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+ ValDef(symbol.asTerm, rhs)
+
+ case DEFDEFtree =>
+ setSymModsName()
+ val tparams = times(readNat(), readTypeDefRef)
+ val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
+ val tpt = readTreeRef()
+ val rhs = readTreeRef()
+ DefDef(symbol.asTerm, rhs)
+
+ case TYPEDEFtree =>
+ setSymModsName()
+ val rhs = readTreeRef()
+ val tparams = until(end, readTypeDefRef)
+ TypeDef(symbol.asType)
+
+ case LABELtree =>
+ setSymName()
+ val rhs = readTreeRef()
+ val params = until(end, readIdentRef)
+ val ldef = DefDef(symbol.asTerm, rhs)
+ def isCaseLabel(sym: Symbol) = sym.name.startsWith(nme.CASEkw)
+ if (isCaseLabel(symbol)) ldef
+ else Block(ldef :: Nil, Apply(Ident(symbol.termRef), Nil))
+
+ case IMPORTtree =>
+ setSym()
+ val expr = readTreeRef()
+ val selectors = until(end, () => {
+ val fromName = readNameRef()
+ val toName = readNameRef()
+ val from = untpd.Ident(fromName)
+ val to = untpd.Ident(toName)
+ if (toName.isEmpty) from else untpd.Thicket(from, untpd.Ident(toName))
+ })
+
+ Import(expr, selectors)
+
+ case TEMPLATEtree =>
+ setSym()
+ val parents = times(readNat(), readTreeRef)
+ val self = readValDefRef()
+ val body = until(end, readTreeRef)
+ untpd.Template(???, parents, self, body) // !!! TODO: pull out primary constructor
+ .withType(symbol.namedType)
+
+ case BLOCKtree =>
+ val expr = readTreeRef()
+ val stats = until(end, readTreeRef)
+ Block(stats, expr)
+
+ case CASEtree =>
+ val pat = readTreeRef()
+ val guard = readTreeRef()
+ val body = readTreeRef()
+ CaseDef(pat, guard, body)
+
+ case ALTERNATIVEtree =>
+ Alternative(until(end, readTreeRef))
+
+ case STARtree =>
+ readTreeRef()
+ unimplementedTree("STAR")
+
+ case BINDtree =>
+ setSymName()
+ Bind(symbol.asTerm, readTreeRef())
+
+ case UNAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ UnApply(fun, Nil, args, defn.AnyType) // !!! this is wrong in general
+
+ case ARRAYVALUEtree =>
+ val elemtpt = readTreeRef()
+ val trees = until(end, readTreeRef)
+ SeqLiteral(trees, elemtpt)
+ // note can't deal with trees passed to Java methods as arrays here
+
+ case FUNCTIONtree =>
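+        // A Scala 2 Function node is re-expressed as a synthetic `apply` method
+        // plus a Closure over it; the function body is re-owned by that method.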
+ setSym()
+ val body = readTreeRef()
+ val vparams = until(end, readValDefRef)
+ val applyType = MethodType(vparams map (_.name), vparams map (_.tpt.tpe), body.tpe)
+ val applyMeth = ctx.newSymbol(symbol.owner, nme.apply, Method, applyType)
+ Closure(applyMeth, Function.const(body.changeOwner(symbol, applyMeth)) _)
+
+ case ASSIGNtree =>
+ val lhs = readTreeRef()
+ val rhs = readTreeRef()
+ Assign(lhs, rhs)
+
+ case IFtree =>
+ val cond = readTreeRef()
+ val thenp = readTreeRef()
+ val elsep = readTreeRef()
+ If(cond, thenp, elsep)
+
+ case MATCHtree =>
+ val selector = readTreeRef()
+ val cases = until(end, readCaseDefRef)
+ Match(selector, cases)
+
+ case RETURNtree =>
+ setSym()
+ Return(readTreeRef(), Ident(symbol.termRef))
+
+ case TREtree =>
+ val block = readTreeRef()
+ val finalizer = readTreeRef()
+ val catches = until(end, readCaseDefRef)
+ Try(block, catches, finalizer)
+
+ case THROWtree =>
+ Throw(readTreeRef())
+
+ case NEWtree =>
+ New(readTreeRef().tpe)
+
+ case TYPEDtree =>
+ val expr = readTreeRef()
+ val tpt = readTreeRef()
+ Typed(expr, tpt)
+
+ case TYPEAPPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ TypeApply(fun, args)
+
+ case APPLYtree =>
+ val fun = readTreeRef()
+ val args = until(end, readTreeRef)
+ /*
+ if (fun.symbol.isOverloaded) {
+ fun.setType(fun.symbol.info)
+ inferMethodAlternative(fun, args map (_.tpe), tpe)
+ }
+*/
+ Apply(fun, args) // note: can't deal with overloaded syms yet
+
+ case APPLYDYNAMICtree =>
+ setSym()
+ val qual = readTreeRef()
+ val args = until(end, readTreeRef)
+ unimplementedTree("APPLYDYNAMIC")
+
+ case SUPERtree =>
+ setSym()
+ val qual = readTreeRef()
+ val mix = readTypeNameRef()
+ Super(qual, mix, inConstrCall = false) // todo: revise
+
+ case THIStree =>
+ setSym()
+ val name = readTypeNameRef()
+ This(symbol.asClass)
+
+ case SELECTtree =>
+ setSym()
+ val qualifier = readTreeRef()
+ val selector = readNameRef()
+ qualifier.select(symbol.namedType)
+ case IDENTtree =>
+ setSymName()
+ Ident(symbol.namedType)
+
+ case LITERALtree =>
+ Literal(readConstantRef())
+
+ case TYPEtree =>
+ TypeTree(tpe)
+
+ case ANNOTATEDtree =>
+ val annot = readTreeRef()
+ val arg = readTreeRef()
+ Annotated(arg, annot)
+
+ case SINGLETONTYPEtree =>
+ SingletonTypeTree(readTreeRef())
+
+ case SELECTFROMTYPEtree =>
+ val qualifier = readTreeRef()
+ val selector = readTypeNameRef()
+ Select(qualifier, symbol.namedType)
+
+ case COMPOUNDTYPEtree =>
+ readTemplateRef()
+ TypeTree(tpe)
+
+ case APPLIEDTYPEtree =>
+ val tpt = readTreeRef()
+ val args = until(end, readTreeRef)
+ AppliedTypeTree(tpt, args)
+
+ case TYPEBOUNDStree =>
+ val lo = readTreeRef()
+ val hi = readTreeRef()
+ TypeBoundsTree(lo, hi)
+
+ case EXISTENTIALTYPEtree =>
+ val tpt = readTreeRef()
+ val whereClauses = until(end, readTreeRef)
+ TypeTree(tpe)
+
+ case _ =>
+ noSuchTreeTag(tag, end)
+ }
+ }
+
+ def noSuchTreeTag(tag: Int, end: Int)(implicit ctx: Context) =
+ errorBadSignature("unknown tree type (" + tag + ")")
+
+ def unimplementedTree(what: String)(implicit ctx: Context) =
+ errorBadSignature(s"cannot read $what trees from Scala 2.x signatures")
+
+ def readModifiers(isType: Boolean)(implicit ctx: Context): Modifiers = {
+ val tag = readNat()
+ if (tag != MODIFIERS)
+ errorBadSignature("expected a modifiers tag (" + tag + ")")
+ val end = readNat() + readIndex
+ val pflagsHi = readNat()
+ val pflagsLo = readNat()
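+    // Pickled flags are stored as two 32-bit nats; recombine them into one 64-bit word.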
+ val pflags = (pflagsHi.toLong << 32) + pflagsLo
+ val flags = unpickleScalaFlags(pflags, isType)
+ val privateWithin = readNameRef().asTypeName
+ Modifiers(flags, privateWithin, Nil)
+ }
+
+ protected def readTemplateRef()(implicit ctx: Context): Template =
+ readTreeRef() match {
+ case templ: Template => templ
+ case other =>
+ errorBadSignature("expected a template (" + other + ")")
+ }
+ protected def readCaseDefRef()(implicit ctx: Context): CaseDef =
+ readTreeRef() match {
+ case tree: CaseDef => tree
+ case other =>
+ errorBadSignature("expected a case def (" + other + ")")
+ }
+ protected def readValDefRef()(implicit ctx: Context): ValDef =
+ readTreeRef() match {
+ case tree: ValDef => tree
+ case other =>
+ errorBadSignature("expected a ValDef (" + other + ")")
+ }
+ protected def readIdentRef()(implicit ctx: Context): Ident =
+ readTreeRef() match {
+ case tree: Ident => tree
+ case other =>
+ errorBadSignature("expected an Ident (" + other + ")")
+ }
+ protected def readTypeDefRef()(implicit ctx: Context): TypeDef =
+ readTreeRef() match {
+ case tree: TypeDef => tree
+ case other =>
+        errorBadSignature("expected a TypeDef (" + other + ")")
+ }
+
+}