Diffstat (limited to 'src')
-rw-r--r--  src/compiler/scala/reflect/macros/runtime/Names.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/IMain.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatchSupport.scala | 115
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Matrix.scala | 232
-rw-r--r--  src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala | 191
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 866
-rw-r--r--  src/compiler/scala/tools/nsc/matching/PatternBindings.scala | 126
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Patterns.scala | 457
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 82
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 163
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 113
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 76
-rw-r--r--  src/compiler/scala/tools/nsc/util/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/FastTrack.scala | 48
-rw-r--r--  src/compiler/scala/tools/reflect/MacroImplementations.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/Trees.scala | 6
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 60
-rw-r--r--  src/reflect/scala/reflect/internal/settings/MutableSettings.scala | 1
-rw-r--r--  src/reflect/scala/reflect/macros/Names.scala | 14
-rw-r--r--  src/reflect/scala/reflect/runtime/Settings.scala | 1
28 files changed, 207 insertions, 2447 deletions
diff --git a/src/compiler/scala/reflect/macros/runtime/Names.scala b/src/compiler/scala/reflect/macros/runtime/Names.scala
index ee9f3a56d3..635e8bcd45 100644
--- a/src/compiler/scala/reflect/macros/runtime/Names.scala
+++ b/src/compiler/scala/reflect/macros/runtime/Names.scala
@@ -7,11 +7,20 @@ trait Names {
lazy val freshNameCreator = callsiteTyper.context.unit.fresh
def fresh(): String =
- freshNameCreator.newName()
+ freshName()
def fresh(name: String): String =
- freshNameCreator.newName(name)
+ freshName(name)
def fresh[NameType <: Name](name: NameType): NameType =
+ freshName[NameType](name)
+
+ def freshName(): String =
+ freshNameCreator.newName()
+
+ def freshName(name: String): String =
+ freshNameCreator.newName(name)
+
+ def freshName[NameType <: Name](name: NameType): NameType =
name.mapName(freshNameCreator.newName(_)).asInstanceOf[NameType]
}
\ No newline at end of file
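The hunk above keeps the macro runtime's existing `fresh` methods but turns them into forwarders to new `freshName` overloads (the matching declarations are added to `src/reflect/scala/reflect/macros/Names.scala` further down). As a minimal sketch, with the object and method names invented for illustration, a def-macro implementation obtains collision-free identifiers from its context like this:

```scala
import scala.reflect.macros.Context

// Hypothetical macro implementation; only the three fresh-name calls matter here.
object FreshNameDemo {
  def expandImpl(c: Context)(): c.Expr[Unit] = {
    import c.universe._
    val anonymous: String   = c.fresh()                    // forwards to freshName()
    val prefixed:  String   = c.fresh("tmp$")              // forwards to freshName("tmp$")
    val asName:    TermName = c.fresh(newTermName("acc$")) // forwards to freshName[TermName]
    reify { () } // the generated tree is irrelevant for this example
  }
}
```

Existing callers of `fresh` are unaffected; `freshName` simply gives the same functionality an explicit name.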
diff --git a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
index c647ef6f51..c5bb8494ce 100644
--- a/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/CompletionOutput.scala
@@ -37,7 +37,7 @@ trait CompletionOutput {
val pkg = method.ownerChain find (_.isPackageClass) map (_.fullName) getOrElse ""
def relativize(str: String): String = quietString(str stripPrefix (pkg + "."))
- def relativize(tp: Type): String = relativize(tp.normalize.toString)
+ def relativize(tp: Type): String = relativize(tp.dealiasWiden.toString)
def braceList(tparams: List[String]) = if (tparams.isEmpty) "" else (tparams map relativize).mkString("[", ", ", "]")
def parenList(params: List[Any]) = params.mkString("(", ", ", ")")
@@ -55,8 +55,8 @@ trait CompletionOutput {
}
)
- def tupleString(tp: Type) = parenList(tp.normalize.typeArgs map relativize)
- def functionString(tp: Type) = tp.normalize.typeArgs match {
+ def tupleString(tp: Type) = parenList(tp.dealiasWiden.typeArgs map relativize)
+ def functionString(tp: Type) = tp.dealiasWiden.typeArgs match {
case List(t, r) => t + " => " + r
case xs => parenList(xs.init) + " => " + xs.last
}
@@ -64,7 +64,7 @@ trait CompletionOutput {
def tparamsString(tparams: List[Symbol]) = braceList(tparams map (_.defString))
def paramsString(params: List[Symbol]) = {
def paramNameString(sym: Symbol) = if (sym.isSynthetic) "" else sym.nameString + ": "
- def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.normalize)
+ def paramString(sym: Symbol) = paramNameString(sym) + typeToString(sym.info.dealiasWiden)
val isImplicit = params.nonEmpty && params.head.isImplicit
val strs = (params map paramString) match {
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 91e909b1f1..36f012229e 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -523,7 +523,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// normalize non-public types so we don't see protected aliases like Self
def normalizeNonPublic(tp: Type) = tp match {
- case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.normalize
+ case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
case _ => tp
}
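Both this hunk and the `CompletionOutput` changes above replace `normalize` with plain alias expansion (`dealias`, or `dealiasWiden` where singleton types should also be widened), since `normalize` additionally performs unrelated simplifications. A small, hedged illustration using the public runtime-reflection API (object and alias names are invented for the example):

```scala
import scala.reflect.runtime.universe._

object DealiasDemo extends App {
  type Alias = java.lang.String

  val t = typeOf[Alias]
  println(t)          // prints the alias itself (DealiasDemo.Alias)
  println(t.dealias)  // alias expanded to String; nothing else is rewritten
}
```

`dealiasWiden` is the compiler-internal combination of `dealias` and `widen` applied until neither changes the type, which is enough here to strip non-public aliases without the rest of normalization.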
diff --git a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala b/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
deleted file mode 100644
index 3c26997cfe..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatchSupport.scala
+++ /dev/null
@@ -1,115 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import scala.annotation.elidable
-import scala.language.postfixOps
-
-/** Ancillary bits of ParallelMatching which are better off
- * out of the way.
- */
-trait MatchSupport extends ast.TreeDSL { self: ParallelMatching =>
-
- import global.{ typer => _, _ }
- import CODE._
-
- /** Debugging support: enable with -Ypmat-debug **/
- private final def trace = settings.Ypmatdebug.value
-
- def impossible: Nothing = abort("this never happens")
-
- object Types {
- import definitions._
-
- val subrangeTypes = Set[Symbol](ByteClass, ShortClass, CharClass, IntClass)
-
- implicit class RichType(undecodedTpe: Type) {
- def tpe = decodedEqualsType(undecodedTpe)
- def isAnyRef = tpe <:< AnyRefClass.tpe
-
- // These tests for final classes can inspect the typeSymbol
- private def is(s: Symbol) = tpe.typeSymbol eq s
- def isInt = is(IntClass)
- def isNothing = is(NothingClass)
- }
- }
-
- object Debug {
- def treeToString(t: Tree): String = treeInfo.unbind(t) match {
- case EmptyTree => "?"
- case WILD() => "_"
- case Literal(Constant(x)) => "LIT(%s)".format(x)
- case Apply(fn, args) => "%s(%s)".format(treeToString(fn), args map treeToString mkString ",")
- case Typed(expr, tpt) => "%s: %s".format(treeToString(expr), treeToString(tpt))
- case x => x.toString + " (" + x.getClass + ")"
- }
-
- // Formatting for some error messages
- private val NPAD = 15
- def pad(s: String): String = "%%%ds" format (NPAD-1) format s
-
- // pretty print for debugging
- def pp(x: Any): String = pp(x, false)
- def pp(x: Any, newlines: Boolean): String = {
- val stripStrings = List("""java\.lang\.""", """\$iw\.""")
-
- def clean(s: String): String =
- stripStrings.foldLeft(s)((s, x) => s.replaceAll(x, ""))
-
- def pplist(xs: List[Any]): String =
- if (newlines) (xs map (" " + _ + "\n")).mkString("\n", "", "")
- else xs.mkString("(", ", ", ")")
-
- pp(x match {
- case s: String => return clean(s)
- case x: Tree => asCompactString(x)
- case xs: List[_] => pplist(xs map pp)
- case x: Tuple2[_,_] => "%s -> %s".format(pp(x._1), pp(x._2))
- case x => x.toString
- })
- }
-
- @elidable(elidable.FINE) def TRACE(f: String, xs: Any*): Unit = {
- if (trace) {
- val msg = if (xs.isEmpty) f else f.format(xs map pp: _*)
- println(msg)
- }
- }
- @elidable(elidable.FINE) def traceCategory(cat: String, f: String, xs: Any*) = {
- if (trace)
- TRACE("[" + """%10s""".format(cat) + "] " + f, xs: _*)
- }
- def tracing[T](s: String)(x: T): T = {
- if (trace)
- println(("[" + """%10s""".format(s) + "] %s") format pp(x))
-
- x
- }
- private[nsc] def printing[T](fmt: String, xs: Any*)(x: T): T = {
- println(fmt.format(xs: _*) + " == " + x)
- x
- }
- private[nsc] def debugging[T](fmt: String, xs: Any*)(x: T): T = {
- if (settings.debug.value) printing(fmt, xs: _*)(x)
- else x
- }
-
- def indentAll(s: Seq[Any]) = s map (" " + _.toString() + "\n") mkString
- }
-
- /** Drops the 'i'th element of a list.
- */
- def dropIndex[T](xs: List[T], n: Int) = {
- val (l1, l2) = xs splitAt n
- l1 ::: (l2 drop 1)
- }
-
- /** Extract the nth element of a list and return it and the remainder.
- */
- def extractIndex[T](xs: List[T], n: Int): (T, List[T]) =
- (xs(n), dropIndex(xs, n))
-}
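Nothing in the new pattern matcher needs the two list helpers deleted above; for reference, a sketch of the standard-library equivalents (object name invented):

```scala
object ListHelpers {
  /** Drop the n'th element, like the deleted dropIndex. */
  def dropIndex[T](xs: List[T], n: Int): List[T] = xs.patch(n, Nil, 1)

  /** Return the n'th element together with the remainder, like extractIndex. */
  def extractIndex[T](xs: List[T], n: Int): (T, List[T]) = (xs(n), dropIndex(xs, n))
}

// ListHelpers.dropIndex(List('a', 'b', 'c'), 1)    == List('a', 'c')
// ListHelpers.extractIndex(List('a', 'b', 'c'), 1) == ('b', List('a', 'c'))
```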
diff --git a/src/compiler/scala/tools/nsc/matching/Matrix.scala b/src/compiler/scala/tools/nsc/matching/Matrix.scala
deleted file mode 100644
index ba966acf34..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Matrix.scala
+++ /dev/null
@@ -1,232 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import symtab.Flags
-import scala.collection.mutable
-import scala.language.implicitConversions
-
-trait Matrix extends MatrixAdditions {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import analyzer.Typer
- import CODE._
- import Debug._
- import Flags.{ SYNTHETIC, MUTABLE }
-
- private[matching] val NO_EXHAUSTIVE = Flags.TRANS_FLAG
-
- /** Translation of match expressions.
- *
- * `p`: pattern
- * `g`: guard
- * `bx`: body index
- *
- * internal representation is (tvars:List[Symbol], rows:List[Row])
- *
- * tmp1 tmp_n
- * Row( p_11 ... p_1n g_1 b_1 ) + subst
- *
- * Row( p_m1 ... p_mn g_m b_m ) + subst
- *
- * Implementation based on the algorithm described in
- *
- * "A Term Pattern-Match Compiler Inspired by Finite Automata Theory"
- * Mikael Pettersson
- * ftp://ftp.ida.liu.se/pub/labs/pelab/papers/cc92pmc.ps.gz
- *
- * @author Burak Emir
- */
-
- /** "The Mixture Rule"
-
- {v=pat1, pats1 .. } {q1}
- match {.. } {..}
- {v=patn, patsn .. } {qn}
-
- This is the real work-horse of the algorithm. There is some column whose top-most pattern is a
- constructor. (For simplicity, it is depicted above as the left-most column, but any column will do.)
- The goal is to build a test state with the variable v and some outgoing arcs (one for each
- constructor and possibly a default arc). For each constructor in the selected column, its arc is
- defined as follows:
-
- Let {i1,...,ij} be the row indices of the patterns in the column that match c. Since the
- patterns are viewed as regular expressions, this will be the indices of the patterns that either
- have the same constructor c, or are wildcards.
-
- Let {pat1,...,patj} be the patterns in the column corresponding to the indices computed
- above, and let n be the arity of the constructor c, i.e. the number of sub-patterns it has. For
- each pati, its n sub-patterns are extracted; if pati is a wildcard, n wildcards are produced
- instead, each tagged with the right path variable. This results in a pattern matrix with n
- columns and j rows. This matrix is then appended to the result of selecting, from each column
- in the rest of the original matrix, those rows whose indices are in {i1,...,ij}. Finally
- the indices are used to select the corresponding final states that go with these rows. Note
- that the order of the indices is significant; selected rows do not change their relative orders.
- The arc for the constructor c is now defined as (c’,state), where c’ is c with any
- immediate sub-patterns replaced by their path variables (thus c’ is a simple pattern), and
- state is the result of recursively applying match to the new matrix and the new sequence
- of final states.
-
- Finally, the possibility of matching failure is considered. If the set of constructors is exhaustive,
- then no more arcs are computed. Otherwise, a default arc (_,state) is the last arc. If there are
- any wildcard patterns in the selected column, then their rows are selected from the rest of the
- matrix and the final states, and the state is the result of applying match to the new matrix and
- states. Otherwise, the error state is used after its reference count has been incremented.
- **/
-
- /** Handles all translation of pattern matching.
- */
- def handlePattern(
- selector: Tree, // tree being matched upon (called scrutinee after this)
- cases: List[CaseDef], // list of cases in the match
- isChecked: Boolean, // whether exhaustiveness checking is enabled (disabled with @unchecked)
- context: MatrixContext): Tree =
- {
- import context._
- TRACE("handlePattern", "(%s: %s) match { %s cases }", selector, selector.tpe, cases.size)
-
- val matrixInit: MatrixInit = {
- val v = copyVar(selector, isChecked, selector.tpe, "temp")
- MatrixInit(List(v), cases, atPos(selector.pos)(MATCHERROR(v.ident)))
- }
- val matrix = new MatchMatrix(context) { lazy val data = matrixInit }
- val mch = typer typed matrix.expansion.toTree
- val dfatree = typer typed Block(matrix.data.valDefs, mch)
-
- // redundancy check
- matrix.targets filter (_.unreached) foreach (cs => cunit.error(cs.body.pos, "unreachable code"))
- // optimize performs squeezing and resets any remaining NO_EXHAUSTIVE
- tracing("handlePattern")(matrix optimize dfatree)
- }
-
- case class MatrixContext(
- cunit: CompilationUnit, // current unit
- handleOuter: Tree => Tree, // for outer pointer
- typer: Typer, // a local typer
- owner: Symbol, // the current owner
- matchResultType: Type) // the expected result type of the whole match
- extends Squeezer
- {
- private def ifNull[T](x: T, alt: T) = if (x == null) alt else x
-
- // NO_EXHAUSTIVE communicates there should be no exhaustiveness checking
- private def flags(checked: Boolean) = if (checked) Nil else List(NO_EXHAUSTIVE)
-
- // Recording the symbols of the synthetics we create so we don't go clearing
- // anyone else's mutable flags.
- private val _syntheticSyms = mutable.HashSet[Symbol]()
- def clearSyntheticSyms() = {
- _syntheticSyms foreach (_ resetFlag (NO_EXHAUSTIVE|MUTABLE))
- debuglog("Cleared NO_EXHAUSTIVE/MUTABLE on " + _syntheticSyms.size + " synthetic symbols.")
- _syntheticSyms.clear()
- }
- def recordSyntheticSym(sym: Symbol): Symbol = {
- _syntheticSyms += sym
- if (_syntheticSyms.size > 25000) {
- cunit.error(owner.pos, "Sanity check failed: over 25000 symbols created for pattern match.")
- abort("This is a bug in the pattern matcher.")
- }
- sym
- }
-
- case class MatrixInit(
- roots: List[PatternVar],
- cases: List[CaseDef],
- default: Tree
- ) {
- def valDefs = roots map (_.valDef)
- override def toString() = "MatrixInit(roots = %s, %d cases)".format(pp(roots), cases.size)
- }
-
- implicit def pvlist2pvgroup(xs: List[PatternVar]): PatternVarGroup =
- PatternVarGroup(xs)
-
- object PatternVarGroup {
- def apply(xs: PatternVar*) = new PatternVarGroup(xs.toList)
- def apply(xs: List[PatternVar]) = new PatternVarGroup(xs)
- }
-
- val emptyPatternVarGroup = PatternVarGroup()
- class PatternVarGroup(val pvs: List[PatternVar]) {
- def syms = pvs map (_.sym)
- def valDefs = pvs map (_.valDef)
-
- def extractIndex(index: Int): (PatternVar, PatternVarGroup) = {
- val (t, ts) = self.extractIndex(pvs, index)
- (t, PatternVarGroup(ts))
- }
-
- def isEmpty = pvs.isEmpty
- def size = pvs.size
- def :::(ts: List[PatternVar]) = PatternVarGroup(ts ::: pvs)
-
- def apply(i: Int) = pvs(i)
- def zipWithIndex = pvs.zipWithIndex
- def indices = pvs.indices
-
- override def toString() = pp(pvs)
- }
-
- /** Every temporary variable allocated is put in a PatternVar.
- */
- class PatternVar(val lhs: Symbol, val rhs: Tree, val checked: Boolean) {
- def sym = lhs
- def tpe = lhs.tpe
- if (checked)
- lhs resetFlag NO_EXHAUSTIVE
- else
- lhs setFlag NO_EXHAUSTIVE
-
- // See #1427 for an example of a crash which occurs unless we retype:
- // in that instance there is an existential in the pattern.
- lazy val ident = typer typed Ident(lhs)
- lazy val valDef = typer typedValDef ValDef(lhs, rhs)
-
- override def toString() = "%s: %s = %s".format(lhs, tpe, rhs)
- }
-
- /** Given a tree, creates a new synthetic variable of the same type
- * and assigns the tree to it.
- */
- def copyVar(
- root: Tree,
- checked: Boolean,
- _tpe: Type = null,
- label: String = "temp"): PatternVar =
- {
- val tpe = ifNull(_tpe, root.tpe)
- val name = cunit.freshTermName(label)
- val sym = newVar(root.pos, tpe, flags(checked), name)
-
- tracing("copy")(new PatternVar(sym, root, checked))
- }
-
- /** Creates a new synthetic variable of the specified type and
- * assigns the result of f(symbol) to it.
- */
- def createVar(tpe: Type, f: Symbol => Tree, checked: Boolean) = {
- val lhs = newVar(owner.pos, tpe, flags(checked))
- val rhs = f(lhs)
-
- tracing("create")(new PatternVar(lhs, rhs, checked))
- }
-
- private def newVar(
- pos: Position,
- tpe: Type,
- flags: List[Long],
- name: TermName = null): Symbol =
- {
- val n = if (name == null) cunit.freshTermName("temp") else name
- // careful: pos has special meaning
- val flagsLong = (SYNTHETIC.toLong /: flags)(_|_)
- recordSyntheticSym(owner.newVariable(n, pos, flagsLong) setInfo tpe)
- }
- }
-}
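For readers without Pettersson's paper at hand, here is a deliberately tiny model of the column specialization that the Mixture Rule comment above describes. The types and names are invented for illustration and bear no relation to the compiler's real data structures:

```scala
sealed trait Pat
case object Wild extends Pat                                      // wildcard pattern "_"
final case class Con(name: String, args: List[Pat]) extends Pat   // constructor pattern

object MixtureRule {
  /** Rows that belong on the arc for constructor `name` of arity `arity`:
    * same-constructor rows contribute their sub-patterns as new columns,
    * wildcard rows contribute `arity` fresh wildcards. */
  def specialize(rows: List[List[Pat]], name: String, arity: Int): List[List[Pat]] =
    rows.collect {
      case Con(`name`, args) :: rest => args ::: rest
      case Wild :: rest              => List.fill(arity)(Wild) ::: rest
    }

  /** Rows that survive onto the default arc: only those starting with a wildcard. */
  def defaultRows(rows: List[List[Pat]]): List[List[Pat]] =
    rows.collect { case Wild :: rest => rest }
}
```

The recursion bottoms out when a row consists only of wildcards (the `VariableRule` in the deleted code) or when no rows remain (the error state).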
diff --git a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala b/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
deleted file mode 100644
index b1ca6e7b5a..0000000000
--- a/src/compiler/scala/tools/nsc/matching/MatrixAdditions.scala
+++ /dev/null
@@ -1,191 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-
-/** Traits which are mixed into MatchMatrix, but separated out as
- * (somewhat) independent components to keep them on the sidelines.
- */
-trait MatrixAdditions extends ast.TreeDSL {
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import symtab.Flags
- import Debug._
- import treeInfo._
- import definitions.{ isPrimitiveValueClass }
-
- /** The Squeezer, responsible for all the squeezing.
- */
- private[matching] trait Squeezer {
- self: MatrixContext =>
-
- private val settings_squeeze = !settings.Ynosqueeze.value
-
- class RefTraverser(vd: ValDef) extends Traverser {
- private val targetSymbol = vd.symbol
- private var safeRefs = 0
- private var isSafe = true
-
- def canDrop = isSafe && safeRefs == 0
- def canInline = isSafe && safeRefs == 1
-
- override def traverse(tree: Tree): Unit = tree match {
- case t: Ident if t.symbol eq targetSymbol =>
- // target symbol's owner should match currentOwner
- if (targetSymbol.owner == currentOwner) safeRefs += 1
- else isSafe = false
-
- case LabelDef(_, params, rhs) =>
- if (params exists (_.symbol eq targetSymbol)) // cannot substitute this one
- isSafe = false
-
- traverse(rhs)
- case _ if safeRefs > 1 => ()
- case _ =>
- super.traverse(tree)
- }
- }
-
- /** Compresses multiple Blocks. */
- private def combineBlocks(stats: List[Tree], expr: Tree): Tree = expr match {
- case Block(stats1, expr1) if stats.isEmpty => combineBlocks(stats1, expr1)
- case _ => Block(stats, expr)
- }
- def squeezedBlock(vds: List[Tree], exp: Tree): Tree =
- if (settings_squeeze) combineBlocks(Nil, squeezedBlock1(vds, exp))
- else combineBlocks(vds, exp)
-
- private def squeezedBlock1(vds: List[Tree], exp: Tree): Tree = {
- lazy val squeezedTail = squeezedBlock(vds.tail, exp)
- def default = squeezedTail match {
- case Block(vds2, exp2) => Block(vds.head :: vds2, exp2)
- case exp2 => Block(vds.head :: Nil, exp2)
- }
-
- if (vds.isEmpty) exp
- else vds.head match {
- case vd: ValDef =>
- val rt = new RefTraverser(vd)
- rt.atOwner(owner)(rt traverse squeezedTail)
-
- if (rt.canDrop)
- squeezedTail
- else if (isConstantType(vd.symbol.tpe) || rt.canInline)
- new TreeSubstituter(List(vd.symbol), List(vd.rhs)) transform squeezedTail
- else
- default
- case _ => default
- }
- }
- }
-
- /** The Optimizer, responsible for some of the optimizing.
- */
- private[matching] trait MatchMatrixOptimizer {
- self: MatchMatrix =>
-
- import self.context._
-
- final def optimize(tree: Tree): Tree = {
- // Uses treeInfo extractors rather than looking at trees directly
- // because the many Blocks obscure our vision.
- object lxtt extends Transformer {
- override def transform(tree: Tree): Tree = tree match {
- case Block(stats, ld @ LabelDef(_, _, body)) if targets exists (_ shouldInline ld.symbol) =>
- squeezedBlock(transformStats(stats, currentOwner), body)
- case IsIf(cond, IsTrue(), IsFalse()) =>
- transform(cond)
- case IsIf(cond1, IsIf(cond2, thenp, elsep1), elsep2) if elsep1 equalsStructure elsep2 =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, elsep2))
- case If(cond1, IsIf(cond2, thenp, Apply(jmp, Nil)), ld: LabelDef) if jmp.symbol eq ld.symbol =>
- transform(typer typed If(gen.mkAnd(cond1, cond2), thenp, ld))
- case _ =>
- super.transform(tree)
- }
- }
- try lxtt transform tree
- finally clearSyntheticSyms()
- }
- }
-
- /** The Exhauster.
- */
- private[matching] trait MatrixExhaustiveness {
- self: MatchMatrix =>
-
- import self.context._
-
- /** Exhaustiveness checking requires looking for sealed classes
- * and if found, making sure all children are covered by a pattern.
- */
- class ExhaustivenessChecker(rep: Rep, matchPos: Position) {
- val Rep(tvars, rows) = rep
-
- import Flags.{ MUTABLE, ABSTRACT, SEALED }
-
- private case class Combo(index: Int, sym: Symbol) { }
-
- /* True if the patterns in 'row' cover the given type symbol combination, and has no guard. */
- private def rowCoversCombo(row: Row, combos: List[Combo]) =
- row.guard.isEmpty && combos.forall(c => row.pats(c.index) covers c.sym)
-
- private def requiresExhaustive(sym: Symbol) = {
- (sym.isMutable) && // indicates that we have not yet checked exhaustivity
- !(sym hasFlag NO_EXHAUSTIVE) && // indicates @unchecked
- (sym.tpe.typeSymbol.isSealed) &&
- !isPrimitiveValueClass(sym.tpe.typeSymbol) // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- }
-
- private lazy val inexhaustives: List[List[Combo]] = {
- // let's please not get too clever side-effecting the mutable flag.
- val toCollect = tvars.zipWithIndex filter { case (pv, i) => requiresExhaustive(pv.sym) }
- val collected = toCollect map { case (pv, i) =>
- // okay, now reset the flag
- pv.sym resetFlag MUTABLE
-
- i -> (
- pv.tpe.typeSymbol.sealedDescendants.toList sortBy (_.sealedSortName)
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x))
- // have to filter out children which cannot match: see ticket #3683 for an example
- filter (_.tpe matchesPattern pv.tpe)
- )
- }
-
- val folded =
- collected.foldRight(List[List[Combo]]())((c, xs) => {
- val (i, syms) = c match { case (i, set) => (i, set.toList) }
- xs match {
- case Nil => syms map (s => List(Combo(i, s)))
- case _ => for (s <- syms ; rest <- xs) yield Combo(i, s) :: rest
- }
- })
-
- folded filterNot (combo => rows exists (r => rowCoversCombo(r, combo)))
- }
-
- private def mkPad(xs: List[Combo], i: Int): String = xs match {
- case Nil => pad("*")
- case Combo(j, sym) :: rest => if (j == i) pad(sym.name.toString) else mkPad(rest, i)
- }
- private def mkMissingStr(open: List[Combo]) =
- "missing combination %s\n" format tvars.indices.map(mkPad(open, _)).mkString
-
- /** The only public method. */
- def check = {
- def errMsg = (inexhaustives map mkMissingStr).mkString
- if (inexhaustives.nonEmpty)
- cunit.warning(matchPos, "match is not exhaustive!\n" + errMsg)
-
- rep
- }
- }
- }
-}
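The `ExhaustivenessChecker` deleted here has a counterpart in the new pattern matcher, so the user-visible behaviour is unchanged. As a small reminder of what it checks (illustrative code, not from the patch):

```scala
sealed abstract class Light
case object Red    extends Light
case object Yellow extends Light
case object Green  extends Light

object ExhaustivenessDemo {
  def describe(l: Light): String = l match {
    case Red   => "stop"
    case Green => "go"
    // scalac warns here: the match does not cover Yellow
  }

  // Mirroring the NO_EXHAUSTIVE handling above: @unchecked on the scrutinee
  // suppresses the exhaustiveness warning.
  def quiet(l: Light): String = (l: @unchecked) match {
    case Red => "stop"
  }
}
```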
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
deleted file mode 100644
index b5e25f3809..0000000000
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ /dev/null
@@ -1,866 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Copyright 2007 Google Inc. All Rights Reserved.
- * Author: bqe@google.com (Burak Emir)
- */
-
-package scala.tools.nsc
-package matching
-
-import PartialFunction._
-import scala.collection.{ mutable }
-import transform.ExplicitOuter
-import mutable.ListBuffer
-import scala.language.postfixOps
-
-trait ParallelMatching extends ast.TreeDSL
- with MatchSupport
- with Matrix
- with Patterns
- with PatternBindings
-{
- self: ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions.{
- IntClass, BooleanClass, SomeClass, OptionClass,
- getProductArgs, productProj, Object_eq, Any_asInstanceOf
- }
- import CODE._
- import Types._
- import Debug._
-
- /** Transition **/
- def toPats(xs: List[Tree]): List[Pattern] = xs map Pattern.apply
-
- /** The umbrella matrix class. **/
- abstract class MatchMatrix(val context: MatrixContext) extends MatchMatrixOptimizer with MatrixExhaustiveness {
- import context._
-
- def data: MatrixContext#MatrixInit
-
- lazy val MatrixInit(roots, cases, failTree) = data
- lazy val (rows, targets) = expand(roots, cases).unzip
- lazy val expansion: Rep = make(roots, rows)
-
- private val shortCuts = perRunCaches.newMap[Int, Symbol]()
-
- final def createShortCut(theLabel: Symbol): Int = {
- val key = shortCuts.size + 1
- shortCuts(key) = theLabel
- -key
- }
- def createLabelDef(namePrefix: String, body: Tree, params: List[Symbol] = Nil, restpe: Type = matchResultType) = {
- val labelName = cunit.freshTermName(namePrefix)
- val labelSym = owner.newLabel(labelName, owner.pos)
- val labelInfo = MethodType(params, restpe)
-
- LabelDef(labelSym setInfo labelInfo, params, body setType restpe)
- }
-
- /** This is the recursively focal point for translating the current
- * list of pattern variables and a list of pattern match rows into
- * a tree suitable for entering erasure.
- *
- * The first time it is called, the variables are (copies of) the
- * original pattern matcher roots, and the rows correspond to the
- * original casedefs.
- */
- final def make(roots1: PatternVarGroup, rows1: List[Row]): Rep = {
- traceCategory("New Match", "%sx%s (%s)", roots1.size, rows1.size, roots1.syms.mkString(", "))
- def classifyPat(opat: Pattern, j: Int): Pattern = opat simplify roots1(j)
-
- val newRows = rows1 flatMap (_ expandAlternatives classifyPat)
- if (rows1.length != newRows.length) make(roots1, newRows) // recursive call if any change
- else {
- val rep = Rep(roots1, newRows)
- new ExhaustivenessChecker(rep, roots.head.sym.pos).check
- rep
- }
- }
-
- override def toString() = "MatchMatrix(%s) { %s }".format(matchResultType, indentAll(targets))
-
- /**
- * Encapsulates a symbol being matched on. It is created from a
- * PatternVar, which encapsulates the symbol's creation and assignment.
- *
- * We never match on trees directly - a temporary variable is created
- * (in a PatternVar) for any expression being matched on.
- */
- class Scrutinee(val pv: PatternVar) {
- import definitions._
-
- // presenting a face of our symbol
- def sym = pv.sym
- def tpe = sym.tpe
- def pos = sym.pos
- def id = ID(sym) setPos pos // attributed ident
-
- def accessors = if (isCaseClass) sym.caseFieldAccessors else Nil
- def accessorTypes = accessors map (x => (tpe memberType x).resultType)
-
- lazy val accessorPatternVars = PatternVarGroup(
- for ((accessor, tpe) <- accessors zip accessorTypes) yield
- createVar(tpe, _ => fn(id, accessor))
- )
-
- private def extraValDefs = if (pv.rhs.isEmpty) Nil else List(pv.valDef)
- def allValDefs = extraValDefs ::: accessorPatternVars.valDefs
-
- // tests
- def isDefined = sym ne NoSymbol
- def isSubrangeType = subrangeTypes(tpe.typeSymbol)
- def isCaseClass = tpe.typeSymbol.isCase
-
- // sequences
- def seqType = tpe.widen baseType SeqClass
- def elemType = tpe typeArgs 0
-
- private def elemAt(i: Int) = (id DOT (tpe member nme.apply))(LIT(i))
- private def createElemVar(i: Int) = createVar(elemType, _ => elemAt(i))
- private def createSeqVar(drop: Int) = createVar(seqType, _ => id DROP drop)
-
- def createSequenceVars(count: Int): List[PatternVar] =
- (0 to count).toList map (i => if (i < count) createElemVar(i) else createSeqVar(i))
-
- // for propagating "unchecked" to synthetic vars
- def isChecked = !(sym hasFlag NO_EXHAUSTIVE)
- // def flags: List[Long] = List(NO_EXHAUSTIVE) filter (sym hasFlag _)
-
- // this is probably where this actually belongs
- def createVar(tpe: Type, f: Symbol => Tree) = context.createVar(tpe, f, isChecked)
-
- def castedTo(headType: Type) =
- if (tpe =:= headType) this
- else new Scrutinee(createVar(headType, lhs => gen.mkAsInstanceOf(id, lhs.tpe)))
-
- override def toString() = "(%s: %s)".format(id, tpe)
- }
-
- def isPatternSwitch(scrut: Scrutinee, ps: List[Pattern]): Option[PatternSwitch] = {
- def isSwitchableConst(x: Pattern) = cond(x) { case x: LiteralPattern if x.isSwitchable => true }
- def isSwitchableDefault(x: Pattern) = isSwitchableConst(x) || x.isDefault
-
- // TODO - scala> (5: Any) match { case 5 => 5 ; case 6 => 7 }
- // ... should compile to a switch. It doesn't because the scrut isn't Int/Char, but
- // that could be handled in an if/else since every pattern requires an Int.
- // More immediately, Byte and Short scruts should also work.
- if (!scrut.isSubrangeType) None
- else {
- val (_lits, others) = ps span isSwitchableConst
- val lits = _lits collect { case x: LiteralPattern => x }
-
- condOpt(others) {
- case Nil => new PatternSwitch(scrut, lits, None)
- // TODO: This needs to also allow the case that the last is a compatible type pattern.
- case List(x) if isSwitchableDefault(x) => new PatternSwitch(scrut, lits, Some(x))
- }
- }
- }
-
- class PatternSwitch(
- scrut: Scrutinee,
- override val ps: List[LiteralPattern],
- val defaultPattern: Option[Pattern]
- ) extends PatternMatch(scrut, ps) {
- require(scrut.isSubrangeType && (ps forall (_.isSwitchable)))
- }
-
- case class PatternMatch(scrut: Scrutinee, ps: List[Pattern]) {
- def head = ps.head
- def tail = ps.tail
- // def size = ps.length
-
- def headType = head.necessaryType
- private val dummyCount = if (head.isCaseClass) headType.typeSymbol.caseFieldAccessors.length else 0
- def dummies = emptyPatterns(dummyCount)
-
- def apply(i: Int): Pattern = ps(i)
- def pzip() = ps.zipWithIndex
- def pzip[T](others: List[T]) = {
- assert(ps.size == others.size, "Internal error: ps = %s, others = %s".format(ps, others))
- ps zip others
- }
-
- // Any unapply - returns Some(true) if a type test is needed before the unapply can
- // be called (e.g. def unapply(x: Foo) = { ... } but our scrutinee is type Any.)
- object AnyUnapply {
- def unapply(x: Pattern): Option[Boolean] = condOpt(x.tree) {
- case UnapplyParamType(tpe) => !(scrut.tpe <:< tpe)
- }
- }
-
- def mkRule(rest: Rep): RuleApplication = {
- tracing("Rule")(head match {
- case x if isEquals(x.tree.tpe) => new MixEquals(this, rest)
- case x: SequencePattern => new MixSequence(this, rest, x)
- case AnyUnapply(false) => new MixUnapply(this, rest)
- case _ =>
- isPatternSwitch(scrut, ps) match {
- case Some(x) => new MixLiteralInts(x, rest)
- case _ => new MixTypes(this, rest)
- }
- })
- }
- override def toString() = "%s match {%s}".format(scrut, indentAll(ps))
- } // PatternMatch
-
- /***** Rule Applications *****/
-
- sealed abstract class RuleApplication {
- def pmatch: PatternMatch
- def rest: Rep
- def cond: Tree
- def success: Tree
- def failure: Tree
-
- lazy val PatternMatch(scrut, patterns) = pmatch
- lazy val head = pmatch.head
- lazy val codegen: Tree = IF (cond) THEN (success) ELSE (failure)
-
- def mkFail(xs: List[Row]): Tree =
- if (xs.isEmpty) failTree
- else remake(xs).toTree
-
- def remake(
- rows: List[Row],
- pvgroup: PatternVarGroup = emptyPatternVarGroup,
- includeScrut: Boolean = true): Rep =
- {
- val scrutpvs = if (includeScrut) List(scrut.pv) else Nil
- make(pvgroup.pvs ::: scrutpvs ::: rest.tvars, rows)
- }
-
- /** translate outcome of the rule application into code (possible involving recursive application of rewriting) */
- def tree(): Tree
-
- override def toString =
- "Rule/%s (%s =^= %s)".format(getClass.getSimpleName, scrut, head)
- }
-
- /** {case ... if guard => bx} else {guardedRest} */
- /** VariableRule: The top-most row has only variable (non-constructor) patterns. */
- case class VariableRule(subst: Bindings, guard: Tree, guardedRest: Rep, bx: Int) extends RuleApplication {
- def pmatch: PatternMatch = impossible
- def rest: Rep = guardedRest
-
- private lazy val (valDefs, successTree) = targets(bx) applyBindings subst.toMap
- lazy val cond = guard
- lazy val success = successTree
- lazy val failure = guardedRest.toTree
-
- final def tree(): Tree =
- if (bx < 0) REF(shortCuts(-bx))
- else squeezedBlock(
- valDefs,
- if (cond.isEmpty) success else codegen
- )
-
- override def toString = "(case %d) {\n Bindings: %s\n\n if (%s) { %s }\n else { %s }\n}".format(
- bx, subst, guard, success, guardedRest
- )
- }
-
- class MixLiteralInts(val pmatch: PatternSwitch, val rest: Rep) extends RuleApplication {
- val literals = pmatch.ps
- val defaultPattern = pmatch.defaultPattern
-
- private lazy val casted: Tree =
- if (!scrut.tpe.isInt) scrut.id DOT nme.toInt else scrut.id
-
- // creates a row transformer for injecting the default case bindings at a given index
- private def addDefaultVars(index: Int): Row => Row =
- if (defaultVars.isEmpty) identity
- else rebindAll(_, pmatch(index).boundVariables, scrut.sym)
-
- // add bindings for all the given vs to the given tvar
- private def rebindAll(r: Row, vs: Iterable[Symbol], tvar: Symbol) =
- r rebind r.subst.add(vs, tvar)
-
- private def bindVars(Tag: Int, orig: Bindings): Bindings = {
- def myBindVars(rest: List[(Int, List[Symbol])], bnd: Bindings): Bindings = rest match {
- case Nil => bnd
- case (Tag,vs)::xs => myBindVars(xs, bnd.add(vs, scrut.sym))
- case (_, vs)::xs => myBindVars(xs, bnd)
- }
- myBindVars(varMap, orig)
- }
-
- // bound vars and rows for default pattern (only one row, but a list is easier to use later)
- lazy val (defaultVars, defaultRows) = defaultPattern match {
- case None => (Nil, Nil)
- case Some(p) => (p.boundVariables, List(rebindAll(rest rows literals.size, p.boundVariables, scrut.sym)))
- }
-
- // literalMap is a map from each literal to a list of row indices.
- // varMap is a list from each literal to a list of the defined vars.
- lazy val (litPairs, varMap) = (
- literals.zipWithIndex map {
- case (lit, index) =>
- val tag = lit.intValue
- (tag -> index, tag -> lit.boundVariables)
- } unzip
- )
- def literalMap = litPairs groupBy (_._1) map {
- case (k, vs) => (k, vs map (_._2))
- }
-
- lazy val cases =
- for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
- val newRows = indices map (i => addDefaultVars(i)(rest rows i))
- val r = remake(newRows ++ defaultRows, includeScrut = false)
- val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
-
- CASE(Literal(Constant(tag))) ==> r2.toTree
- }
-
- lazy val defaultTree = remake(defaultRows, includeScrut = false).toTree
- def defaultCase = CASE(WILD(IntClass.tpe)) ==> defaultTree
-
- // cond/success/failure only used if there is exactly one case.
- lazy val cond = scrut.id MEMBER_== cases.head.pat
- lazy val success = cases.head.body
- lazy val failure = defaultTree
-
- // only one case becomes if/else, otherwise match
- def tree() =
- if (cases.size == 1) codegen
- else casted MATCH (cases :+ defaultCase: _*)
- }
-
- /** mixture rule for unapply pattern
- */
- class MixUnapply(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- val Pattern(UnApply(unMethod, unArgs)) = head
- val Apply(unTarget, _ :: trailing) = unMethod
-
- object SameUnapplyCall {
- def isSame(t: Tree) = isEquivalentTree(unTarget, t)
- def unapply(x: Pattern) = /*tracing("SameUnapplyCall (%s vs. %s)".format(unTarget, x))*/(x match {
- case Pattern(UnApply(Apply(fn, _), args)) if isSame(fn) => Some(args)
- case _ => None
- })
- }
- object SameUnapplyPattern {
- def isSame(t: Tree) = isEquivalentTree(unMethod, t)
- def apply(x: Pattern) = unapply(x).isDefined
- def unapply(x: Pattern) = /*tracing("SameUnapplyPattern (%s vs. %s)".format(unMethod, x))*/(x match {
- case Pattern(UnApply(t, _)) if isSame(t) => Some(unArgs)
- case _ => None
- })
- }
-
- private lazy val zipped = pmatch pzip rest.rows
-
- lazy val unapplyResult: PatternVar =
- scrut.createVar(unMethod.tpe, Apply(unTarget, scrut.id :: trailing) setType _.tpe)
-
- lazy val cond: Tree = unapplyResult.tpe.normalize match {
- case TypeRef(_, BooleanClass, _) => unapplyResult.ident
- case TypeRef(_, SomeClass, _) => TRUE
- case _ => NOT(unapplyResult.ident DOT nme.isEmpty)
- }
-
- lazy val failure =
- mkFail(zipped.tail filterNot (x => SameUnapplyPattern(x._1)) map { case (pat, r) => r insert pat })
-
- private def doSuccess: (List[PatternVar], List[PatternVar], List[Row]) = {
- // pattern variable for the unapply result of Some(x).get
- def unMethodTypeArg = unMethod.tpe.baseType(OptionClass).typeArgs match {
- case Nil => log("No type argument for unapply result! " + unMethod.tpe) ; NoType
- case arg :: _ => arg
- }
- lazy val pv = scrut.createVar(unMethodTypeArg, _ => fn(ID(unapplyResult.lhs), nme.get))
- def tuple = pv.lhs
-
- // at this point it's Some[T1,T2...]
- lazy val tpes = getProductArgs(tuple.tpe)
-
- // one pattern variable per tuple element
- lazy val tuplePVs =
- for ((tpe, i) <- tpes.zipWithIndex) yield
- scrut.createVar(tpe, _ => fn(ID(tuple), productProj(tuple, i + 1)))
-
- // the filter prevents infinite unapply recursion
- def mkNewRows(sameFilter: (List[Tree]) => List[Tree]) = {
- val dum = if (unArgs.length <= 1) unArgs.length else tpes.size
- for ((pat, r) <- zipped) yield pat match {
- case SameUnapplyCall(xs) => r.insert2(toPats(sameFilter(xs)) :+ NoPattern, pat.boundVariables, scrut.sym)
- case _ => r insert (emptyPatterns(dum) :+ pat)
- }
- }
-
- // 0 is Boolean, 1 is Option[T], 2+ is Option[(T1,T2,...)]
- unArgs.length match {
- case 0 => (Nil, Nil, mkNewRows((xs) => Nil))
- case 1 => (List(pv), List(pv), mkNewRows(xs => List(xs.head)))
- case _ => (pv :: tuplePVs, tuplePVs, mkNewRows(identity))
- }
- }
-
- lazy val success = {
- val (squeezePVs, pvs, rows) = doSuccess
- val srep = remake(rows, pvs).toTree
-
- squeezedBlock(squeezePVs map (_.valDef), srep)
- }
-
- final def tree() =
- squeezedBlock(List(handleOuter(unapplyResult.valDef)), codegen)
- }
-
- /** Handle Sequence patterns (including Star patterns.)
- * Note: pivot == head, just better typed.
- */
- sealed class MixSequence(val pmatch: PatternMatch, val rest: Rep, pivot: SequencePattern) extends RuleApplication {
- require(scrut.tpe <:< head.tpe)
-
- def hasStar = pivot.hasStar
- private def pivotLen = pivot.nonStarLength
- private def seqDummies = emptyPatterns(pivot.elems.length + 1)
-
- // Should the given pattern join the expanded pivot in the success matrix? If so,
- // this partial function will be defined for the pattern, and the result of the apply
- // is the expanded sequence of new patterns.
- lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] {
- private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match {
- case (true, true) => true
- case (true, false) => pivotLen <= x.nonStarLength
- case (false, true) => pivotLen >= x.nonStarLength
- case (false, false) => pivotLen == x.nonStarLength
- }
-
- def isDefinedAt(pat: Pattern) = pat match {
- case x: SequenceLikePattern => seqIsDefinedAt(x)
- case WildcardPattern() => true
- case _ => false
- }
-
- def apply(pat: Pattern): List[Pattern] = pat match {
- case x: SequenceLikePattern =>
- def isSameLength = pivotLen == x.nonStarLength
- def rebound = x.nonStarPatterns :+ (x.elemPatterns.last rebindTo WILD(scrut.seqType))
-
- (pivot.hasStar, x.hasStar, isSameLength) match {
- case (true, true, true) => rebound :+ NoPattern
- case (true, true, false) => (seqDummies drop 1) :+ x
- case (true, false, true) => x.elemPatterns ++ List(NilPattern, NoPattern)
- case (false, true, true) => rebound
- case (false, false, true) => x.elemPatterns :+ NoPattern
- case _ => seqDummies
- }
-
- case _ => seqDummies
- }
- }
-
- // Should the given pattern be in the fail matrix? This is true of any sequences
- // as long as the result of the length test on the pivot doesn't make it impossible:
- // for instance if neither sequence is right ignoring and they are of different
- // lengths, the later one cannot match since its length must be wrong.
- def failureMatrixFn(c: Pattern) = (pivot ne c) && (c match {
- case x: SequenceLikePattern =>
- (hasStar, x.hasStar) match {
- case (_, true) => true
- case (true, false) => pivotLen > x.nonStarLength
- case (false, false) => pivotLen != x.nonStarLength
- }
- case WildcardPattern() => true
- case _ => false
- })
-
- // divide the remaining rows into success/failure branches, expanding subsequences of patterns
- val successRows = pmatch pzip rest.rows collect {
- case (c, row) if successMatrixFn isDefinedAt c => row insert successMatrixFn(c)
- }
- val failRows = pmatch pzip rest.rows collect {
- case (c, row) if failureMatrixFn(c) => row insert c
- }
-
- // the discrimination test for sequences is a call to lengthCompare. Note that
- // this logic must be fully consistent with successMatrixFn and failureMatrixFn above:
- // any inconsistency will (and frequently has) manifested as pattern matcher crashes.
- lazy val cond = {
- // the method call symbol
- val methodOp: Symbol = head.tpe member nme.lengthCompare
-
- // the comparison to perform. If the pivot is right ignoring, then a scrutinee sequence
- // of >= pivot length could match it; otherwise it must be exactly equal.
- val compareOp: (Tree, Tree) => Tree = if (hasStar) _ INT_>= _ else _ INT_== _
-
- // scrutinee.lengthCompare(pivotLength) [== | >=] 0
- val compareFn: Tree => Tree = (t: Tree) => compareOp((t DOT methodOp)(LIT(pivotLen)), ZERO)
-
- // wrapping in a null check on the scrutinee
- // XXX this needs to use the logic in "def condition"
- nullSafe(compareFn, FALSE)(scrut.id)
- // condition(head.tpe, scrut.id, head.boundVariables.nonEmpty)
- }
- lazy val success = {
- // one pattern var per sequence element up to elemCount, and one more for the rest of the sequence
- lazy val pvs = scrut createSequenceVars pivotLen
-
- squeezedBlock(pvs map (_.valDef), remake(successRows, pvs, hasStar).toTree)
- }
- lazy val failure = remake(failRows).toTree
-
- final def tree(): Tree = codegen
- }
-
- class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- private lazy val rhs =
- decodedEqualsType(head.tpe) match {
- case SingleType(pre, sym) => REF(pre, sym)
- case PseudoType(o) => o
- }
- private lazy val labelDef =
- createLabelDef("fail%", remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree)
-
- lazy val cond = handleOuter(rhs MEMBER_== scrut.id)
- lazy val successOne = rest.rows.head.insert2(List(NoPattern), head.boundVariables, scrut.sym)
- lazy val successTwo = Row(emptyPatterns(1 + rest.tvars.size), NoBinding, EmptyTree, createShortCut(labelDef.symbol))
- lazy val success = remake(List(successOne, successTwo)).toTree
- lazy val failure = labelDef
-
- final def tree() = codegen
- override def toString() = "MixEquals(%s == %s)".format(scrut, head)
- }
-
- /** Mixture rule for type tests.
- * moreSpecific: more specific patterns
- * subsumed: more general patterns (subsuming current), rows index and subpatterns
- * remaining: remaining, rows index and pattern
- */
- class MixTypes(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
- case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
- case class No(bx: Int, remaining: Pattern)
-
- val (yeses, noes) = {
- val _ys = new ListBuffer[Yes]
- val _ns = new ListBuffer[No]
-
- for ((pattern, j) <- pmatch.pzip()) {
- // scrutinee, head of pattern group
- val (s, p) = (pattern.tpe, head.necessaryType)
-
- def isEquivalent = head.necessaryType =:= pattern.tpe
- def isObjectTest = pattern.isObject && (p =:= pattern.necessaryType)
-
- def sMatchesP = matches(s, p)
- def pMatchesS = matches(p, s)
-
- def ifEquiv(yes: Pattern): Pattern = if (isEquivalent) yes else pattern
-
- def passl(p: Pattern = NoPattern, ps: List[Pattern] = pmatch.dummies) = Some(Yes(j, p, ps))
- def passr() = Some( No(j, pattern))
-
- def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
- def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
-
- val (oneY, oneN) = pattern match {
- case Pattern(LIT(null)) if !(p =:= s) => (None, passr) // (1)
- case x if isObjectTest => (passl(), None) // (2)
- case Pattern(Typed(pp, _)) if sMatchesP => (typed(pp), None) // (4)
- // The next line used to be this which "fixed" 1697 but introduced
- // numerous regressions including #3136.
- // case Pattern(_: UnApply, _) => (passl(), passr)
- case Pattern(_: UnApply) => (None, passr)
- case x if !x.isDefault && sMatchesP => (subs(), None)
- case x if x.isDefault || pMatchesS => (passl(), passr)
- case _ => (None, passr)
- }
- oneY map (_ys +=)
- oneN map (_ns +=)
- }
- (_ys.toList, _ns.toList)
- }
-
- // val moreSpecific = yeses map (_.moreSpecific)
- val subsumed = yeses map (x => (x.bx, x.subsumed))
- val remaining = noes map (x => (x.bx, x.remaining))
-
- private def mkZipped =
- for (Yes(j, moreSpecific, subsumed) <- yeses) yield
- j -> (moreSpecific :: subsumed)
-
- lazy val casted = scrut castedTo pmatch.headType
- lazy val cond = condition(casted.tpe, scrut, head.boundVariables.nonEmpty)
-
- private def isAnyMoreSpecific = yeses exists (x => !x.moreSpecific.isEmpty)
- lazy val (subtests, subtestVars) =
- if (isAnyMoreSpecific) (mkZipped, List(casted.pv))
- else (subsumed, Nil)
-
- lazy val newRows =
- for ((j, ps) <- subtests) yield
- (rest rows j).insert2(ps, pmatch(j).boundVariables, casted.sym)
-
- lazy val success = {
- val srep = remake(newRows, subtestVars ::: casted.accessorPatternVars, includeScrut = false)
- squeezedBlock(casted.allValDefs, srep.toTree)
- }
-
- lazy val failure =
- mkFail(remaining map { case (p1, p2) => rest rows p1 insert p2 })
-
- final def tree(): Tree = codegen
- }
-
- /*** States, Rows, Etc. ***/
-
- case class Row(pats: List[Pattern], subst: Bindings, guard: Tree, bx: Int) {
- private def nobindings = subst.get().isEmpty
- private def bindstr = if (nobindings) "" else pp(subst)
-
- /** Extracts the 'i'th pattern. */
- def extractColumn(i: Int) = {
- val (x, xs) = extractIndex(pats, i)
- (x, copy(pats = xs))
- }
-
- /** Replaces the 'i'th pattern with the argument. */
- def replaceAt(i: Int, p: Pattern) = {
- val newps = (pats take i) ::: p :: (pats drop (i + 1))
- copy(pats = newps)
- }
-
- def insert(h: Pattern) = copy(pats = h :: pats)
- def insert(hs: List[Pattern]) = copy(pats = hs ::: pats) // prepends supplied pattern
- def rebind(b: Bindings) = copy(subst = b) // substitutes for bindings
-
- def insert2(hs: List[Pattern], vs: Iterable[Symbol], tvar: Symbol) =
- tracing("insert2")(copy(pats = hs ::: pats, subst = subst.add(vs, tvar)))
-
- // returns this rows with alternatives expanded
- def expandAlternatives(classifyPat: (Pattern, Int) => Pattern): List[Row] = {
- def isNotAlternative(p: Pattern) = !cond(p.tree) { case _: Alternative => true }
-
- // classify all the top level patterns - alternatives come back unaltered
- val newPats: List[Pattern] = pats.zipWithIndex map classifyPat.tupled
- // see if any alternatives were in there
- val (ps, others) = newPats span isNotAlternative
- // make a new row for each alternative, with it spliced into the original position
- if (others.isEmpty) List(copy(pats = ps))
- else extractBindings(others.head) map (x => replaceAt(ps.size, x))
- }
- override def toString() = {
- val bs = if (nobindings) "" else "\n" + bindstr
- "Row(%d)(%s%s)".format(bx, pp(pats), bs)
- }
- }
- abstract class State {
- def bx: Int // index into the list of rows
- def params: List[Symbol] // bound names to be supplied as arguments to labeldef
- def body: Tree // body to execute upon match
- def label: Option[LabelDef] // label definition for this state
-
- // Called with a bindings map when a match is achieved.
- // Returns a list of variable declarations based on the labeldef parameters
- // and the given substitution, and the body to execute.
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]): (List[ValDef], Tree)
-
- final def applyBindings(subst: Map[Symbol, Symbol]): (List[ValDef], Tree) = {
- _referenceCount += 1
- applyBindingsImpl(subst)
- }
-
- private var _referenceCount = 0
- def referenceCount = _referenceCount
- def unreached = referenceCount == 0
- def shouldInline(sym: Symbol) = referenceCount == 1 && label.exists(_.symbol == sym)
-
- // Creates a simple Ident if the symbol's type conforms to
- // the val definition's type, or a casted Ident if not.
- private def newValIdent(lhs: Symbol, rhs: Symbol) =
- if (rhs.tpe <:< lhs.tpe) Ident(rhs)
- else gen.mkTypeApply(Ident(rhs), Any_asInstanceOf, List(lhs.tpe))
-
- protected def newValDefinition(lhs: Symbol, rhs: Symbol) =
- typer typedValDef ValDef(lhs, newValIdent(lhs, rhs))
-
- protected def newValReference(lhs: Symbol, rhs: Symbol) =
- typer typed newValIdent(lhs, rhs)
-
- protected def valDefsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValDefinition)
- protected def identsFor(subst: Map[Symbol, Symbol]) = mapSubst(subst)(newValReference)
-
- protected def mapSubst[T](subst: Map[Symbol, Symbol])(f: (Symbol, Symbol) => T): List[T] =
- params flatMap { lhs =>
- subst get lhs map (rhs => f(lhs, rhs)) orElse {
- // This should not happen; the code should be structured so it is
- // impossible, but that still lies ahead.
- cunit.warning(lhs.pos, "No binding")
- None
- }
- }
-
- // typer is not able to digest a body of type Nothing being assigned result type Unit
- protected def caseResultType =
- if (body.tpe.isNothing) body.tpe else matchResultType
- }
-
- case class LiteralState(bx: Int, params: List[Symbol], body: Tree) extends State {
- def label = None
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) =
- (valDefsFor(subst), body.duplicate setType caseResultType)
- }
-
- case class FinalState(bx: Int, params: List[Symbol], body: Tree) extends State {
- traceCategory("Final State", "(%s) => %s", paramsString, body)
- def label = Some(labelDef)
-
- private lazy val labelDef = createLabelDef("body%" + bx, body, params, caseResultType)
-
- protected def applyBindingsImpl(subst: Map[Symbol, Symbol]) = {
- val tree =
- if (referenceCount > 1) ID(labelDef.symbol) APPLY identsFor(subst)
- else labelDef
-
- (valDefsFor(subst), tree)
- }
-
- private def paramsString = params map (s => s.name + ": " + s.tpe) mkString ", "
- override def toString() = pp("(%s) => %s".format(pp(params), body))
- }
-
- case class Rep(val tvars: PatternVarGroup, val rows: List[Row]) {
- lazy val Row(pats, subst, guard, index) = rows.head
- lazy val guardedRest = if (guard.isEmpty) Rep(Nil, Nil) else make(tvars, rows.tail)
- lazy val (defaults, others) = pats span (_.isDefault)
-
- /** Cut out the column containing the non-default pattern. */
- class Cut(index: Int) {
- /** The first two separate out the 'i'th pattern in each row from the remainder. */
- private val (_column, _rows) = rows map (_ extractColumn index) unzip
-
- /** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
- private val (_pv, _tvars) = tvars extractIndex index
-
- /** The non-default pattern (others.head) replaces the column head. */
- private val (_ncol, _nrep) =
- (others.head :: _column.tail, make(_tvars, _rows))
-
- def mix() = {
- val newScrut = new Scrutinee(new PatternVar(_pv.sym, EmptyTree, _pv.checked))
- PatternMatch(newScrut, _ncol) mkRule _nrep
- }
- }
-
- /** Converts this to a tree - recursively acquires subreps. */
- final def toTree(): Tree = tracing("toTree")(typer typed applyRule())
-
- /** The VariableRule. */
- private def variable() = {
- val binding = (defaults map (_.boundVariables) zip tvars.pvs) .
- foldLeft(subst)((b, pair) => b.add(pair._1, pair._2.lhs))
-
- VariableRule(binding, guard, guardedRest, index)
- }
- /** The MixtureRule: picks a rewrite rule to apply. */
- private def mixture() = new Cut(defaults.size) mix()
-
- /** Applying the rule will result in one of:
- *
- * VariableRule - if all patterns are default patterns
- * MixtureRule - if one or more patterns are not default patterns
- * Error - no rows remaining
- */
- final def applyRule(): Tree =
- if (rows.isEmpty) failTree
- else if (others.isEmpty) variable.tree()
- else mixture.tree()
-
- def ppn(x: Any) = pp(x, newlines = true)
- override def toString() =
- if (tvars.isEmpty) "Rep(%d) = %s".format(rows.size, ppn(rows))
- else "Rep(%dx%d)%s%s".format(tvars.size, rows.size, ppn(tvars), ppn(rows))
- }
-
- /** Expands the patterns recursively. */
- final def expand(roots: List[PatternVar], cases: List[CaseDef]) = tracing("expand") {
- for ((CaseDef(pat, guard, body), bx) <- cases.zipWithIndex) yield {
- val subtrees = pat match {
- case x if roots.length <= 1 => List(x)
- case Apply(_, args) => args
- case WILD() => emptyTrees(roots.length)
- }
- val params = pat filter (_.isInstanceOf[Bind]) map (_.symbol) distinct
- val row = Row(toPats(subtrees), NoBinding, guard, bx)
- val state = body match {
- case x: Literal => LiteralState(bx, params, body)
- case _ => FinalState(bx, params, body)
- }
-
- row -> state
- }
- }
-
- /** returns the condition in "if (cond) k1 else k2"
- */
- final def condition(tpe: Type, scrut: Scrutinee, isBound: Boolean): Tree = {
- assert(scrut.isDefined)
- val cond = handleOuter(condition(tpe, scrut.id, isBound))
-
- if (!needsOuterTest(tpe, scrut.tpe, owner)) cond
- else addOuterCondition(cond, tpe, scrut.id)
- }
-
- final def condition(tpe: Type, scrutTree: Tree, isBound: Boolean): Tree = {
- assert((tpe ne NoType) && (scrutTree.tpe ne NoType))
- def isMatchUnlessNull = scrutTree.tpe <:< tpe && tpe.isAnyRef
- def isRef = scrutTree.tpe.isAnyRef
-
- // See ticket #1503 for the motivation behind checking for a binding.
- // The upshot is that it is unsound to assume equality means the right
- // type, but if the value doesn't appear on the right hand side of the
- // match that's unimportant; so we add an instance check only if there
- // is a binding.
- def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
- cunit.warning(scrutTree.pos,
- "A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
- }
- }
-
- def genEquals(sym: Symbol): Tree = {
- val t1: Tree = REF(sym) MEMBER_== scrutTree
-
- if (isBound) {
- bindingWarning()
- t1 AND (scrutTree IS tpe.widen)
- }
- else t1
- }
-
- typer typed {
- tpe match {
- case ConstantType(Constant(null)) if isRef => scrutTree OBJ_EQ NULL
- case ConstantType(const) => scrutTree MEMBER_== Literal(const)
- case SingleType(NoPrefix, sym) => genEquals(sym)
- case SingleType(pre, sym) if sym.isStable => genEquals(sym)
- case ThisType(sym) if sym.isModule => genEquals(sym)
- case _ if isMatchUnlessNull => scrutTree OBJ_NE NULL
- case _ => scrutTree IS tpe
- }
- }
- }
-
- /** adds a test comparing the dynamic outer to the static outer */
- final def addOuterCondition(cond: Tree, tpe2test: Type, scrut: Tree) = {
- val TypeRef(prefix, _, _) = tpe2test
- val theRef = handleOuter(prefix match {
- case NoPrefix => abort("assertion failed: NoPrefix")
- case ThisType(clazz) => THIS(clazz)
- case pre => REF(pre.prefix, pre.termSymbol)
- })
- outerAccessor(tpe2test.typeSymbol) match {
- case NoSymbol => ifDebug(cunit.warning(scrut.pos, "no outer acc for " + tpe2test.typeSymbol)) ; cond
- case outerAcc =>
- val casted = gen.mkAsInstanceOf(scrut, tpe2test, any = true, wrapInApply = true)
- cond AND ((casted DOT outerAcc)() OBJ_EQ theRef)
- }
- }
- }
-}
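To make the deleted `condition`/`MixTypes` machinery concrete, here is a hand-written, simplified picture of the kind of code the old translator produced for a type-test pattern; it is an illustration, not literal compiler output:

```scala
object LoweringSketch {
  def usingMatch(x: Any): Int = x match {
    case s: String => s.length
    case _         => -1
  }

  // Roughly what the old matcher generated: an instance test, a cast into a
  // synthetic temporary (a PatternVar in the deleted code), and a fall-through.
  def handWritten(x: Any): Int =
    if (x.isInstanceOf[String]) {
      val temp1: String = x.asInstanceOf[String]
      temp1.length
    } else -1
}
```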
diff --git a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala b/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
deleted file mode 100644
index c6fa6f6ba0..0000000000
--- a/src/compiler/scala/tools/nsc/matching/PatternBindings.scala
+++ /dev/null
@@ -1,126 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import transform.ExplicitOuter
-import scala.language.postfixOps
-
-trait PatternBindings extends ast.TreeDSL
-{
- self: ExplicitOuter with ParallelMatching =>
-
- import global.{ typer => _, _ }
- import definitions.{ EqualsPatternClass }
- import CODE._
-
- /** EqualsPattern **/
- def isEquals(tpe: Type) = tpe.typeSymbol == EqualsPatternClass
- def mkEqualsRef(tpe: Type) = typeRef(NoPrefix, EqualsPatternClass, List(tpe))
- def decodedEqualsType(tpe: Type) =
- if (tpe.typeSymbol == EqualsPatternClass) tpe.typeArgs.head else tpe
-
- // A subtype test which creates fresh existentials for type
- // parameters on the right hand side.
- def matches(arg1: Type, arg2: Type) = decodedEqualsType(arg1) matchesPattern decodedEqualsType(arg2)
-
- // For spotting duplicate unapplies
- def isEquivalentTree(t1: Tree, t2: Tree) = (t1.symbol == t2.symbol) && (t1 equalsStructure t2)
-
- // Reproduce the Bind trees wrapping oldTree around newTree
- def moveBindings(oldTree: Tree, newTree: Tree): Tree = oldTree match {
- case b @ Bind(x, body) => Bind(b.symbol, moveBindings(body, newTree))
- case _ => newTree
- }
-
- // used as argument to `EqualsPatternClass`
- case class PseudoType(o: Tree) extends SimpleTypeProxy {
- override def underlying: Type = o.tpe
- override def safeToString: String = "PseudoType("+o+")"
- }
-
- // If the given pattern contains alternatives, return it as a list of patterns.
- // Makes typed copies of any bindings found so all alternatives point to final state.
- def extractBindings(p: Pattern): List[Pattern] =
- toPats(_extractBindings(p.boundTree, identity))
-
- private def _extractBindings(p: Tree, prevBindings: Tree => Tree): List[Tree] = {
- def newPrev(b: Bind) = (x: Tree) => treeCopy.Bind(b, b.name, x) setType x.tpe
-
- p match {
- case b @ Bind(_, body) => _extractBindings(body, newPrev(b))
- case Alternative(ps) => ps map prevBindings
- }
- }
-
- trait PatternBindingLogic {
- self: Pattern =>
-
- // The outermost Bind(x1, Bind(x2, ...)) surrounding the tree.
- private var _boundTree: Tree = tree
- def boundTree = _boundTree
- def setBound(x: Bind): Pattern = {
- _boundTree = x
- this
- }
- def boundVariables = strip(boundTree)
-
- // If a tree has bindings, boundTree looks something like
- // Bind(v3, Bind(v2, Bind(v1, tree)))
- // This takes the given tree and creates a new pattern
- // using the same bindings.
- def rebindTo(t: Tree): Pattern = Pattern(moveBindings(boundTree, t))
-
- // Wrap this pattern's bindings around (_: Type)
- def rebindToType(tpe: Type, ascription: Type = null): Pattern = {
- val aType = if (ascription == null) tpe else ascription
- rebindTo(Typed(WILD(tpe), TypeTree(aType)) setType tpe)
- }
-
- // Wrap them around _
- def rebindToEmpty(tpe: Type): Pattern =
- rebindTo(Typed(EmptyTree, TypeTree(tpe)) setType tpe)
-
- // Wrap them around a singleton type for an EqualsPattern check.
- def rebindToEqualsCheck(): Pattern =
- rebindToType(equalsCheck)
-
- // Like rebindToEqualsCheck, but subtly different. Not trying to be
- // mysterious -- I haven't sorted it all out yet.
- def rebindToObjectCheck(): Pattern =
- rebindToType(mkEqualsRef(sufficientType), sufficientType)
-
- /** Helpers **/
- private def wrapBindings(vs: List[Symbol], pat: Tree): Tree = vs match {
- case Nil => pat
- case x :: xs => Bind(x, wrapBindings(xs, pat)) setType pat.tpe
- }
- private def strip(t: Tree): List[Symbol] = t match {
- case b @ Bind(_, pat) => b.symbol :: strip(pat)
- case _ => Nil
- }
- }
-
- case class Binding(pvar: Symbol, tvar: Symbol) {
- override def toString() = pvar.name + " -> " + tvar.name
- }
-
- class Bindings(private val vlist: List[Binding]) {
- def get() = vlist
- def toMap = vlist map (x => (x.pvar, x.tvar)) toMap
-
- def add(vs: Iterable[Symbol], tvar: Symbol): Bindings = {
- val newBindings = vs.toList map (v => Binding(v, tvar))
- new Bindings(newBindings ++ vlist)
- }
-
- override def toString() =
- if (vlist.isEmpty) "<none>"
- else vlist.mkString(", ")
- }
-
- val NoBinding: Bindings = new Bindings(Nil)
-}
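For illustration, a small runnable example of the invariant the deleted extractBindings and moveBindings pair maintained: when a binder wraps an alternative pattern, each alternative is re-wrapped in a copy of the Bind node, so the variable is available in the body whichever branch matched. BindingSketch and classify are invented names.

object BindingSketch {
  // x is bound around the whole alternative; the old matcher split the
  // alternative into one pattern per branch and re-attached the binder to each.
  def classify(n: Int): String = n match {
    case x @ (1 | 2 | 3) => s"small: $x"
    case x               => s"large: $x"
  }

  def main(args: Array[String]): Unit =
    (1 to 5) foreach (n => println(classify(n)))
}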
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
deleted file mode 100644
index df536da108..0000000000
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ /dev/null
@@ -1,457 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * Author: Paul Phillips
- */
-
-package scala.tools.nsc
-package matching
-
-import PartialFunction._
-
-/** Patterns are wrappers for Trees with enhanced semantics.
- *
- * @author Paul Phillips
- */
-
-trait Patterns extends ast.TreeDSL {
- self: transform.ExplicitOuter =>
-
- import global.{ typer => _, _ }
- import definitions._
- import CODE._
- import Debug._
- import treeInfo.{ unbind, isStar, isVarPattern }
-
- type PatternMatch = MatchMatrix#PatternMatch
- private type PatternVar = MatrixContext#PatternVar
-
- // Fresh patterns
- def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern)
- def emptyTrees(i: Int): List[Tree] = List.fill(i)(EmptyTree)
-
- // An empty pattern
- def NoPattern = WildcardPattern()
-
- // The Nil pattern
- def NilPattern = Pattern(gen.mkNil)
-
- // 8.1.1
- case class VariablePattern(tree: Ident) extends NamePattern {
- lazy val Ident(name) = tree
- require(isVarPattern(tree) && name != nme.WILDCARD)
- override def covers(sym: Symbol) = true
- override def description = "%s".format(name)
- }
-
- // 8.1.1 (b)
- case class WildcardPattern() extends Pattern {
- def tree = EmptyTree
- override def covers(sym: Symbol) = true
- override def isDefault = true
- override def description = "_"
- }
-
- // 8.1.2
- case class TypedPattern(tree: Typed) extends Pattern {
- lazy val Typed(expr, tpt) = tree
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpt.tpe)
- override def sufficientType = tpt.tpe
- override def simplify(pv: PatternVar) = Pattern(expr) match {
- case ExtractorPattern(ua) if pv.sym.tpe <:< tpt.tpe => this rebindTo expr
- case _ => this
- }
- override def description = "%s: %s".format(Pattern(expr), tpt)
- }
-
- // 8.1.3
- case class LiteralPattern(tree: Literal) extends Pattern {
- lazy val Literal(const @ Constant(value)) = tree
-
- def isSwitchable = cond(const.tag) { case ByteTag | ShortTag | IntTag | CharTag => true }
- def intValue = const.intValue
- override def description = {
- val s = if (value == null) "null" else value.toString
- "Lit(%s)".format(s)
- }
- }
-
- // 8.1.4 (a)
- case class ApplyIdentPattern(tree: Apply) extends ApplyPattern with NamePattern {
- // XXX - see bug 3411 for code which violates this assumption
- // require (!isVarPattern(fn) && args.isEmpty)
- lazy val ident @ Ident(name) = fn
-
- override def sufficientType = Pattern(ident).equalsCheck
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Id(%s)".format(name)
- }
- // 8.1.4 (b)
- case class ApplySelectPattern(tree: Apply) extends ApplyPattern with SelectPattern {
- require (args.isEmpty)
- lazy val Apply(select: Select, _) = tree
-
- override lazy val sufficientType = qualifier.tpe match {
- case t: ThisType => singleType(t, sym) // this.X
- case _ =>
- qualifier match {
- case _: Apply => PseudoType(tree)
- case _ => singleType(Pattern(qualifier).necessaryType, sym)
- }
- }
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = backticked match {
- case Some(s) => "this." + s
- case _ => "Sel(%s.%s)".format(Pattern(qualifier), name)
- }
-
- }
- // 8.1.4 (c)
- case class StableIdPattern(tree: Select) extends SelectPattern {
- def select = tree
- override def description = "St(%s)".format(printableSegments.mkString(" . "))
- private def printableSegments =
- pathSegments filter (x => !x.isEmpty && (x.toString != "$iw"))
- }
- // 8.1.4 (d)
- case class ObjectPattern(tree: Apply) extends ApplyPattern { // NamePattern?
- require(!fn.isType && isModule)
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, sufficientType)
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToObjectCheck()
- override def description = "Obj(%s)".format(fn)
- }
- // 8.1.4 (e)
- case class SimpleIdPattern(tree: Ident) extends NamePattern {
- val Ident(name) = tree
- override def covers(sym: Symbol) = newMatchesPattern(sym, tpe.narrow)
- override def description = "Id(%s)".format(name)
- }
-
- // 8.1.5
- case class ConstructorPattern(tree: Apply) extends ApplyPattern with NamePattern {
- require(fn.isType && this.isCaseClass, "tree: " + tree + " fn: " + fn)
- def name = tpe.typeSymbol.name
- def cleanName = tpe.typeSymbol.decodedName
-
- private def isColonColon = cleanName == "::"
-
- override def subpatterns(pm: MatchMatrix#PatternMatch) =
- if (pm.head.isCaseClass) toPats(args)
- else super.subpatterns(pm)
-
- override def simplify(pv: PatternVar) =
- if (args.isEmpty) this rebindToEmpty tree.tpe
- else this
-
- override def covers(sym: Symbol) = {
- debugging("[constructor] Does " + this + " cover " + sym + " ? ") {
- sym.tpe.typeSymbol == this.tpe.typeSymbol
- }
- }
- override def description = {
- if (isColonColon) "%s :: %s".format(Pattern(args(0)), Pattern(args(1)))
- else "%s(%s)".format(name, toPats(args).mkString(", "))
- }
- }
- // 8.1.6
- case class TuplePattern(tree: Apply) extends ApplyPattern {
- override def description = "((%s))".format(args.size, toPats(args).mkString(", "))
- }
-
- // 8.1.7 / 8.1.8 (unapply and unapplySeq calls)
- case class ExtractorPattern(tree: UnApply) extends UnapplyPattern {
- private def uaTyped = Typed(tree, TypeTree(arg.tpe)) setType arg.tpe
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< arg.tpe) this
- else this rebindTo uaTyped
- }
- override def description = "Unapply(%s => %s)".format(necessaryType, resTypesString)
- }
-
- // Special List handling. It was like that when I got here.
- case class ListExtractorPattern(tree: UnApply, tpt: Tree, elems: List[Tree]) extends UnapplyPattern with SequenceLikePattern {
- // As yet I can't testify this is doing any good relative to using
- // tpt.tpe, but it doesn't seem to hurt either.
- private lazy val packedType = global.typer.computeType(tpt, tpt.tpe)
- private lazy val consRef = appliedType(ConsClass, packedType)
- private lazy val listRef = appliedType(ListClass, packedType)
-
- // Fold a list into a well-typed x :: y :: etc :: tree.
- private def listFolder(hd: Tree, tl: Tree): Tree = unbind(hd) match {
- case t @ Star(_) => moveBindings(hd, WILD(t.tpe))
- case _ =>
- val dummyMethod = NoSymbol.newTermSymbol(newTermName("matching$dummy"))
- val consType = MethodType(dummyMethod newSyntheticValueParams List(packedType, listRef), consRef)
-
- Apply(TypeTree(consType), List(hd, tl)) setType consRef
- }
- private def foldedPatterns = elems.foldRight(gen.mkNil)((x, y) => listFolder(x, y))
- override def necessaryType = if (nonStarPatterns.nonEmpty) consRef else listRef
-
- override def simplify(pv: PatternVar) = {
- if (pv.tpe <:< necessaryType)
- Pattern(foldedPatterns)
- else
- this rebindTo (Typed(tree, TypeTree(necessaryType)) setType necessaryType)
- }
- override def description = "List(%s => %s)".format(packedType, resTypesString)
- }
-
- trait SequenceLikePattern extends Pattern {
- def elems: List[Tree]
- override def hasStar = elems.nonEmpty && isStar(elems.last)
-
- def elemPatterns = toPats(elems)
- def nonStarElems = if (hasStar) elems.init else elems
- def nonStarPatterns = toPats(nonStarElems)
- def nonStarLength = nonStarElems.length
- }
-
- // 8.1.8 (b) (literal ArrayValues)
- case class SequencePattern(tree: ArrayValue) extends Pattern with SequenceLikePattern {
- lazy val ArrayValue(_, elems) = tree
-
- override def description = "Seq(%s)".format(elemPatterns mkString ", ")
- }
-
- // 8.1.8 (c)
- case class StarPattern(tree: Star) extends Pattern {
- override def description = "_*"
- }
- // XXX temporary?
- case class ThisPattern(tree: This) extends NamePattern {
- lazy val This(name) = tree
- override def description = "this"
- }
-
- // 8.1.9
- // InfixPattern ... subsumed by Constructor/Extractor Patterns
-
- // 8.1.10
- case class AlternativePattern(tree: Alternative) extends Pattern {
- private lazy val Alternative(subtrees) = tree
- private def alts = toPats(subtrees)
- override def description = "Alt(%s)".format(alts mkString " | ")
- }
-
- // 8.1.11
- // XMLPattern ... for now, subsumed by SequencePattern, but if we want
- // to make it work right, it probably needs special handling.
-
- private def abortUnknownTree(tree: Tree) =
- abort("Unknown Tree reached pattern matcher: %s/%s".format(tree, tree.getClass))
-
- object Pattern {
- // a small tree -> pattern cache
- private val cache = perRunCaches.newMap[Tree, Pattern]()
-
- def apply(tree: Tree): Pattern = {
- if (cache contains tree)
- return cache(tree)
-
- val p = tree match {
- case x: Bind => apply(unbind(tree)) setBound x
- case EmptyTree => WildcardPattern()
- case Ident(nme.WILDCARD) => WildcardPattern()
- case x @ Alternative(ps) => AlternativePattern(x)
- case x: Apply => ApplyPattern(x)
- case x: Typed => TypedPattern(x)
- case x: Literal => LiteralPattern(x)
- case x: UnApply => UnapplyPattern(x)
- case x: Ident => if (isVarPattern(x)) VariablePattern(x) else SimpleIdPattern(x)
- case x: ArrayValue => SequencePattern(x)
- case x: Select => StableIdPattern(x)
- case x: Star => StarPattern(x)
- case x: This => ThisPattern(x) // XXX ?
- case _ => abortUnknownTree(tree)
- }
- cache(tree) = p
-
- // limiting the trace output
- p match {
- case WildcardPattern() => p
- case _: LiteralPattern => p
- case _ => tracing("Pattern")(p)
- }
- }
- // matching on Pattern(...) always skips the bindings.
- def unapply(other: Any): Option[Tree] = other match {
- case x: Tree => unapply(Pattern(x))
- case x: Pattern => Some(x.tree)
- case _ => None
- }
- }
-
- object UnapplyPattern {
- private object UnapplySeq {
- def unapply(x: UnApply) = x match {
- case UnApply(
- Apply(TypeApply(Select(qual, nme.unapplySeq), List(tpt)), _),
- List(ArrayValue(_, elems))) =>
- Some((qual.symbol, tpt, elems))
- case _ =>
- None
- }
- }
-
- def apply(x: UnApply): Pattern = x match {
- case UnapplySeq(ListModule, tpt, elems) =>
- ListExtractorPattern(x, tpt, elems)
- case _ =>
- ExtractorPattern(x)
- }
- }
-
- // right now a tree like x @ Apply(fn, Nil) where !fn.isType
- // is handled by creating a singleton type:
- //
- // val stype = Types.singleType(x.tpe.prefix, x.symbol)
- //
- // and then passing that as a type argument to EqualsPatternClass:
- //
- // val tpe = typeRef(NoPrefix, EqualsPatternClass, List(stype))
- //
- // then creating a Typed pattern and rebinding.
- //
- // val newpat = Typed(EmptyTree, TypeTree(tpe)) setType tpe)
- //
- // This is also how Select(qual, name) is handled.
- object ApplyPattern {
- def apply(x: Apply): Pattern = {
- val Apply(fn, args) = x
- def isModule = x.symbol.isModule || x.tpe.termSymbol.isModule
-
- if (fn.isType) {
- if (isTupleType(fn.tpe)) TuplePattern(x)
- else ConstructorPattern(x)
- }
- else if (args.isEmpty) {
- if (isModule) ObjectPattern(x)
- else fn match {
- case _: Ident => ApplyIdentPattern(x)
- case _: Select => ApplySelectPattern(x)
- }
- }
- else abortUnknownTree(x)
- }
- }
-
- /** Some intermediate pattern classes with shared structure **/
-
- sealed trait SelectPattern extends NamePattern {
- def select: Select
- lazy val Select(qualifier, name) = select
- def pathSegments = getPathSegments(tree)
- def backticked: Option[String] = qualifier match {
- case _: This if nme.isVariableName(name) => Some("`%s`".format(name))
- case _ => None
- }
- override def covers(sym: Symbol) = newMatchesPattern(sym, tree.tpe)
- protected def getPathSegments(t: Tree): List[Name] = t match {
- case Select(q, name) => name :: getPathSegments(q)
- case Apply(f, Nil) => getPathSegments(f)
- case _ => Nil
- }
- }
-
- sealed trait NamePattern extends Pattern {
- def name: Name
- override def sufficientType = tpe.narrow
- override def simplify(pv: PatternVar) = this.rebindToEqualsCheck()
- override def description = name.toString
- }
-
- sealed trait UnapplyPattern extends Pattern {
- lazy val UnApply(unfn, args) = tree
- lazy val Apply(fn, _) = unfn
- lazy val MethodType(List(arg, _*), _) = fn.tpe
-
- // Covers if the symbol matches the unapply method's argument type,
- // and the return type of the unapply is Some.
- override def covers(sym: Symbol) = newMatchesPattern(sym, arg.tpe)
- override def necessaryType = arg.tpe
-
- def resTypes = analyzer.unapplyTypeList(unfn.symbol, unfn.tpe, args.length)
- def resTypesString = resTypes match {
- case Nil => "Boolean"
- case xs => xs.mkString(", ")
- }
- }
-
- sealed trait ApplyPattern extends Pattern {
- lazy val Apply(fn, args) = tree
-
- override def covers(sym: Symbol) = newMatchesPattern(sym, fn.tpe)
- }
-
- sealed abstract class Pattern extends PatternBindingLogic {
- def tree: Tree
-
- // returns either a simplification of this pattern or identity.
- def simplify(pv: PatternVar): Pattern = this
-
- // Is this a default pattern (untyped "_" or an EmptyTree inserted by the matcher)
- def isDefault = false
-
- // what type must a scrutinee have to have any chance of matching this pattern?
- def necessaryType = tpe
-
- // what type could a scrutinee have which would automatically indicate a match?
- // (nullness and guards will still be checked.)
- def sufficientType = tpe
-
- // the subpatterns for this pattern (at the moment, that means constructor arguments)
- def subpatterns(pm: MatchMatrix#PatternMatch): List[Pattern] = pm.dummies
-
- // if this pattern should be considered to cover the given symbol
- def covers(sym: Symbol): Boolean = newMatchesPattern(sym, sufficientType)
- def newMatchesPattern(sym: Symbol, pattp: Type) = {
- debugging("[" + kindString + "] Does " + pattp + " cover " + sym + " ? ") {
- (sym.isModuleClass && (sym.tpe.typeSymbol eq pattp.typeSymbol)) ||
- (sym.tpe.baseTypeSeq exists (_ matchesPattern pattp))
- }
- }
-
- def sym = tree.symbol
- def tpe = tree.tpe
- def isEmpty = tree.isEmpty
-
- def isModule = sym.isModule || tpe.termSymbol.isModule
- def isCaseClass = tpe.typeSymbol.isCase
- def isObject = (sym != null) && (sym != NoSymbol) && tpe.prefix.isStable // XXX not entire logic
- def hasStar = false
-
- def equalsCheck =
- tracing("equalsCheck")(
- if (sym.isValue) singleType(NoPrefix, sym)
- else tpe.narrow
- )
-
- /** Standard methods **/
- override def equals(other: Any) = other match {
- case x: Pattern => this.boundTree == x.boundTree
- case _ => super.equals(other)
- }
- override def hashCode() = boundTree.hashCode()
- def description = super.toString
-
- final override def toString = description
-
- def kindString = ""
- }
-
- /*** Extractors ***/
-
- object UnapplyParamType {
- def unapply(x: Tree): Option[Type] = condOpt(unbind(x)) {
- case UnApply(Apply(fn, _), _) => fn.tpe match {
- case m: MethodType => m.paramTypes.head
- }
- }
- }
-}
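Since the deleted file keys its pattern classes to section numbers of the language specification, here is one runnable user-level pattern of each kind for orientation. Person, Limit, describe and the sample values are invented for the example.

object PatternKindsSketch {
  case class Person(name: String, age: Int)
  val Limit = 18 // an upper-case val, so it acts as a stable identifier in patterns

  def describe(x: Any): String = x match {
    case null              => "literal pattern (8.1.3)"
    case Limit             => "stable identifier pattern (8.1.4)"
    case _: String         => "typed pattern (8.1.2)"
    case Person(n, _)      => s"constructor pattern (8.1.5): $n"
    case (a, b)            => s"tuple pattern (8.1.6): $a, $b"
    case Seq(1, rest @ _*) => s"sequence pattern with star (8.1.8): ${rest.size} more elements"
    case 1 | 2             => "alternative pattern (8.1.10)"
    case other             => s"variable pattern (8.1.1): $other"
  }

  def main(args: Array[String]): Unit =
    List(null, 18, "hi", Person("Ada", 36), (1, 2), Seq(1, 2, 3), 2, 3.14)
      .foreach(x => println(describe(x)))
}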
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 8f964cf9e1..9c8ffc5ae3 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -104,7 +104,6 @@ trait ScalaSettings extends AbsScalaSettings
val showPhases = BooleanSetting ("-Xshow-phases", "Print a synopsis of compiler phases.")
val sourceReader = StringSetting ("-Xsource-reader", "classname", "Specify a custom method for reading source files.", "")
- val XoldPatmat = BooleanSetting ("-Xoldpatmat", "Use the pre-2.10 pattern matcher. Otherwise, the 'virtualizing' pattern matcher is used in 2.10.")
val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.")
val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.")
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 01c22245cb..9696692146 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -9,7 +9,6 @@ package transform
import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable.ListBuffer
-import matching.{ Patterns, ParallelMatching }
/** This class ...
*
@@ -17,15 +16,12 @@ import matching.{ Patterns, ParallelMatching }
* @version 1.0
*/
abstract class ExplicitOuter extends InfoTransform
- with Patterns
- with ParallelMatching
with TypingTransformers
with ast.TreeDSL
{
import global._
import definitions._
import CODE._
- import Debug.TRACE
/** The following flags may be set by this phase: */
override def phaseNewFlags: Long = notPROTECTED
@@ -76,9 +72,7 @@ abstract class ExplicitOuter extends InfoTransform
class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer {
override def transform(tree: Tree) = tree match {
- case Bind(_, body) if toRemove(tree.symbol) =>
- TRACE("Dropping unused binding: " + tree.symbol)
- super.transform(body)
+ case Bind(_, body) if toRemove(tree.symbol) => super.transform(body)
case _ => super.transform(tree)
}
}
@@ -363,74 +357,6 @@ abstract class ExplicitOuter extends InfoTransform
}
}
- // requires settings.XoldPatmat.value
- def matchTranslation(tree: Match) = {
- val Match(selector, cases) = tree
- var nselector = transform(selector)
-
- def makeGuardDef(vs: List[Symbol], guard: Tree) = {
- val gdname = unit.freshTermName("gd")
- val method = currentOwner.newMethod(gdname, tree.pos, SYNTHETIC)
- val params = method newSyntheticValueParams vs.map(_.tpe)
- method setInfo new MethodType(params, BooleanClass.tpe)
-
- localTyper typed {
- DEF(method) === guard.changeOwner(currentOwner -> method).substituteSymbols(vs, params)
- }
- }
-
- val nguard = new ListBuffer[Tree]
- val ncases =
- for (CaseDef(pat, guard, body) <- cases) yield {
- // Strip out any unused pattern bindings up front
- val patternIdents = for (b @ Bind(_, _) <- pat) yield b.symbol
- val references: Set[Symbol] = Set(guard, body) flatMap { t => for (id @ Ident(name) <- t) yield id.symbol }
- val (used, unused) = patternIdents partition references
- val strippedPat = if (unused.isEmpty) pat else new RemoveBindingsTransformer(unused.toSet) transform pat
-
- val gdcall =
- if (guard == EmptyTree) EmptyTree
- else {
- val guardDef = makeGuardDef(used, guard)
- nguard += transform(guardDef) // building up list of guards
-
- localTyper typed (Ident(guardDef.symbol) APPLY (used map Ident))
- }
-
- (CASE(transform(strippedPat)) IF gdcall) ==> transform(body)
- }
-
- val (checkExhaustive, requireSwitch) = nselector match {
- case Typed(nselector1, tpt) =>
- val unchecked = tpt.tpe hasAnnotation UncheckedClass
- if (unchecked)
- nselector = nselector1
-
- // Don't require a tableswitch if there are 1-2 casedefs
- // since the matcher intentionally emits an if-then-else.
- (!unchecked, treeInfo.isSwitchAnnotation(tpt.tpe) && ncases.size > 2)
- case _ =>
- (true, false)
- }
-
- val t = atPos(tree.pos) {
- val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe)
- val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context)
-
- /* if @switch annotation is present, verify the resulting tree is a Match */
- if (requireSwitch) t_untyped match {
- case Block(_, Match(_, _)) => // ok
- case _ =>
- unit.error(tree.pos, "could not emit switch for @switch annotated match")
- }
-
- localTyper.typed(t_untyped, context.matchResultType)
- }
-
- if (nguard.isEmpty) t
- else Block(nguard.toList, t) setType t.tpe
- }
-
/** The main transformation method */
override def transform(tree: Tree): Tree = {
val sym = tree.symbol
@@ -512,14 +438,10 @@ abstract class ExplicitOuter extends InfoTransform
})
super.transform(treeCopy.Apply(tree, sel, outerVal :: args))
- // entry point for pattern matcher translation
- case m: Match if settings.XoldPatmat.value => // the new pattern matcher runs in its own phase right after typer
- matchTranslation(m)
-
// for the new pattern matcher
// base.<outer>.eq(o) --> base.$outer().eq(o) if there's an accessor, else the whole tree becomes TRUE
// TODO remove the synthetic `<outer>` method from outerFor??
- case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) if !settings.XoldPatmat.value =>
+ case Apply(eqsel@Select(eqapp@Apply(sel@Select(base, nme.OUTER_SYNTH), Nil), eq), args) =>
val outerFor = sel.symbol.owner.toInterface // TODO: toInterface necessary?
val acc = outerAccessor(outerFor)
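For illustration, a hand-written analogue of the guard lifting that the deleted matchTranslation performed: each guard became a synthetic method (with a fresh name starting with "gd") parameterised over exactly the pattern variables the guard references, and the case then called that method. GuardLiftingSketch, gd1 and sign are invented names.

object GuardLiftingSketch {
  // Stand-in for the synthetic guard method the old matcher would generate.
  private def gd1(x: Int): Boolean = x > 0

  def sign(n: Option[Int]): String = n match {
    case Some(x) if gd1(x) => "positive"
    case Some(_)           => "non-positive"
    case None              => "empty"
  }

  def main(args: Array[String]): Unit =
    List(Some(3), Some(-1), None) foreach (n => println(sign(n)))
}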
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index b94ae99263..6e89f6387e 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -211,16 +211,11 @@ abstract class UnCurry extends InfoTransform
* }
* new $anon()
*
- * If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
- *
*/
def transformFunction(fun: Function): Tree =
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
- case _ if fun.tpe.typeSymbol == PartialFunctionClass =>
- // only get here when running under -Xoldpatmat
- synthPartialFunction(fun)
case _ =>
val parents = (
if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
@@ -259,131 +254,6 @@ abstract class UnCurry extends InfoTransform
}
- /** Transform a function node (x => body) of type PartialFunction[T, R] where
- * body = expr match { case P_i if G_i => E_i }_i=1..n
- * to (assuming none of the cases is a default case):
- *
- * class $anon() extends AbstractPartialFunction[T, R] with Serializable {
- * def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = (expr: @unchecked) match {
- * case P_1 if G_1 => E_1
- * ...
- * case P_n if G_n => E_n
- * case _ => default(expr)
- * }
- * def isDefinedAt(x: T): boolean = (x: @unchecked) match {
- * case P_1 if G_1 => true
- * ...
- * case P_n if G_n => true
- * case _ => false
- * }
- * }
- * new $anon()
- *
- * If there's a default case, the original match is used for applyOrElse, and isDefinedAt returns `true`
- */
- def synthPartialFunction(fun: Function) = {
- if (!settings.XoldPatmat.value)
- devWarning("Under the new pattern matching scheme, PartialFunction should have been synthesized during typers.")
-
- val targs = fun.tpe.typeArgs
- val (formals, restpe) = (targs.init, targs.last)
-
- val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
- val parents = addSerializable(appliedType(AbstractPartialFunctionClass, targs: _*))
- anonClass setInfo ClassInfoType(parents, newScope, anonClass)
-
- // duplicate before applyOrElseMethodDef is run so that it does not mess up our trees and label symbols (we have a fresh set)
- // otherwise `TreeSymSubstituter(fun.vparams map (_.symbol), params)` won't work as the subst has been run already
- val bodyForIDA = {
- val duped = fun.body.duplicate
- val oldParams = new mutable.ListBuffer[Symbol]()
- val newParams = new mutable.ListBuffer[Symbol]()
-
- val oldSyms0 =
- duped filter {
- case l@LabelDef(_, params, _) =>
- params foreach {p =>
- val oldSym = p.symbol
- p.symbol = oldSym.cloneSymbol
- oldParams += oldSym
- newParams += p.symbol
- }
- true
- case _ => false
- } map (_.symbol)
- val oldSyms = oldParams.toList ++ oldSyms0
- val newSyms = newParams.toList ++ (oldSyms0 map (_.cloneSymbol))
- // println("duping "+ oldSyms +" --> "+ (newSyms map (_.ownerChain)))
-
- val substLabels = new TreeSymSubstituter(oldSyms, newSyms)
-
- substLabels(duped)
- }
-
- // def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 =
- val applyOrElseMethodDef = {
- val methSym = anonClass.newMethod(nme.applyOrElse, fun.pos, newFlags = FINAL | OVERRIDE | SYNTHETIC)
-
- val List(argtpe) = formals
- val A1 = methSym newTypeParameter(newTypeName("A1")) setInfo TypeBounds.upper(argtpe)
- val B1 = methSym newTypeParameter(newTypeName("B1")) setInfo TypeBounds.lower(restpe)
- val methFormals = List(A1.tpe, functionType(List(A1.tpe), B1.tpe))
- val params@List(x, default) = methSym newSyntheticValueParams methFormals
- methSym setInfoAndEnter polyType(List(A1, B1), MethodType(params, B1.tpe))
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), List(x))
- val body = localTyper.typedPos(fun.pos) { import CODE._
- def defaultAction(scrut: Tree) = REF(default) APPLY (REF(x))
-
- substParam(fun.body) match {
- case orig@Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) orig
- else {
- val defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, defaultAction(selector.duplicate))
- Match(/*gen.mkUnchecked*/(selector), cases :+ defaultCase)
- }
-
- }
- }
- body.changeOwner(fun.symbol -> methSym)
-
- val methDef = DefDef(methSym, body)
-
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
-
- val isDefinedAtMethodDef = {
- val methSym = anonClass.newMethod(nme.isDefinedAt, fun.pos, FINAL | SYNTHETIC)
- val params = methSym newSyntheticValueParams formals
- methSym setInfoAndEnter MethodType(params, BooleanClass.tpe)
-
- val substParam = new TreeSymSubstituter(fun.vparams map (_.symbol), params)
- def doSubst(x: Tree) = substParam(resetLocalAttrsKeepLabels(x)) // see pos/t1761 for why `resetLocalAttrs`, but must keep label symbols around
-
- val body = bodyForIDA match {
- case Match(selector, cases) =>
- if (cases exists treeInfo.isDefaultCase) TRUE
- else
- doSubst(Match(/*gen.mkUnchecked*/(selector),
- (cases map (c => deriveCaseDef(c)(x => TRUE))) :+ (
- DEFAULT ==> FALSE)))
-
- }
- body.changeOwner(fun.symbol -> methSym)
-
- DefDef(methSym, body)
- }
-
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
- }
-
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
def transformVarargs(varargsElemType: Type) = {
@@ -674,35 +544,6 @@ abstract class UnCurry extends InfoTransform
def isDefaultCatch(cdef: CaseDef) = isThrowable(cdef.pat) && cdef.guard.isEmpty
- def postTransformTry(tree: Try) = {
- val body = tree.block
- val catches = tree.catches
- val finalizer = tree.finalizer
- if (!settings.XoldPatmat.value) {
- if (catches exists (cd => !treeInfo.isCatchCase(cd)))
- devWarning("VPM BUG - illegal try/catch " + catches)
- tree
- } else if (catches forall treeInfo.isCatchCase) {
- tree
- } else {
- val exname = unit.freshTermName("ex$")
- val cases =
- if ((catches exists treeInfo.isDefaultCase) || isDefaultCatch(catches.last)) catches
- else catches :+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Throw(Ident(exname)))
- val catchall =
- atPos(tree.pos) {
- CaseDef(
- Bind(exname, Ident(nme.WILDCARD)),
- EmptyTree,
- Match(Ident(exname), cases))
- }
- debuglog("rewrote try: " + catches + " ==> " + catchall);
- val catches1 = localTyper.typedCases(
- List(catchall), ThrowableClass.tpe, WildcardType)
- treeCopy.Try(tree, body, catches1, finalizer)
- }
- }
-
tree match {
/* Some uncurry post transformations add members to templates.
*
@@ -734,7 +575,9 @@ abstract class UnCurry extends InfoTransform
addJavaVarargsForwarders(dd, flatdd)
case tree: Try =>
- postTransformTry(tree)
+ if (tree.catches exists (cd => !treeInfo.isCatchCase(cd)))
+ devWarning("VPM BUG - illegal try/catch " + tree.catches)
+ tree
case Apply(Apply(fn, args), args1) =>
treeCopy.Apply(tree, fn, args ::: args1)
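For illustration, a hand-written version of the shape the deleted synthPartialFunction comment describes, specialised to the single case { case s: String => s.length }: applyOrElse re-runs the cases and falls back to the default, while isDefinedAt answers the same cases with true or false. This is only a sketch following that comment; the synthesized class additionally mixed in Serializable and carried a serialVersionUID annotation, and the names used here are invented.

object SynthPartialFunctionSketch {
  val byHand: PartialFunction[Any, Int] = new runtime.AbstractPartialFunction[Any, Int] {
    // applyOrElse re-checks the cases; anything unmatched goes to `default`.
    override def applyOrElse[A1 <: Any, B1 >: Int](x: A1, default: A1 => B1): B1 = x match {
      case s: String => s.length
      case _         => default(x)
    }
    // isDefinedAt mirrors the cases, answering true/false instead of computing a result.
    def isDefinedAt(x: Any): Boolean = x match {
      case _: String => true
      case _         => false
    }
  }

  def main(args: Array[String]): Unit = {
    println(byHand.isDefinedAt("abc"))                  // true
    println(byHand.applyOrElse("abc", (_: Any) => -1))  // 3
    println(byHand.applyOrElse(42, (_: Any) => -1))     // -1
  }
}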
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index e8c48184b0..e24f0bca1d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -793,6 +793,12 @@ trait Contexts { self: Analyzer =>
case _ => LookupSucceeded(qual, sym)
}
)
+ def finishDefSym(sym: Symbol, pre0: Type): NameLookup =
+ if (requiresQualifier(sym))
+ finish(gen.mkAttributedQualifier(pre0), sym)
+ else
+ finish(EmptyTree, sym)
+
def isPackageOwnedInDifferentUnit(s: Symbol) = (
s.isDefinedInPackage && (
!currentRun.compiles(s)
@@ -816,17 +822,36 @@ trait Contexts { self: Analyzer =>
found1
}
+
+ def lookupInScope(scope: Scope) =
+ (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
+
+ def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) =
+ logResult(s"!!! lookup overloaded")(owner.newOverloaded(pre, entries map (_.sym)))
+
+ // Constructor lookup should only look in the decls of the enclosing class,
+ // not in the self-type, nor in the enclosing context, nor in imports (SI-4460, SI-6745)
+ if (name == nme.CONSTRUCTOR) return {
+ val enclClassSym = cx.enclClass.owner
+ val scope = cx.enclClass.prefix.baseType(enclClassSym).decls
+ val constructorSym = lookupInScope(scope) match {
+ case Nil => NoSymbol
+ case hd :: Nil => hd.sym
+ case entries => newOverloaded(enclClassSym, cx.enclClass.prefix, entries)
+ }
+ finishDefSym(constructorSym, cx.enclClass.prefix)
+ }
+
// cx.scope eq null arises during FixInvalidSyms in Duplicators
while (defSym == NoSymbol && (cx ne NoContext) && (cx.scope ne null)) {
- pre = cx.enclClass.prefix
- val entries = (cx.scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList
- defSym = entries match {
- case Nil => searchPrefix
- case hd :: tl =>
+ pre = cx.enclClass.prefix
+ defSym = lookupInScope(cx.scope) match {
+ case Nil => searchPrefix
+ case entries @ (hd :: tl) =>
// we have a winner: record the symbol depth
symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth
if (tl.isEmpty) hd.sym
- else logResult(s"!!! lookup overloaded")(cx.owner.newOverloaded(pre, entries map (_.sym)))
+ else newOverloaded(cx.owner, pre, entries)
}
if (!defSym.exists)
cx = cx.outer // push further outward
@@ -864,12 +889,8 @@ trait Contexts { self: Analyzer =>
}
// At this point only one or the other of defSym and impSym might be set.
- if (defSym.exists) {
- if (requiresQualifier(defSym))
- finish(gen.mkAttributedQualifier(pre), defSym)
- else
- finish(EmptyTree, defSym)
- }
+ if (defSym.exists)
+ finishDefSym(defSym, pre)
else if (impSym.exists) {
// We continue walking down the imports as long as the tail is non-empty, which gives us:
// imports == imp1 :: imp2 :: _
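For illustration, the user-level rule the new constructor lookup enforces: this(...) in a secondary constructor resolves only against constructors declared in the enclosing class, never via the self-type, an enclosing scope, or an import (SI-4460, SI-6745). The Greeting class is invented for the example.

object ConstructorLookupSketch {
  class Greeting(val text: String) {
    // The self-constructor call below can only mean Greeting's own primary
    // constructor, regardless of what else named Greeting may be in scope.
    def this() = this("hello")
  }

  def main(args: Array[String]): Unit = {
    println(new Greeting().text)     // hello
    println(new Greeting("hi").text) // hi
  }
}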
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 8d869b669c..ed1e6d01e8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -439,8 +439,8 @@ trait Implicits {
val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null
val result = normSubType(tp, pt) || isView && {
pt match {
- case TypeRef(_, Function1.Sym, args) =>
- matchesPtView(tp, args.head, args.tail.head, undet)
+ case TypeRef(_, Function1.Sym, arg1 :: arg2 :: Nil) =>
+ matchesPtView(tp, arg1, arg2, undet)
case _ =>
false
}
@@ -484,7 +484,7 @@ trait Implicits {
loop(restpe, pt)
else pt match {
case tr @ TypeRef(pre, sym, args) =>
- if (sym.isAliasType) loop(tp, pt.normalize)
+ if (sym.isAliasType) loop(tp, pt.dealias)
else if (sym.isAbstractType) loop(tp, pt.bounds.lo)
else {
val len = args.length - 1
@@ -528,18 +528,15 @@ trait Implicits {
* to a final true or false.
*/
private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2)
- private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.normalize.widen match {
- case tr1 @ TypeRef(_, sym1, _) =>
- // We can only rule out a subtype relationship if the left hand
- // side is a class, else we may not know enough.
- sym1.isClass && (tp2.normalize.widen match {
- case TypeRef(_, sym2, _) =>
- sym2.isClass && !(sym1 isWeakSubClass sym2)
- case RefinedType(parents, decls) =>
- decls.nonEmpty &&
- tr1.member(decls.head.name) == NoSymbol
- case _ => false
- })
+ private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.dealiasWiden match {
+ // We can only rule out a subtype relationship if the left hand
+ // side is a class, else we may not know enough.
+ case tr1 @ TypeRef(_, sym1, _) if sym1.isClass =>
+ tp2.dealiasWiden match {
+ case TypeRef(_, sym2, _) => sym2.isClass && !(sym1 isWeakSubClass sym2)
+ case RefinedType(parents, decls) => decls.nonEmpty && tr1.member(decls.head.name) == NoSymbol
+ case _ => false
+ }
case _ => false
}
@@ -1010,7 +1007,7 @@ trait Implicits {
args foreach (getParts(_))
}
} else if (sym.isAliasType) {
- getParts(tp.normalize)
+ getParts(tp.dealias)
} else if (sym.isAbstractType) {
getParts(tp.bounds.hi)
}
@@ -1041,88 +1038,6 @@ trait Implicits {
infoMap
}
- /** The parts of a type is the smallest set of types that contains
- * - the type itself
- * - the parts of its immediate components (prefix and argument)
- * - the parts of its base types
- * - for alias types and abstract types, we take instead the parts
- * - of their upper bounds.
- * @return For those parts that refer to classes with companion objects that
- * can be accessed with unambiguous stable prefixes, the implicits infos
- * which are members of these companion objects.
-
- private def companionImplicits(tp: Type): Infoss = {
- val partMap = new LinkedHashMap[Symbol, Type]
- val seen = mutable.HashSet[Type]() // cycle detection
-
- /** Enter all parts of `tp` into `parts` set.
- * This method is performance critical: about 2-4% of all type checking is spent here
- */
- def getParts(tp: Type) {
- if (seen(tp))
- return
- seen += tp
- tp match {
- case TypeRef(pre, sym, args) =>
- if (sym.isClass) {
- if (!((sym.name == tpnme.REFINE_CLASS_NAME) ||
- (sym.name startsWith tpnme.ANON_CLASS_NAME) ||
- (sym.name == tpnme.ROOT)))
- partMap get sym match {
- case Some(pre1) =>
- if (!(pre =:= pre1)) partMap(sym) = NoType // ambiguous prefix - ignore implicit members
- case None =>
- if (pre.isStable) partMap(sym) = pre
- val bts = tp.baseTypeSeq
- var i = 1
- while (i < bts.length) {
- getParts(bts(i))
- i += 1
- }
- getParts(pre)
- args foreach getParts
- }
- } else if (sym.isAliasType) {
- getParts(tp.normalize)
- } else if (sym.isAbstractType) {
- getParts(tp.bounds.hi)
- }
- case ThisType(_) =>
- getParts(tp.widen)
- case _: SingletonType =>
- getParts(tp.widen)
- case RefinedType(ps, _) =>
- for (p <- ps) getParts(p)
- case AnnotatedType(_, t, _) =>
- getParts(t)
- case ExistentialType(_, t) =>
- getParts(t)
- case PolyType(_, t) =>
- getParts(t)
- case _ =>
- }
- }
-
- getParts(tp)
-
- val buf = new ListBuffer[Infos]
- for ((clazz, pre) <- partMap) {
- if (pre != NoType) {
- val companion = clazz.companionModule
- companion.moduleClass match {
- case mc: ModuleClassSymbol =>
- buf += (mc.implicitMembers map (im =>
- new ImplicitInfo(im.name, singleType(pre, companion), im)))
- case _ =>
- }
- }
- }
- //println("companion implicits of "+tp+" = "+buf.toList) // DEBUG
- buf.toList
- }
-
-*/
-
/** The implicits made available by type `pt`.
* These are all implicits found in companion objects of classes C
* such that some part of `tp` has C as one of its superclasses.
@@ -1250,7 +1165,7 @@ trait Implicits {
implicit def wrapResult(tree: Tree): SearchResult =
if (tree == EmptyTree) SearchFailure else new SearchResult(tree, if (from.isEmpty) EmptyTreeTypeSubstituter else new TreeTypeSubstituter(from, to))
- val tp1 = tp0.normalize
+ val tp1 = tp0.dealias
tp1 match {
case ThisType(_) | SingleType(_, _) =>
// can't generate a reference to a value that's abstracted over by an existential
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index a541906a99..7188290688 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -43,7 +43,7 @@ trait Infer extends Checkable {
case formal => formal
} else formals
if (isVarArgTypes(formals1) && (removeRepeated || formals.length != nargs)) {
- val ft = formals1.last.normalize.typeArgs.head
+ val ft = formals1.last.dealiasWiden.typeArgs.head
formals1.init ::: (for (i <- List.range(formals1.length - 1, nargs)) yield ft)
} else formals1
}
@@ -575,14 +575,13 @@ trait Infer extends Checkable {
&& (restpe.isWildcard || (varianceInType(restpe)(tparam) & COVARIANT) == 0) // don't retract covariant occurrences
)
- // checks !settings.XoldPatmat.value directly so one need not run under -Xexperimental to use virtpatmat
buf += ((tparam,
if (retract) None
else Some(
if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass)
else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass)
// this infers Foo.type instead of "object Foo" (see also widenIfNecessary)
- else if (targ.typeSymbol.isModuleClass || ((settings.Xexperimental.value || !settings.XoldPatmat.value) && tvar.constr.avoidWiden)) targ
+ else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ
else targ.widen
)
))
@@ -1123,15 +1122,17 @@ trait Infer extends Checkable {
*/
def inferExprInstance(tree: Tree, tparams: List[Symbol], pt: Type = WildcardType, treeTp0: Type = null, keepNothings: Boolean = true, useWeaklyCompatible: Boolean = false): List[Symbol] = {
val treeTp = if(treeTp0 eq null) tree.tpe else treeTp0 // can't refer to tree in default for treeTp0
+ val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
printInference(
ptBlock("inferExprInstance",
"tree" -> tree,
"tree.tpe"-> tree.tpe,
"tparams" -> tparams,
- "pt" -> pt
+ "pt" -> pt,
+ "targs" -> targs,
+ "tvars" -> tvars
)
)
- val (targs, tvars) = exprTypeArgs(tparams, treeTp, pt, useWeaklyCompatible)
if (keepNothings || (targs eq null)) { //@M: adjustTypeArgs fails if targs==null, neg/t0226
substExpr(tree, tparams, targs, pt)
@@ -1438,9 +1439,9 @@ trait Infer extends Checkable {
}
object approximateAbstracts extends TypeMap {
- def apply(tp: Type): Type = tp.normalize match {
+ def apply(tp: Type): Type = tp.dealiasWiden match {
case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType
- case _ => mapOver(tp)
+ case _ => mapOver(tp)
}
}
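As a toy model of the formal-type expansion in the hunk above (which now reaches the element type of a repeated parameter via dealiasWiden), a string-based sketch: a trailing T* formal is replaced by one T per remaining argument. expandFormals and the string encoding are inventions of this example; the compiler operates on Types, not strings.

object VarargsFormalsSketch {
  // Expand a trailing repeated formal ("T*") into one "T" per remaining argument.
  def expandFormals(formals: List[String], nargs: Int): List[String] = formals match {
    case init :+ last if last.endsWith("*") && nargs >= init.length =>
      init ++ List.fill(nargs - init.length)(last.stripSuffix("*"))
    case _ => formals
  }

  def main(args: Array[String]): Unit = {
    // e.g. a method f(x: Int, ys: String*) applied to four arguments
    println(expandFormals(List("Int", "String*"), 4)) // List(Int, String, String, String)
  }
}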
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 4d1ab98fa0..6ed879af14 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -546,6 +546,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
/** Calculate the arguments to pass to a macro implementation when expanding the provided tree.
*/
case class MacroArgs(c: MacroContext, others: List[Any])
+
private def macroArgs(typer: Typer, expandee: Tree): MacroArgs = {
val macroDef = expandee.symbol
val prefixTree = expandee.collect{ case Select(qual, name) => qual }.headOption.getOrElse(EmptyTree)
@@ -574,9 +575,11 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
val preparedArgss: List[List[Any]] =
if (fastTrack contains macroDef) {
- if (fastTrack(macroDef) validate context) argss
+ // Do a dry run of the fast track implementation to check that it can handle the expandee
+ if (fastTrack(macroDef) validate expandee) argss
else typer.TyperErrorGen.MacroPartialApplicationError(expandee)
- } else {
+ }
+ else {
// if paramss have typetag context bounds, add an arglist to argss if necessary and instantiate the corresponding evidences
// consider the following example:
//
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index dba2f25e32..49eca828a9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -67,9 +67,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- def newTransformer(unit: CompilationUnit): Transformer =
- if (!settings.XoldPatmat.value) new MatchTransformer(unit)
- else noopTransformer
+ def newTransformer(unit: CompilationUnit): Transformer = new MatchTransformer(unit)
// duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 4fd65c18d1..b5f456d1ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,7 +13,7 @@ package scala.tools.nsc
package typechecker
import scala.collection.mutable
-import scala.reflect.internal.util.{ BatchSourceFile, Statistics }
+import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
import mutable.ListBuffer
import symtab.Flags._
@@ -96,8 +96,8 @@ trait Typers extends Modes with Adaptations with Tags {
// when true:
// - we may virtualize matches (if -Xexperimental and there's a suitable __match in scope)
// - we synthesize PartialFunction implementations for `x => x match {...}` and `match {...}` when the expected type is PartialFunction
- // this is disabled by: -Xoldpatmat or interactive compilation (we run it for scaladoc due to SI-5933)
- private def newPatternMatching = !settings.XoldPatmat.value && !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
+ // this is disabled by: interactive compilation (we run it for scaladoc due to SI-5933)
+ private def newPatternMatching = !forInteractive //&& !forScaladoc && (phase.id < currentRun.uncurryPhase.id)
abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with TyperContextErrors {
import context0.unit
@@ -227,7 +227,7 @@ trait Typers extends Modes with Adaptations with Tags {
case ExistentialType(tparams, tpe) =>
new SubstWildcardMap(tparams).apply(tp)
case TypeRef(_, sym, _) if sym.isAliasType =>
- val tp0 = tp.normalize
+ val tp0 = tp.dealias
val tp1 = dropExistential(tp0)
if (tp1 eq tp0) tp else tp1
case _ => tp
@@ -413,7 +413,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym &&
sym.isAliasType && sameLength(sym.typeParams, args)) {
hiddenSymbols = hiddenSymbols.tail
- t.normalize
+ t.dealias
} else t
case SingleType(_, sym) =>
checkNoEscape(sym)
@@ -1033,9 +1033,9 @@ trait Typers extends Modes with Adaptations with Tags {
adapt(tree setType restpe, mode, pt, original)
case TypeRef(_, ByNameParamClass, List(arg)) if ((mode & EXPRmode) != 0) => // (2)
adapt(tree setType arg, mode, pt, original)
- case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.normalize.isInstanceOf[ExistentialType] &&
+ case tr @ TypeRef(_, sym, _) if sym.isAliasType && tr.dealias.isInstanceOf[ExistentialType] &&
((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
- adapt(tree setType tr.normalize.skolemizeExistential(context.owner, tree), mode, pt, original)
+ adapt(tree setType tr.dealias.skolemizeExistential(context.owner, tree), mode, pt, original)
case et @ ExistentialType(_, _) if ((mode & (EXPRmode | LHSmode)) == EXPRmode) =>
adapt(tree setType et.skolemizeExistential(context.owner, tree), mode, pt, original)
case PolyType(tparams, restpe) if inNoModes(mode, TAPPmode | PATTERNmode | HKmode) => // (3)
@@ -1105,7 +1105,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (tree1.tpe <:< pt) adapt(tree1, mode, pt, original)
else {
if (inExprModeButNot(mode, FUNmode)) {
- pt.normalize match {
+ pt.dealias match {
case TypeRef(_, sym, _) =>
// note: was if (pt.typeSymbol == UnitClass) but this leads to a potentially
// infinite expansion if pt is constant type ()
@@ -1251,7 +1251,7 @@ trait Typers extends Modes with Adaptations with Tags {
def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
if (isAdaptableWithView(qual)) {
- qual.tpe.widen.normalize match {
+ qual.tpe.dealiasWiden match {
case et: ExistentialType =>
qual setType et.skolemizeExistential(context.owner, qual) // open the existential
case _ =>
@@ -1766,7 +1766,7 @@ trait Typers extends Modes with Adaptations with Tags {
_.typedTemplate(cdef.impl, parentTypes(cdef.impl))
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
- if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.normalize.typeSymbol == AnyClass)
+ if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
if ((clazz != ClassfileAnnotationClass) &&
(clazz isNonBottomSubClass ClassfileAnnotationClass))
@@ -2440,18 +2440,14 @@ trait Typers extends Modes with Adaptations with Tags {
val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector)
val casesTyped = typedCases(cases, selectorTp, pt)
- val (resTp, needAdapt) =
- if (!settings.XoldPatmat.value) ptOrLubPacked(casesTyped, pt)
- else ptOrLub(casesTyped map (_.tpe), pt)
+ val (resTp, needAdapt) = ptOrLubPacked(casesTyped, pt)
val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, resTp))
treeCopy.Match(tree, selector1, casesAdapted) setType resTp
}
- // match has been typed -- virtualize it if we're feeling experimental
- // (virtualized matches are expanded during type checking so they have the full context available)
- // otherwise, do nothing: matches are translated during phase `patmat` (unless -Xoldpatmat)
+ // match has been typed -- virtualize it during type checking so the full context is available
def virtualizedMatch(match_ : Match, mode: Int, pt: Type) = {
import patmat.{ vpmName, PureMatchTranslator }
@@ -3333,7 +3329,7 @@ trait Typers extends Modes with Adaptations with Tags {
// if there's a ClassTag that allows us to turn the unchecked type test for `pt` into a checked type test
// return the corresponding extractor (an instance of ClassTag[`pt`])
- def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (settings.XoldPatmat.value || isPastTyper) None else {
+ def extractorForUncheckedType(pos: Position, pt: Type): Option[Tree] = if (isPastTyper) None else {
// only look at top-level type, can't (reliably) do anything about unchecked type args (in general)
pt.normalize.typeConstructor match {
// if at least one of the types in an intersection is checkable, use the checkable ones
@@ -3679,7 +3675,7 @@ trait Typers extends Modes with Adaptations with Tags {
val normalizeLocals = new TypeMap {
def apply(tp: Type): Type = tp match {
case TypeRef(pre, sym, args) =>
- if (sym.isAliasType && containsLocal(tp)) apply(tp.normalize)
+ if (sym.isAliasType && containsLocal(tp)) apply(tp.dealias)
else {
if (pre.isVolatile)
InferTypeWithVolatileTypeSelectionError(tree, pre)
@@ -4142,8 +4138,7 @@ trait Typers extends Modes with Adaptations with Tags {
// in the special (though common) case where the types are equal, it pays to pack before comparing
// especially virtpatmat needs more aggressive unification of skolemized types
// this breaks src/library/scala/collection/immutable/TrieIterator.scala
- if ( !settings.XoldPatmat.value && !isPastTyper
- && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continations break if you by pass them like this)
+ if (!isPastTyper && thenp1.tpe.annotations.isEmpty && elsep1.tpe.annotations.isEmpty // annotated types need to be lubbed regardless (at least, continuations break if you bypass them like this)
&& thenTp =:= elseTp
) (thenp1.tpe.deconst, false) // use unpacked type. Important to deconst, as is done in ptOrLub, otherwise `if (???) 0 else 0` evaluates to 0 (SI-6331)
// TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
@@ -4157,7 +4152,7 @@ trait Typers extends Modes with Adaptations with Tags {
}
}
- // under -Xexperimental (and not -Xoldpatmat), and when there's a suitable __match in scope, virtualize the pattern match
+ // Under -Xexperimental, when there's a suitable __match in scope, virtualize the pattern match
// otherwise, type the Match and leave it until phase `patmat` (immediately after typer)
// empty-selector matches are transformed into synthetic PartialFunction implementations when the expected type demands it
def typedVirtualizedMatch(tree: Match): Tree = {
@@ -4213,7 +4208,13 @@ trait Typers extends Modes with Adaptations with Tags {
def typedNew(tree: New) = {
val tpt = tree.tpt
val tpt1 = {
- val tpt0 = typedTypeConstructor(tpt)
+ // This way typedNew always returns a dealiased type. This used to happen by accident
+ // for instantiations without type arguments due to ad hoc code in typedTypeConstructor,
+ // and annotations depended on it (to the extent that they worked, which they did
+ // not when given a parameterized type alias which dealiased to an annotation.)
+ // typedTypeConstructor dealiases nothing now, but it makes sense for a "new" to always be
+ // given a dealiased type.
+ val tpt0 = typedTypeConstructor(tpt) modifyType (_.dealias)
if (checkStablePrefixClassType(tpt0))
if (tpt0.hasSymbolField && !tpt0.symbol.typeParams.isEmpty) {
context.undetparams = cloneSymbols(tpt0.symbol.typeParams)
@@ -5312,29 +5313,18 @@ trait Typers extends Modes with Adaptations with Tags {
def typedTypeConstructor(tree: Tree, mode: Int): Tree = {
val result = typed(tree, forTypeMode(mode) | FUNmode, WildcardType)
- val restpe = result.tpe.normalize // normalize to get rid of type aliases for the following check (#1241)
- if (!phase.erasedTypes && restpe.isInstanceOf[TypeRef] && !restpe.prefix.isStable && !context.unit.isJava) {
- // The isJava exception if OK only because the only type constructors scalac gets
- // to see are those in the signatures. These do not need a unique object as a prefix.
- // The situation is different for new's and super's, but scalac does not look deep
- // enough to see those. See #3938
- ConstructorPrefixError(tree, restpe)
- } else {
- //@M fix for #2208
- // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef
- if (result.tpe.typeArgs.isEmpty) {
- // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) {
- // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not
- // designed to deal with the cycles in the scala package (ScalaObject extends
- // AnyRef, but the AnyRef type alias is entered after the scala package is
- // loaded and completed, so that ScalaObject is unpickled while AnyRef is not
- // yet defined )
- // !!! TODO - revisit now that ScalaObject is gone.
- result setType(restpe)
- } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
+ // get rid of type aliases for the following check (#1241)
+ result.tpe.dealias match {
+ case restpe @ TypeRef(pre, _, _) if !phase.erasedTypes && !pre.isStable && !context.unit.isJava =>
+ // The isJava exception is OK only because the only type constructors scalac gets
+ // to see are those in the signatures. These do not need a unique object as a prefix.
+ // The situation is different for new's and super's, but scalac does not look deep
+ // enough to see those. See #3938
+ ConstructorPrefixError(tree, restpe)
+ case _ =>
+ // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208
// during uncurry (after refchecks), all types are normalized
result
- }
}
}
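For illustration of the typedNew change above, a runnable example of instantiating through a parameterized type alias; Box and Boxed are invented names. Per the comment in the hunk, new now always constructs the dealiased type rather than relying on the ad hoc dealiasing that typedTypeConstructor used to perform.

object NewDealiasSketch {
  class Box[A](val value: A)
  type Boxed[A] = Box[A] // a parameterized type alias

  def main(args: Array[String]): Unit = {
    // `new` through the alias constructs the dealiased class type Box[Int].
    val b = new Boxed[Int](1)
    println(b.value)
  }
}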
diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala
index 792a659ad6..039fec8605 100644
--- a/src/compiler/scala/tools/nsc/util/package.scala
+++ b/src/compiler/scala/tools/nsc/util/package.scala
@@ -69,7 +69,7 @@ package object util {
* (to exclude assert, require, etc.)
*/
def stackTraceHeadString(ex: Throwable): String = {
- val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef").head
+ val frame = ex.getStackTrace.dropWhile(_.getClassName contains "Predef") take 1 mkString ""
val msg = ex.getMessage match { case null | "" => "" ; case s => s"""("$s")""" }
val clazz = ex.getClass.getName.split('.').last
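A small runnable note on the behavioural difference in the hunk above: .head throws on an empty array, whereas take 1 followed by mkString degrades to an empty string when dropWhile has consumed every frame. The fake frame names below are invented.

object HeadVsTakeSketch {
  def main(args: Array[String]): Unit = {
    val frames = Array("scala.Predef$", "scala.Predef$.require") // every entry mentions Predef
    // frames.dropWhile(_ contains "Predef").head would throw NoSuchElementException here;
    // take 1 followed by mkString yields an empty string instead.
    println("[" + frames.dropWhile(_ contains "Predef").take(1).mkString("") + "]")
  }
}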
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index d35ac43424..ac50324fa9 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -2,7 +2,9 @@ package scala.tools
package reflect
import scala.reflect.reify.Taggers
-import scala.tools.nsc.typechecker.{Analyzer, Macros}
+import scala.tools.nsc.typechecker.{ Analyzer, Macros }
+import scala.reflect.runtime.Macros.currentMirror
+import scala.reflect.api.Universe
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -12,30 +14,32 @@ trait FastTrack {
import global._
import definitions._
-
import scala.language.implicitConversions
- private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } = new { val c: c0.type = c0 } with Taggers
- private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } = new { val c: c0.type = c0 } with MacroImplementations
+ import treeInfo.Applied
+
+ private implicit def context2taggers(c0: MacroContext): Taggers { val c: c0.type } =
+ new { val c: c0.type = c0 } with Taggers
+ private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
+ new { val c: c0.type = c0 } with MacroImplementations
+ private def make(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
+ sym -> new FastTrackEntry(pf)
- implicit def fastTrackEntry2MacroRuntime(entry: FastTrackEntry): MacroRuntime = args => entry.run(args.c)
- type FastTrackExpander = PartialFunction[(MacroContext, Tree), Tree]
- case class FastTrackEntry(sym: Symbol, expander: FastTrackExpander) {
- def validate(c: MacroContext): Boolean = expander.isDefinedAt((c, c.expandee))
- def run(c: MacroContext): Any = {
- val result = expander((c, c.expandee))
- c.Expr[Nothing](result)(c.WeakTypeTag.Nothing)
+ final class FastTrackEntry(pf: PartialFunction[Applied, MacroContext => Tree]) extends (MacroArgs => Any) {
+ def validate(tree: Tree) = pf isDefinedAt Applied(tree)
+ def apply(margs: MacroArgs) = {
+ val MacroArgs(c, args) = margs
+ // Macros validated that the pf is defined here - and there's not much we could do if it weren't.
+ c.Expr[Nothing](pf(Applied(c.expandee))(c))(c.WeakTypeTag.Nothing)
}
}
- lazy val fastTrack: Map[Symbol, FastTrackEntry] = {
- var registry = Map[Symbol, FastTrackEntry]()
- implicit class BindTo(sym: Symbol) { def bindTo(expander: FastTrackExpander): Unit = if (sym != NoSymbol) registry += sym -> FastTrackEntry(sym, expander) }
- materializeClassTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List())) => c.materializeClassTag(tt.tpe) }
- materializeWeakTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = false) }
- materializeTypeTag bindTo { case (c, Apply(TypeApply(_, List(tt)), List(u))) => c.materializeTypeTag(u, EmptyTree, tt.tpe, concrete = true) }
- ApiUniverseReify bindTo { case (c, Apply(TypeApply(_, List(tt)), List(expr))) => c.materializeExpr(c.prefix.tree, EmptyTree, expr) }
- ReflectRuntimeCurrentMirror bindTo { case (c, _) => scala.reflect.runtime.Macros.currentMirror(c).tree }
- StringContext_f bindTo { case (c, app@Apply(Select(Apply(_, parts), _), args)) => c.macro_StringInterpolation_f(parts, args, app.pos) }
- registry
- }
+ /** A map from a set of pre-established macro symbols to their implementations. */
+ lazy val fastTrack = Map[Symbol, FastTrackEntry](
+ make( materializeClassTag) { case Applied(_, ttag :: Nil, _) => _.materializeClassTag(ttag.tpe) },
+ make( materializeWeakTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = false) },
+ make( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
+ make( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
+ make( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
+ make(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree }
+ )
}
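Each registered macro now pairs its symbol with a partial function over the dissected application (treeInfo.Applied), and the registry is a plain Map built in one expression. A stripped-down analogue of the make(sym)(pf) pattern, with String keys and Int payloads standing in for Symbol, Applied and MacroContext => Tree:

    object FastTrackSketch extends App {
      // One entry per hardwired expansion: a partial function from arguments to result.
      final class Entry(pf: PartialFunction[List[Int], Int]) extends (List[Int] => Int) {
        def validate(args: List[Int]) = pf isDefinedAt args
        def apply(args: List[Int])    = pf(args)
      }
      def make(name: String)(pf: PartialFunction[List[Int], Int]) = name -> new Entry(pf)

      // The registry is just a Map, built once and consulted at expansion time.
      val fastTrack = Map[String, Entry](
        make("sum")  { case xs if xs.nonEmpty => xs.sum },
        make("head") { case x :: _            => x      }
      )

      println(fastTrack("sum")(List(1, 2, 3)))   // 6
      println(fastTrack("head").validate(Nil))   // false: the pf is not defined there
    }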
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index d7c50504a8..ab967496c4 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -37,7 +37,7 @@ abstract class MacroImplementations {
val argsStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
- val freshName = newTermName(c.fresh("arg$"))
+ val freshName = newTermName(c.freshName("arg$"))
evals += ValDef(Modifiers(), freshName, TypeTree(tpe) setPos value.pos.focus, value) setPos value.pos
ids += Ident(freshName)
}
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index 34be977905..637a7a4c40 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -89,6 +89,12 @@ trait Trees { self: Universe =>
*/
def isEmpty: Boolean
+ /** Is this tree not one of the empty trees?
+ *
+ * @see `isEmpty`
+ */
+ def nonEmpty: Boolean
+
/** Can this tree carry attributes (i.e. symbols, types or positions)?
* Typically the answer is yes, except for the `EmptyTree` null object and
* two special singletons: `emptyValDef` and `pendingSuperCall`.
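The new nonEmpty is just the complement of isEmpty, so call sites can test trees positively. A tiny usage sketch against a runtime universe that includes this change:

    import scala.reflect.runtime.universe._

    object TreeEmptinessDemo extends App {
      val realTree: Tree = Literal(Constant(42))
      println(realTree.nonEmpty)   // true: an ordinary tree
      println(EmptyTree.isEmpty)   // true: the canonical empty tree
      println(EmptyTree.nonEmpty)  // false
    }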
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 13b761086c..9614513458 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -621,6 +621,8 @@ abstract class TreeInfo {
* For advanced use, call `dissectApplied` explicitly and use its methods instead of pattern matching.
*/
object Applied {
+ def apply(tree: Tree): Applied = new Applied(tree)
+
def unapply(applied: Applied): Option[(Tree, List[Tree], List[List[Tree]])] =
Some((applied.core, applied.targs, applied.argss))
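With apply added next to unapply, callers can write Applied(tree) directly, which is what the new FastTrack entries rely on. treeInfo is compiler-internal, so here is a hypothetical extractor of the same shape over runtime-universe trees, peeling a curried, type-applied call into core, type args and argument lists:

    import scala.reflect.runtime.universe._

    // Hypothetical stand-in for treeInfo.Applied, not the compiler's implementation.
    object AppliedSketch {
      def apply(tree: Tree): (Tree, List[Tree], List[List[Tree]]) = tree match {
        case Apply(fn, args) =>
          val (core, targs, argss) = apply(fn)
          (core, targs, argss :+ args)
        case TypeApply(fn, targs) => (fn, targs, Nil)
        case _                    => (tree, Nil, Nil)
      }
    }

    object AppliedDemo extends App {
      // f[Int](1)(2), built by hand
      val call = Apply(
        Apply(
          TypeApply(Ident(newTermName("f")), List(Ident(newTypeName("Int")))),
          List(Literal(Constant(1)))),
        List(Literal(Constant(2))))

      val (core, targs, argss) = AppliedSketch(call)
      println(core)          // f
      println(targs)         // List(Int)
      println(argss.length)  // 2 argument lists
    }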
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 9795299342..2029fb67b3 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -39,6 +39,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
def isDef = false
def isEmpty = false
+ def nonEmpty = !isEmpty
+
def canHaveAttrs = true
/** The canonical way to test if a Tree represents a term.
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 282d7e18ac..c121c6020e 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -563,6 +563,26 @@ trait Types extends api.Types { self: SymbolTable =>
/** Expands type aliases. */
def dealias = this
+ /** Repeatedly apply widen and dealias until they have no effect.
+ * This compensates for the fact that type aliases can hide beneath
+ * singleton types and singleton types can hide inside type aliases.
+ */
+ def dealiasWiden: Type = (
+ if (this ne widen) widen.dealiasWiden
+ else if (this ne dealias) dealias.dealiasWiden
+ else this
+ )
+
+ /** All the types encountered in the course of dealiasing/widening,
+ * including each intermediate beta reduction step (whereas calling
+ * dealias applies as many as possible.)
+ */
+ def dealiasWidenChain: List[Type] = this :: (
+ if (this ne widen) widen.dealiasWidenChain
+ else if (this ne betaReduce) betaReduce.dealiasWidenChain
+ else Nil
+ )
+
def etaExpand: Type = this
/** Performs a single step of beta-reduction on types.
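The fixpoint described in the comment is simply: keep applying widen and dealias until neither changes the type. The same loop can be sketched with the public widen and dealias operations of a 2.11 runtime universe (the compiler's version lives on Type itself; the names below are invented):

    import scala.reflect.runtime.universe._
    import scala.annotation.tailrec

    object Aliases { type Opt = Option[Int]; val x: Opt = None }

    object DealiasWidenSketch extends App {
      // Aliases can hide under singleton types and vice versa, so alternate
      // widening and dealiasing until a fixed point is reached.
      @tailrec def dealiasWiden(tp: Type): Type =
        if (tp.widen ne tp)        dealiasWiden(tp.widen)
        else if (tp.dealias ne tp) dealiasWiden(tp.dealias)
        else tp

      val single = typeOf[Aliases.x.type]   // singleton type whose underlying type is an alias
      println(single)                       // Aliases.x.type
      println(single.widen)                 // the alias Aliases.Opt
      println(dealiasWiden(single))         // Option[Int], the fully expanded type
    }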
@@ -3124,23 +3144,20 @@ trait Types extends api.Types { self: SymbolTable =>
* Checks subtyping of higher-order type vars, and uses variances as defined in the
* type parameter we're trying to infer (the result will be sanity-checked later).
*/
- def unifyFull(tpe: Type) = {
- // The alias/widen variations are often no-ops.
- val tpes = (
- if (isLowerBound) List(tpe, tpe.widen, tpe.dealias, tpe.widen.dealias).distinct
- else List(tpe)
- )
- tpes exists { tp =>
- val lhs = if (isLowerBound) tp.typeArgs else typeArgs
- val rhs = if (isLowerBound) typeArgs else tp.typeArgs
-
- sameLength(lhs, rhs) && {
+ def unifyFull(tpe: Type): Boolean = {
+ def unifySpecific(tp: Type) = {
+ sameLength(typeArgs, tp.typeArgs) && {
+ val lhs = if (isLowerBound) tp.typeArgs else typeArgs
+ val rhs = if (isLowerBound) typeArgs else tp.typeArgs
// this is a higher-kinded type var with same arity as tp.
// side effect: adds the type constructor itself as a bound
addBound(tp.typeConstructor)
isSubArgs(lhs, rhs, params, AnyDepth)
}
}
+ // The type with which we can successfully unify can be hidden
+ // behind singleton types and type aliases.
+ tpe.dealiasWidenChain exists unifySpecific
}
// There's a <: test taking place right now, where tp is a concrete type and this is a typevar
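unifyFull used to try a hand-assembled list of widened/dealiased variants; it now tests every step of the dealias/widen chain. A compact sketch of that chain-plus-exists shape (public 2.11 API; the real dealiasWidenChain steps through betaReduce rather than dealias):

    import scala.reflect.runtime.universe._

    object ChainAliases { type Cell = Some[Int]; val c: Cell = Some(1) }

    object DealiasWidenChainSketch extends App {
      // Every intermediate type seen while widening/dealiasing, head first.
      def chain(tp: Type): List[Type] = tp :: (
        if (tp.widen ne tp)        chain(tp.widen)
        else if (tp.dealias ne tp) chain(tp.dealias)
        else Nil
      )

      val c = chain(typeOf[ChainAliases.c.type])
      c foreach println                          // c.type, the alias Cell, then Some[Int]
      // Only the last step exposes its type arguments -- the situation unifyFull cares about.
      println(c exists (_.typeArgs.nonEmpty))    // true
    }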
@@ -3236,7 +3253,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (constr.instValid) constr.inst
// get here when checking higher-order subtyping of the typevar by itself
// TODO: check whether this ever happens?
- else if (isHigherKinded) typeFun(params, applyArgs(params map (_.typeConstructor)))
+ else if (isHigherKinded) logResult(s"Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor))))
else super.normalize
)
override def typeSymbol = origin.typeSymbol
@@ -3663,7 +3680,7 @@ trait Types extends api.Types { self: SymbolTable =>
def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type =
if (tparams.isEmpty) tpe0
else {
- val tpe = deAlias(tpe0)
+ val tpe = normalizeAliases(tpe0)
val tpe1 = new ExistentialExtrapolation(tparams) extrapolate tpe
var tparams0 = tparams
var tparams1 = tparams0 filter tpe1.contains
@@ -3677,13 +3694,16 @@ trait Types extends api.Types { self: SymbolTable =>
newExistentialType(tparams1, tpe1)
}
- /** Remove any occurrences of type aliases from this type */
- object deAlias extends TypeMap {
- def apply(tp: Type): Type = mapOver {
- tp match {
- case TypeRef(pre, sym, args) if sym.isAliasType => tp.normalize
- case _ => tp
- }
+ /** Normalize any type aliases within this type (@see Type#normalize).
+ * Note that this depends very much on the call to "normalize", not "dealias",
+ * so it no longer carries the too-stealthy name "deAlias".
+ */
+ object normalizeAliases extends TypeMap {
+ def apply(tp: Type): Type = tp match {
+ case TypeRef(_, sym, _) if sym.isAliasType =>
+ def msg = if (tp.isHigherKinded) s"Normalizing type alias function $tp" else s"Dealiasing type alias $tp"
+ mapOver(logResult(msg)(tp.normalize))
+ case _ => mapOver(tp)
}
}
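The map just walks the type and expands alias references wherever it finds them. A toy version of that idea over a made-up type ADT (no compiler types involved):

    object NormalizeAliasesSketch extends App {
      // A tiny structural type language: plain references, aliases, and applied types.
      sealed trait Tpe
      case class Ref(name: String)                    extends Tpe
      case class Alias(name: String, underlying: Tpe) extends Tpe
      case class TApp(tycon: Tpe, args: List[Tpe])    extends Tpe

      // Expand every alias encountered and keep mapping over the result,
      // mirroring the mapOver(tp.normalize) shape of normalizeAliases above.
      def normalizeAliases(tp: Tpe): Tpe = tp match {
        case Alias(_, underlying) => normalizeAliases(underlying)
        case TApp(tycon, args)    => TApp(normalizeAliases(tycon), args map normalizeAliases)
        case ref @ Ref(_)         => ref
      }

      val listOfAlias = TApp(Ref("List"), List(Alias("S", Ref("String"))))
      println(normalizeAliases(listOfAlias))   // TApp(Ref(List),List(Ref(String)))
    }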
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index 81ed63bfc6..d5ed9dab5b 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -44,7 +44,6 @@ abstract class MutableSettings extends AbsSettings {
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
def Xexperimental: BooleanSetting
- def XoldPatmat: BooleanSetting
def XnoPatmatAnalysis: BooleanSetting
def XfullLubs: BooleanSetting
def breakCycles: BooleanSetting
diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala
index 8bbaa5f848..7e2ac5e02d 100644
--- a/src/reflect/scala/reflect/macros/Names.scala
+++ b/src/reflect/scala/reflect/macros/Names.scala
@@ -11,13 +11,27 @@ trait Names {
self: Context =>
/** Creates a unique string. */
+ @deprecated("Use freshName instead", "2.11.0")
def fresh(): String
/** Creates a unique string having a given prefix. */
+ @deprecated("Use freshName instead", "2.11.0")
def fresh(name: String): String
/** Creates a unique name having a given name as a prefix and
* having the same flavor (term name or type name) as the given name.
*/
+ @deprecated("Use freshName instead", "2.11.0")
def fresh[NameType <: Name](name: NameType): NameType
+
+ /** Creates a unique string. */
+ def freshName(): String
+
+ /** Creates a unique string having a given prefix. */
+ def freshName(name: String): String
+
+ /** Creates a unique name having a given name as a prefix and
+ * having the same flavor (term name or type name) as the given name.
+ */
+ def freshName[NameType <: Name](name: NameType): NameType
}
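freshName hands out names that will not collide within the compilation unit, so macro-generated ValDefs cannot capture user identifiers. A counter-based sketch of the same contract (this is not the compiler's FreshNameCreator, just an illustration of what the API promises):

    import java.util.concurrent.atomic.AtomicLong

    object FreshNameSketch extends App {
      private val counter = new AtomicLong(0)

      // Same spirit as Context#freshName(prefix): unique within this creator.
      def freshName(prefix: String = "fresh$"): String =
        prefix + counter.incrementAndGet()

      println(freshName("arg$"))   // arg$1
      println(freshName("arg$"))   // arg$2 -- never the same string twice
      println(freshName())         // fresh$3
    }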
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 7d04202455..ba524f4df2 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -32,7 +32,6 @@ private[reflect] class Settings extends MutableSettings {
val Xexperimental = new BooleanSetting(false)
val XfullLubs = new BooleanSetting(false)
val XnoPatmatAnalysis = new BooleanSetting(false)
- val XoldPatmat = new BooleanSetting(false)
val Xprintpos = new BooleanSetting(false)
val Ynotnull = new BooleanSetting(false)
val Yshowsymkinds = new BooleanSetting(false)