Diffstat (limited to 'src/compiler')
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala              |   6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala  | 122
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala      |   2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala           |  15
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala        |   3
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala         |  30
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala           |   4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala            | 194
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala          |   3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala       | 190
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala                |  18
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala             |   1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala      | 225
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala        |  10
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala             |  14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala                |  15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala               |   1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala      |   7
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala               | 510
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala        |  64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala      | 186
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala            |  23
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala               |  99
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala            |  13
24 files changed, 1324 insertions, 431 deletions
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 046122d83b..79f0bcf149 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -288,10 +288,16 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
+ def resetOffset() {
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
if (token == CLASS) {
token = CASECLASS
+ resetOffset()
} else if (token == OBJECT) {
token = CASEOBJECT
+ resetOffset()
} else {
lastOffset = nextLastOffset
next copyFrom this
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fee683ce3a..d4a6d18c60 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -18,6 +18,9 @@ abstract class DeadCodeElimination extends SubComponent {
import icodes.opcodes._
import definitions.RuntimePackage
+ /** The block and index where an instruction is located */
+ type InstrLoc = (BasicBlock, Int)
+
val phaseName = "dce"
/** Create a new phase */
@@ -55,27 +58,35 @@ abstract class DeadCodeElimination extends SubComponent {
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
- var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
+ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
+ val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
+
+ /** Map from a local and a basic block to the instructions that store to that local in that basic block */
+ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
+
+ /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
+ val clobbers = mutable.Set[InstrLoc]()
/** the current method. */
var method: IMethod = _
/** Map instructions who have a drop on some control path, to that DROP instruction. */
- val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
+ val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
debuglog("dead code elimination on " + m);
dropOf.clear()
+ localStores.clear()
+ clobbers.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
@@ -104,10 +115,10 @@ abstract class DeadCodeElimination extends SubComponent {
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
- case LOAD_LOCAL(l) =>
+ case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
- case STORE_LOCAL(_) =>
+ case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
* (otherwise any side-effects of the module's constructor go lost).
* (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
@@ -125,6 +136,11 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
if (necessary) worklist += ((bb, idx))
+ // add it to the localStores map
+ val key = (l, bb)
+ val set = localStores(key)
+ set += idx
+ localStores(key) = set
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
@@ -162,11 +178,18 @@ abstract class DeadCodeElimination extends SubComponent {
def mark() {
// log("Starting with worklist: " + worklist)
while (!worklist.isEmpty) {
- val (bb, idx) = worklist.iterator.next
+ val (bb, idx) = worklist.head
worklist -= ((bb, idx))
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
+ // adds the instructions that define the stack values about to be consumed to the work list to
+ // be marked useful
+ def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
+ debuglog(s"\t${bb1(idx1)} is consumed by $instr")
+ worklist += ((bb1, idx1))
+ }
+
if (!useful(bb)(idx)) {
useful(bb) += idx
dropOf.get(bb, idx) foreach {
@@ -180,6 +203,15 @@ abstract class DeadCodeElimination extends SubComponent {
worklist += ((bb1, idx1))
}
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ addDefs()
+ // see SI-5313
+ // search for clobbers of this store if we aren't doing l1 = null
+ // this doesn't catch the second store in x=null;l1=x; but in practice this catches
+ // a lot of null stores very cheaply
+ if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
+ findClobbers(l1, bb, idx + 1)
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -199,14 +231,72 @@ abstract class DeadCodeElimination extends SubComponent {
()
case _ =>
- for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- debuglog("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
+ addDefs()
}
}
}
}
+
+ /**
+ * Finds and marks all clobbers of the given local starting in the given
+ * basic block at the given index
+ *
+ * Storing to local variables of reference or array type may be indirectly
+ * observable because it may remove a reference to an object which may allow the object
+ * to be gc'd. See SI-5313. In this code, the LOCAL_STORE(s) that follow a LOCAL_STORE
+ * and that store to the same local are called "clobbers." If a LOCAL_STORE is marked
+ * useful then its clobbers must go into the set of clobbers, which will be
+ * compensated for later
+ */
+ def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
+ // previously visited blocks tracked to prevent searching forever in a cycle
+ val inspected = mutable.Set[BasicBlock]()
+ // our worklist of blocks that still need to be checked
+ val blocksToBeInspected = mutable.Set[BasicBlock]()
+
+ // Tries to find the next clobber of the local l in bb1 starting at idx1.
+ // If it finds one it adds the clobber to the clobbers set for later
+ // handling. If not, it adds the direct successor blocks to
+ // blocksToBeInspected to try to find clobbers there. Either way
+ // it adds the exception successor blocks for further search.
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ val key = ((l, bb1))
+ val foundClobber = (localStores contains key) && {
+ def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
+
+ // find the smallest index greater than or equal to idx1
+ val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
+ if (clobberIdx == -1)
+ false
+ else {
+ debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
+ clobbers += ((bb1, clobberIdx))
+ true
+ }
+ }
+
+ // always need to look into the exception successors for additional clobbers
+ // because we don't know when flow might enter an exception handler
+ blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
+ // If we didn't find a clobber here then we need to look at successor blocks.
+ // if we found a clobber then we don't need to search in the direct successors
+ if (!foundClobber) {
+ blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
+ }
+ }
+
+ // first search starting at the current index
+ // note we don't put bb in the inspected list yet because a loop may later force
+ // us back around to search from the beginning of bb
+ findClobberInBlock(idx, bb)
+ // then loop until we've exhausted the set of uninspected blocks
+ while(!blocksToBeInspected.isEmpty) {
+ val bb1 = blocksToBeInspected.head
+ blocksToBeInspected -= bb1
+ inspected += bb1
+ findClobberInBlock(0, bb1)
+ }
+ }
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
@@ -236,6 +326,12 @@ abstract class DeadCodeElimination extends SubComponent {
i match {
case NEW(REFERENCE(sym)) =>
log(s"Eliminated instantation of $sym inside $m")
+ case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
+ // if an unused instruction was a clobber of a used store to a reference or array type
+ // then we'll replace it with the store of a null to make sure the reference is
+ // eliminated. See SI-5313
+ bb emit CONSTANT(Constant(null))
+ bb emit STORE_LOCAL(l)
case _ => ()
}
debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
@@ -247,8 +343,8 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
- val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
+ private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
+ val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
@@ -287,7 +383,7 @@ abstract class DeadCodeElimination extends SubComponent {
res
}
- private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = {
+ private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
if (idx != -1)
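For orientation, a hedged source-level sketch (hypothetical, not taken from the patch) of the situation the clobber machinery above handles under the ICode optimizer: a dead store that overwrites a useful store to a reference-typed local must not simply vanish, or the local keeps the earlier object reachable.

object Si5313Sketch {
  def process(): Unit = {
    var buf: AnyRef = new Array[Byte](1 << 20) // useful store: consumed by the call below
    consume(buf)
    buf = new Object()                         // dead store that clobbers the useful one; plain
                                               // elimination would leave `buf` referencing the
                                               // large array, so sweep() re-emits the slot as a
                                               // store of null instead (see the SI-5313 comments)
    doOtherWork()
  }
  private def consume(a: AnyRef): Unit = println(a)
  private def doOtherWork(): Unit = (1 to 1000000).foreach(_ => ())
}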
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 10e2f23142..4ee6daf73e 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -20,7 +20,7 @@ object IndexModelFactory {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
- implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
+ implicit def orderingMap = math.Ordering.String
def addMember(d: MemberEntity) = {
val firstLetter = {
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index c6cfc317ea..0a469c9227 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -314,12 +314,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
inform("Creating doc template for " + sym)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- def inSource =
- if (sym.sourceFile != null && ! sym.isSynthetic)
- Some((sym.sourceFile, sym.pos.line))
+
+ protected def inSourceFromSymbol(symbol: Symbol) =
+ if (symbol.sourceFile != null && ! symbol.isSynthetic)
+ Some((symbol.sourceFile, symbol.pos.line))
else
None
+ def inSource = inSourceFromSymbol(sym)
+
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -508,11 +511,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val linearization = {
- val symbol = sym.info.members.find {
+ override lazy val (inSource, linearization) = {
+ val representative = sym.info.members.find {
s => s.isPackageObject
} getOrElse sym
- linearizationFromSymbol(symbol)
+ (inSourceFromSymbol(representative), linearizationFromSymbol(representative))
}
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 9d01e73063..dbb9b7a003 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -15,6 +15,7 @@ import symtab.Flags
import mutable.ListBuffer
import scala.annotation.elidable
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -821,7 +822,7 @@ trait ParallelMatching extends ast.TreeDSL
// match that's unimportant; so we add an instance check only if there
// is a binding.
def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
+ if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
cunit.warning(scrutTree.pos,
"A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
}
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index f1f289ed4d..e4f99474e1 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -221,6 +221,7 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -486,6 +487,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
+ /** A setting represented by a Scala version (`default` unless set). */
+ class ScalaVersionSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ default: ScalaVersion)
+ extends Setting(name, descr) {
+ import ScalaVersion._
+
+ type T = ScalaVersion
+ protected var v: T = NoScalaVersion
+
+ override def tryToSet(args: List[String]) = {
+ value = default
+ Some(args)
+ }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => value = default; Some(Nil)
+ case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ }
+
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
+
+ def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
+
+ withHelpSyntax(s"${name}:<${arg}>")
+ }
+
class PathSetting private[nsc](
name: String,
descr: String,
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index b820d10ddc..0a98d45cac 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -85,8 +85,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.").
- withDeprecationMessage("This setting is no longer useful and will be removed. Please remove it from your build.")
+ val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -169,6 +168,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
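A hedged usage sketch (not part of the patch) of how the reworked -Xmigration flag behaves through the new ScalaVersionSetting, assuming a standard scala.tools.nsc.Settings instance:

import scala.tools.nsc.Settings
import scala.tools.nsc.settings.{ NoScalaVersion, ScalaVersion }

object XmigrationDemo extends App {
  val settings = new Settings(msg => sys.error(msg))

  // Flag absent: the setting keeps its initial value, NoScalaVersion, which sorts above every
  // real version, so migration checks such as `Xmigration.value < twoDotEight` stay quiet.
  assert(settings.Xmigration.value == NoScalaVersion)

  // "-Xmigration:2.9.1" parses the version after the colon; a bare "-Xmigration" would instead
  // take the declared default, AnyScalaVersion, enabling all migration warnings.
  settings.processArguments(List("-Xmigration:2.9.1"), true)
  assert(settings.Xmigration.value == ScalaVersion("2.9.1"))
  assert(settings.Xmigration.value > ScalaVersion.twoDotEight) // so 2.8-era warnings are suppressed
}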
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
new file mode 100644
index 0000000000..d6a0149411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -0,0 +1,194 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+// $Id$
+
+package scala.tools.nsc.settings
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A Scala version that sorts higher than all actual versions
+ */
+case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. A SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to segregate builds
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Factory methods for producing ScalaVersions
+ */
+object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
+ def errorAndValue() = {
+ errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
+ AnyScalaVersion
+ }
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = util.Try(toInt(s)).isSuccess
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "none" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
+ case _ =>
+ errorAndValue()
+ } catch {
+ case e: NumberFormatException => errorAndValue()
+ }
+ }
+
+ def apply(versionString: String): ScalaVersion =
+ apply(versionString, msg => throw new NumberFormatException(msg))
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = apply(util.Properties.versionNumberString)
+
+ /**
+ * The 2.8.0 version.
+ */
+ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final)
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+/**
+ * A development, test, nightly, snapshot or other "unofficial" build
+ */
+case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+}
+/**
+ * A final release
+ */
+case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+}
+
+/**
+ * A candidate for final release
+ */
+case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+}
+
+/**
+ * An intermediate release
+ */
+case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+}
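A hedged usage sketch (not part of the patch) of the parsing and ordering defined by ScalaVersion and ScalaBuild above:

import scala.tools.nsc.settings._

object ScalaVersionDemo extends App {
  assert(ScalaVersion("2.8.0") == SpecificScalaVersion(2, 8, 0, Final))
  assert(ScalaVersion("2.10.0-M5") < ScalaVersion("2.10.0-RC1")) // milestones sort below RCs,
  assert(ScalaVersion("2.10.0-RC1") < ScalaVersion("2.10.0"))    // which sort below final releases
  assert(AnyScalaVersion < ScalaVersion("2.7.7"))                // "any" is below every real version
  assert(ScalaVersion("2.11.0") < NoScalaVersion)                // "none" is above every real version
}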
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 1003d417f6..78c120c1ad 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -10,6 +10,7 @@ import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable.ListBuffer
import matching.{ Patterns, ParallelMatching }
+import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -553,7 +554,7 @@ abstract class ExplicitOuter extends InfoTransform
}
case _ =>
- if (settings.Xmigration28.value) tree match {
+ if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
if (isArraySeqTest(qual.tpe, args.head.tpe))
unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 589aa43ac2..bc54054028 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -91,39 +91,42 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
matching.head
}
+ /** Recognize a MethodType which represents an extension method.
+ *
+ * It may have a curried parameter list with the `$this` alone in the first
+ * parameter list, in which case that parameter list is dropped. Or, since
+ * the curried lists disappear during uncurry, it may have a single parameter
+ * list with `$this` as the first parameter, in which case that parameter is
+ * removed from the list.
+ */
+ object ExtensionMethodType {
+ def unapply(tp: Type) = tp match {
+ case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF =>
+ Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) ))
+ case _ =>
+ None
+ }
+ }
+
/** This method removes the `$this` argument from the parameter list a method.
*
* A method may be a `PolyType`, in which case we tear out the `$this` and the class
- * type params from its nested `MethodType`.
- * It may be a `MethodType`, either with a curried parameter list in which the first argument
- * is a `$this` - we just return the rest of the list.
- * This means that the corresponding symbol was generated during `extmethods`.
- *
- * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list.
- * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards,
- * for instance, during `specialize`.
- * In this case, the first argument is `$this` and we just get rid of it.
+ * type params from its nested `MethodType`. Or it may be a MethodType, as
+ * described at the ExtensionMethodType extractor.
*/
private def normalize(stpe: Type, clazz: Symbol): Type = stpe match {
case PolyType(tparams, restpe) =>
- // Split the type parameters of the extension method into two groups,
- // corresponding the to class and method type parameters.
- val numClassParams = clazz.typeParams.length
- val methTParams = tparams dropRight numClassParams
- val classTParams = tparams takeRight numClassParams
-
- GenPolyType(methTParams,
- normalize(restpe.substSym(classTParams, clazz.typeParams), clazz))
- case MethodType(List(thiz), restpe) if thiz.name == nme.SELF =>
- restpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
- case MethodType(thiz :: params, restpe) =>
- MethodType(params, restpe)
+ // method type parameters, class type parameters
+ val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length)
+ GenPolyType(mtparams,
+ normalize(restpe.substSym(ctparams, clazz.typeParams), clazz))
+ case ExtensionMethodType(thiz, etpe) =>
+ etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
case _ =>
stpe
}
class Extender(unit: CompilationUnit) extends TypingTransformer(unit) {
-
private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
@@ -134,31 +137,54 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
+ /** We will need to clone the info of the original method (which obtains clones
+ * of the method type parameters), clone the type parameters of the value class,
+ * and create a new polymethod with the union of all those type parameters, with
+ * their infos adjusted to be consistent with their new home. Example:
+ *
+ * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ * def baz[B >: A](x: B): List[B] = x :: xs
+ * // baz has to be transformed into this extension method, where
+ * // A is cloned from class Foo and B is cloned from method baz:
+ * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+ * }
+ *
+ * TODO: factor out the logic for consolidating type parameters from a class
+ * and a method for re-use elsewhere, because nobody will get this right without
+ * some higher level facilities.
+ */
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- // No variance for method type parameters
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
+ val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth
+ // Start with the class type parameters - clones will be method type parameters
+ // so must drop their variance.
+ val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
+
+ val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*)
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
- def transform(clonedType: Type): Type = clonedType match {
- case MethodType(params, restpe) =>
- // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried.
- MethodType(List(thisParam), clonedType)
- case NullaryMethodType(restpe) =>
- MethodType(List(thisParam), restpe)
- }
- val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth
- val selfParamSingletonType = singleType(currentOwner.companionModule.thisType, thisParam)
- GenPolyType(
- tparams ::: newTypeParams,
- transform(restpe) substThisAndSym (clazz, selfParamSingletonType, clazz.typeParams, newTypeParams)
- )
- }
+ val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult))
+ val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam)
- private def allParams(tpe: Type): List[Symbol] = tpe match {
- case MethodType(params, res) => params ::: allParams(res)
- case _ => List()
- }
+ def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass)
+ def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass)
+
+ // We can't substitute symbols on the entire polytype because we
+ // need to modify the bounds of the cloned type parameters, but we
+ // don't want to substitute for the cloned type parameters themselves.
+ val tparams = tparamsFromMethod ::: tparamsFromClass
+ GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType))
+ // For reference, calling fix on the GenPolyType plays out like this:
+ // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966]
+ // do not conform to method extension$baz#16148's type parameter bounds
+ //
+ // And the difference is visible here. See how B is bounded from below by A#16149
+ // in both cases, but in the failing case, the other type parameter has turned into
+ // a different A. (What is that A? It is a clone of the original A created in
+ // SubstMap during the call to substSym, but I am not clear on all the particulars.)
+ //
+ // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
+ // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
+ }
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -167,42 +193,62 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
wrap over other value classes anyway.
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
} else tree
case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension =>
- val companion = currentOwner.companionModule
- val origMeth = tree.symbol
- val extensionName = extensionNames(origMeth).head
- val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
- .setAnnotations(origMeth.annotations)
- companion.info.decls.enter(extensionMeth)
- val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner)
+ val origMeth = tree.symbol
+ val origThis = currentOwner
+ val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params
+ val origParams = vparamss.flatten map (_.symbol)
+ val companion = origThis.companionModule
+
+ def makeExtensionMethodSymbol = {
+ val extensionName = extensionNames(origMeth).head
+ val extensionMeth = (
+ companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ setAnnotations origMeth.annotations
+ )
+ companion.info.decls.enter(extensionMeth)
+ }
+
+ val extensionMeth = makeExtensionMethodSymbol
+ val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis)
extensionMeth setInfo newInfo
- log("Value class %s spawns extension method.\n Old: %s\n New: %s".format(
- currentOwner,
- origMeth.defString,
- extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo
-
- def thisParamRef = gen.mkAttributedStableRef(extensionMeth.info.params.head setPos extensionMeth.pos)
- val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info
- val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams
- val extensionBody = rhs
+
+ log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}")
+
+ val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo
+ val extensionParams = allParameters(extensionMono)
+ val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
+
+ val extensionBody = (
+ rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
- .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail)
- .substituteThis(currentOwner, thisParamRef)
- .changeOwner((origMeth, extensionMeth))
- extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
- val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
- List(This(currentOwner)))
- val extensionCall = atOwner(origMeth) {
- localTyper.typedPos(rhs.pos) {
- gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol))
- }
- }
- deriveDefDef(tree)(_ => extensionCall)
+ .substituteSymbols(origParams, extensionParams)
+ .substituteThis(origThis, extensionThis)
+ .changeOwner(origMeth -> extensionMeth)
+ )
+
+ // Record the extension method ( FIXME: because... ? )
+ extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody))
+
+ // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
+ // which leaves the actual argument application for extensionCall.
+ val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ val targs = origTpeParams map (_.tpeHK)
+ val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
+
+ // Apply all the argument lists.
+ deriveDefDef(tree)(_ =>
+ atOwner(origMeth)(
+ localTyper.typedPos(rhs.pos)(
+ gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol))
+ )
+ )
+ )
case _ =>
super.transform(tree)
}
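To make the rewrite above concrete, a hedged sketch of roughly what the Extender produces for the Foo/baz example in the extensionMethInfo comment (synthesized names and exact trees are approximations, not taken from the patch):

// Source: a value class whose method must become an extension method.
class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
  def baz[B >: A](x: B): List[B] = x :: xs
}

// Approximate post-extmethods shape: the body moves into the companion object and the
// original method becomes a forwarder (the synthesized name is illustrative only).
//
// class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
//   def baz[B >: A](x: B): List[B] = Foo.baz$extension[B, A](this)(x)
// }
// object Foo {
//   final def baz$extension[B >: A, A <: AnyRef]($this: Foo[A])(x: B): List[B] = x :: $this.xs
// }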
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index f338e390bb..965063a724 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -231,7 +231,17 @@ abstract class UnCurry extends InfoTransform
* If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
*
*/
- def transformFunction(fun: Function): Tree =
+ def transformFunction(fun: Function): Tree = {
+ fun.tpe match {
+ // can happen when analyzer plugins assign refined types to functions, e.g.
+ // (() => Int) { def apply(): Int @typeConstraint }
+ case RefinedType(List(funTp), decls) =>
+ debuglog(s"eliminate refinement from function type ${fun.tpe}")
+ fun.tpe = funTp
+ case _ =>
+ ()
+ }
+
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
@@ -239,10 +249,7 @@ abstract class UnCurry extends InfoTransform
// only get here when running under -Xoldpatmat
synthPartialFunction(fun)
case _ =>
- val parents = (
- if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
- else addSerializable(ObjectClass.tpe, fun.tpe)
- )
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
@@ -275,6 +282,7 @@ abstract class UnCurry extends InfoTransform
}
}
+ }
/** Transform a function node (x => body) of type PartialFunction[T, R] where
* body = expr match { case P_i if G_i => E_i }_i=1..n
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 78175f393a..b50486306d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -25,6 +25,7 @@ trait Analyzer extends AnyRef
with TypeDiagnostics
with ContextErrors
with StdAttachments
+ with AnalyzerPlugins
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
new file mode 100644
index 0000000000..28f620dbb5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -0,0 +1,225 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/**
+ * @author Lukas Rytz
+ * @version 1.0
+ */
+trait AnalyzerPlugins { self: Analyzer =>
+ import global._
+
+
+ trait AnalyzerPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Let analyzer plugins change the expected type before type checking a tree.
+ */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+
+ /**
+ * Let analyzer plugins modify the type that has been computed for a tree.
+ *
+ * @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
+ * @param typer The typer that type checked `tree`
+ * @param tree The type-checked tree
+ * @param mode Mode that was used for typing `tree`
+ * @param pt Expected type that was used for typing `tree`
+ */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+
+ /**
+ * Let analyzer plugins change the types assigned to definitions. For definitions that have
+ * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the
+ * type is inferred by typing the definition's righthand side.
+ *
+ * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt`
+ * TypeTree of the definition (for DefDef and ValDef).
+ *
+ * (*) If the type of a method or value is inferred, the type-checked tree is stored in the
+ * `analyzer.transformed` hash map, indexed by the definition's rhs tree.
+ *
+ * NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
+ * method is called from the type completer of a recursive method, type checking the method
+ * rhs will invoke the same completer again. It might be possible to avoid this situation by
+ * assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
+ * will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
+ *
+ * The hooks into `typeSig` allow analyzer plugins to add annotations to (or change the types
+ * of) definition symbols. This cannot be achieved by using `pluginsTyped`: that method
+ * is only called during type checking, so changing the type of a symbol at this point is too
+ * late: references to the symbol might already be typed and therefore obtain the original
+ * type assigned during naming.
+ *
+ * @param defTree is the definition for which the type was computed. The different cases are
+ * outlined below. Note that this tree is untyped (for methods and values with inferred type,
+ * the typed rhs trees are available in analyzer.transformed).
+ *
+ * Case defTree: Template
+ * - tpe : A ClassInfoType for the template
+ * - typer: The typer for template members, i.e. expressions and definitions of defTree.body
+ * - pt : WildcardType
+ * - the class symbol is accessible through typer.context.owner
+ *
+ * Case defTree: ClassDef
+ * - tpe : A ClassInfoType, or a PolyType(params, ClassInfoType) for polymorphic classes.
+ * The class type is the one computed by templateSig, i.e. through the above case
+ * - typer: The typer for the class. Note that this typer has a different context than the
+ * typer for the template.
+ * - pt : WildcardType
+ *
+ * Case defTree: ModuleDef
+ * - tpe : A ClassInfoType computed by templateSig
+ * - typer: The typer for the module. context.owner of this typer is the module class symbol
+ * - pt : WildcardType
+ *
+ * Case defTree: DefDef
+ * - tpe : The type of the method (MethodType, PolyType or NullaryMethodType). (*)
+ * - typer: The typer for the rhs of this method
+ * - pt : If tpt.isEmpty, either the result type from the overridden method, or WildcardType.
+ * Otherwise the type obtained from typing tpt.
+ * - Note that for constructors, pt is the class type which the constructor creates. To type
+ * check the rhs of the constructor however, the expected type has to be WildcardType (see
+ * Typers.typedDefDef)
+ *
+ * Case defTree: ValDef
+ * - tpe : The type of this value. (*)
+ * - typer: The typer for the rhs of this value
+ * - pt : If tpt.isEmpty, WildcardType. Otherwise the type obtained from typing tpt.
+ * - Note that pluginsTypeSig might be called multiple times for the same ValDef since it is
+ * used to compute the types of the accessor methods (see `pluginsTypeSigAccessor`)
+ *
+ * Case defTree: TypeDef
+ * - tpe : The type obtained from typing rhs (PolyType if the TypeDef defines a polymorphic type)
+ * - typer: The typer for the rhs of this type
+ * - pt : WildcardType
+ */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = tpe
+
+ /**
+ * Modify the types of field accessors. The namer phase creates method types for getters and
+ * setters based on the type of the corresponding field.
+ *
+ * Note: in order to compute the method type of an accessor, the namer calls `typeSig` on the
+ * `ValDef` tree of the corresponding field. This implies that the `pluginsTypeSig` method
+ * is potentially called multiple times for the same ValDef tree.
+ *
+ * @param tpe The method type created by the namer for the accessor
+ * @param typer The typer for the ValDef (not for the rhs)
+ * @param tree The ValDef corresponding to the accessor
+ * @param sym The accessor method symbol (getter, setter, beanGetter or beanSetter)
+ */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type pt, taking into account the given mode (see method adapt in trait Typers).
+ */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type pt, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Modify the type of a return expression. By default, return expressions have type
+ * NothingClass.tpe.
+ *
+ * @param tpe The type of the return expression
+ * @param typer The typer that was used for typing the return tree
+ * @param tree The typed return expression tree
+ * @param pt The return type of the enclosing method
+ */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
+ }
+
+
+
+ /** A list of registered analyzer plugins */
+ private var analyzerPlugins: List[AnalyzerPlugin] = Nil
+
+ /** Registers a new analyzer plugin */
+ def addAnalyzerPlugin(plugin: AnalyzerPlugin) {
+ if (!analyzerPlugins.contains(plugin))
+ analyzerPlugins = plugin :: analyzerPlugins
+ }
+
+
+ /** @see AnalyzerPlugin.pluginsPt */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ if (analyzerPlugins.isEmpty) pt
+ else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
+ if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+
+ /** @see AnalyzerPlugin.pluginsTyped */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTpe = addAnnotations(tree, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersTpe
+ else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypeSig */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+
+ /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+
+ /** @see AnalyzerPlugin.canAdaptAnnotations */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
+ annotCheckersExists || {
+ if (analyzerPlugins.isEmpty) false
+ else analyzerPlugins.exists(plugin =>
+ plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
+ }
+ }
+
+ /** @see AnalyzerPlugin.adaptAnnotations */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
+ if (analyzerPlugins.isEmpty) annotCheckersTree
+ else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
+ if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypedReturn */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersType
+ else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ }
+}
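A hedged sketch (hypothetical names, not part of the patch) of how client code holding a `global: Global` reference might register one of these hooks:

import scala.tools.nsc.Global

class AnalyzerPluginDemo(val global: Global) {
  import global._
  import analyzer.{ AnalyzerPlugin, Typer }

  object LoggingAnalyzerPlugin extends AnalyzerPlugin {
    // stay active only up to pickler, as the isActive documentation above suggests
    override def isActive(): Boolean = global.phase.id < global.currentRun.picklerPhase.id

    // observe the inferred type of every tree and hand it through unchanged
    override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
      if (settings.verbose.value) inform(s"typed tree at ${tree.pos}: $tpe")
      tpe
    }
  }

  analyzer.addAnalyzerPlugin(LoggingAnalyzerPlugin)
}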
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 2e5d61cc6b..fbf23968f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -760,10 +760,14 @@ trait ContextErrors {
else " of " + expanded.getClass
))
- def MacroImplementationNotFoundError(expandee: Tree) =
- macroExpansionError(expandee,
+ def MacroImplementationNotFoundError(expandee: Tree) = {
+ val message =
"macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
+ (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ else "")
+ macroExpansionError(expandee, message)
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index af2aeefecd..620665126e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -35,7 +35,7 @@ trait Contexts { self: Analyzer =>
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
- private val startContext = {
+ private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
@@ -342,6 +342,16 @@ trait Contexts { self: Analyzer =>
c
}
+ /**
+ * A context for typing constructor parameter ValDefs, super or self invocation arguments and default getters
+ * of constructors. These expressions need to be type checked in a scope outside the class, cf. spec 5.3.1.
+ *
+ * This method is called by namer / typer where `this` is the context for the constructor DefDef. The
+ * owner of the resulting (new) context is the outer context for the Template, i.e. the context for the
+ * ClassDef. This means that class type parameters will be in scope. The value parameters of the current
+ * constructor are also entered into the new constructor scope. Members of the class however will not be
+ * accessible.
+ */
def makeConstructorContext = {
var baseContext = enclClass.outer
while (baseContext.tree.isInstanceOf[Template])
@@ -361,6 +371,8 @@ trait Contexts { self: Analyzer =>
enterLocalElems(c.scope.elems)
}
}
+ // Enter the scope elements of this (the scope for the constructor DefDef) into the new constructor scope.
+ // Concretely, this will enter the value parameters of the constructor.
enterElems(this)
argContext
}
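A hedged source-level illustration (not from the patch) of the spec 5.3.1 scoping this constructor context implements: constructor parameter defaults and self-invocation arguments see the enclosing scope, the class type parameters and the constructor's own value parameters, but not the members of the class being constructed.

object ConstructorScopeDemo {
  val fallback = 42
  class C[A](val a: A, n: Int = fallback) {    // ok: `fallback` comes from the enclosing scope
    val member = 1
    def this(a: A) = this(a, fallback)         // ok: self-invocation arguments are typed outside the class
    // def this(a: A, b: A) = this(a, member)  // would not compile: class members are not in scope here
  }
}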
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 581f9f3bfa..74078a4ed3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -257,8 +257,8 @@ trait Infer extends Checkable {
tp1 // @MAT aliases already handled by subtyping
}
- private val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
- private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
+ private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
+ private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
@@ -411,8 +411,19 @@ trait Infer extends Checkable {
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
+ *
* [Martin] I think Infer is also created by Erasure, with the default
* implementation of isCoercible
+ * [Paulp] (Assuming the above must refer to my comment on isCoercible)
+ * Nope, I examined every occurrence of Inferencer in trunk. It
+ * appears twice as a self-type, once at its definition, and once
+ * where it is instantiated in Typers. There are no others.
+ *
+ % ack -A0 -B0 --no-filename '\bInferencer\b' src
+ self: Inferencer =>
+ self: Inferencer =>
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 0ba76643ca..245656e2d7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -684,6 +684,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* the expandee with an error marker set if there has been an error
*/
def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ if (settings.Ymacronoexpand.value) return expandee // SI-6812
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index acc4f7ff67..99557d1527 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -235,8 +235,8 @@ trait MethodSynthesis {
context.unit.synthetics get meth match {
case Some(mdef) =>
context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
@@ -329,6 +329,7 @@ trait MethodSynthesis {
*/
def category: Symbol
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
@@ -389,7 +390,7 @@ trait MethodSynthesis {
result
}
def derivedTree: DefDef =
- factoryMeth(mods & flagsMask | flagsExtra, name, tree, symbolic = false)
+ factoryMeth(mods & flagsMask | flagsExtra, name, tree)
def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
def flagsMask: Long = AccessFlags
def name: TermName = tree.name.toTermName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 98b6264051..c728185d4e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -65,7 +65,18 @@ trait Namers extends MethodSynthesis {
case ModuleDef(_, _, _) => tree.symbol.moduleClass
case _ => tree.symbol
}
- newNamer(context.makeNewScope(tree, sym))
+ def isConstrParam(vd: ValDef) = {
+ (sym hasFlag PARAM | PRESUPER) &&
+ !vd.mods.isJavaDefined &&
+ sym.owner.isConstructor
+ }
+ val ownerCtx = tree match {
+ case vd: ValDef if isConstrParam(vd) =>
+ context.makeConstructorContext
+ case _ =>
+ context
+ }
+ newNamer(ownerCtx.makeNewScope(tree, sym))
}
def createInnerNamer() = {
newNamer(context.make(context.tree, owner, newScope))
@@ -423,6 +434,7 @@ trait Namers extends MethodSynthesis {
def enterSyms(trees: List[Tree]): Namer = {
trees.foldLeft(this: Namer) { (namer, t) =>
val ctx = namer enterSym t
+ // for Import trees, enterSym returns a changed context, so we need a new namer
if (ctx eq namer.context) namer
else newNamer(ctx)
}
@@ -521,20 +533,19 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = copyDefDef.symbol
- val lazyType = completerOf(copyDefDef, tparams)
+ def enterCopyMethod(copyDef: DefDef): Symbol = {
+ val sym = copyDef.symbol
+ val lazyType = completerOf(copyDef)
/** Assign the types of the class parameters to the parameters of the
* copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
val classParamss = constructorType.paramss
- val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
- map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyDef.vparamss, classParamss)((copyParams, classParams) =>
map2(copyParams, classParams)((copyP, classP) =>
copyP.tpt setType subst(classP.tpe)
)
@@ -542,24 +553,28 @@ trait Namers extends MethodSynthesis {
}
sym setInfo {
- mkTypeCompleter(copyDefDef) { sym =>
+ mkTypeCompleter(copyDef) { sym =>
assignParamTypes()
lazyType complete sym
}
}
}
- def completerOf(tree: Tree): TypeCompleter = completerOf(tree, treeInfo.typeParameters(tree))
- def completerOf(tree: Tree, tparams: List[TypeDef]): TypeCompleter = {
+
+ def completerOf(tree: Tree): TypeCompleter = {
val mono = namerOf(tree.symbol) monoTypeCompleter tree
+ val tparams = treeInfo.typeParameters(tree)
if (tparams.isEmpty) mono
else {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
+ /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered
+ * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`.
+ * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds:
+       *   !tree.symbol.isAbstractType || tparams.forall(_.symbol == NoSymbol)
+ * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter.
+ */
if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ?
createNamer(tree) enterSyms tparams
- new PolyTypeCompleter(tparams, mono, tree, context) //@M
+ new PolyTypeCompleter(tparams, mono, context) //@M
}
}
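For orientation, a minimal sketch of what the synthetic copy method handled by enterCopyMethod amounts to for a case class; the expanded signature in the comment is illustrative, not the compiler's literal output:

    // Hedged sketch: the copy method takes the primary constructor's parameters,
    // each defaulting to the corresponding field of the current instance.
    case class Person[A](name: String, tag: A)
    // conceptually: def copy[A](name: String = this.name, tag: A = this.tag): Person[A]
    object CopyDemo {
      val renamed = Person("ann", 1).copy(name = "bob")   // Person("bob", 1)
    }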
@@ -621,9 +636,9 @@ trait Namers extends MethodSynthesis {
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
- enterCopyMethod(tree, tparams)
+ enterCopyMethod(tree)
else
- sym setInfo completerOf(tree, tparams)
+ sym setInfo completerOf(tree)
}
def enterClassDef(tree: ClassDef) {
@@ -736,13 +751,13 @@ trait Namers extends MethodSynthesis {
}
}
- def accessorTypeCompleter(tree: ValDef, isSetter: Boolean = false) = mkTypeCompleter(tree) { sym =>
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- if (isSetter)
- MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
- else
- NullaryMethodType(typeSig(tree))
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ else NullaryMethodType(typeSig(tree))
+ pluginsTypeSigAccessor(tp, typer, tree, sym)
}
}
}
@@ -805,17 +820,12 @@ trait Namers extends MethodSynthesis {
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- // compute result type from rhs
- val typedBody =
+ val rhsTpe =
if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
else defnTyper.computeType(tree.rhs, pt)
- val typedDefn = widenIfNecessary(tree.symbol, typedBody, pt)
- assignTypeToTree(tree, typedDefn)
- }
-
- private def assignTypeToTree(tree: ValOrDefDef, tpe: Type): Type = {
- tree.tpt defineType tpe setPos tree.pos.focus
+ val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
+ tree.tpt defineType defnTpe setPos tree.pos.focus
tree.tpt.tpe
}
@@ -892,163 +902,257 @@ trait Namers extends MethodSynthesis {
for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
cda.companionModuleClassNamer = templateNamer
}
- ClassInfoType(parents, decls, clazz)
+ val classTp = ClassInfoType(parents, decls, clazz)
+ pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType)
}
- private def classSig(tparams: List[TypeDef], impl: Template): Type = {
+ private def classSig(cdef: ClassDef): Type = {
+ val clazz = cdef.symbol
+ val ClassDef(_, _, tparams, impl) = cdef
val tparams0 = typer.reenterTypeParams(tparams)
val resultType = templateSig(impl)
- GenPolyType(tparams0, resultType)
+ val res = GenPolyType(tparams0, resultType)
+ val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType)
+
+ // Already assign the type to the class symbol (monoTypeCompleter will do it again).
+ // Allows isDerivedValueClass to look at the info.
+ clazz setInfo pluginsTp
+ if (clazz.isDerivedValueClass) {
+ log("Ensuring companion for derived value class " + cdef.name + " at " + cdef.pos.show)
+ clazz setFlag FINAL
+ // Don't force the owner's info lest we create cycles as in SI-6357.
+ enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
+ }
+ pluginsTp
}
- private def methodSig(ddef: DefDef, mods: Modifiers, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): Type = {
- val meth = owner
- val clazz = meth.owner
- // enters the skolemized version into scope, returns the deSkolemized symbols
- val tparamSyms = typer.reenterTypeParams(tparams)
- // since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
- var vparamSymss = enterValueParams(vparamss)
+ private def moduleSig(mdef: ModuleDef): Type = {
+ val moduleSym = mdef.symbol
+      // The info of both the module and the moduleClass symbols needs to be assigned. monoTypeCompleter assigns
+ // the result of typeSig to the module symbol. The module class info is assigned here as a side-effect.
+ val result = templateSig(mdef.impl)
+ val pluginsTp = pluginsTypeSig(result, typer, mdef, WildcardType)
+ // Assign the moduleClass info (templateSig returns a ClassInfoType)
+ val clazz = moduleSym.moduleClass
+ clazz setInfo pluginsTp
+ // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+      // (clazz.info would be the ClassInfoType, which is not what should be assigned to the module symbol)
+ clazz.tpe
+ }
+
+ /**
+ * The method type for `ddef`.
+ *
+ * If a PolyType(tparams, restp) is returned, `tparams` are the external symbols (not type skolems),
+ * i.e. instances of AbstractTypeSymbol. All references in `restp` to the type parameters are TypeRefs
+ * to these non-skolems.
+ *
+ * For type-checking the rhs (in case the result type is inferred), the type skolems of the type parameters
+ * are entered in scope. Equally, the parameter symbols entered into scope have types which refer to those
+ * skolems: when type-checking the rhs, references to parameters need to have types that refer to the skolems.
+ * In summary, typing an rhs happens with respect to the skolems.
+ *
+ * This means that the method's result type computed by the typer refers to skolems. In order to put it
+ * into the method type (the result of methodSig), typeRefs to skolems have to be replaced by references
+ * to the non-skolems.
+ */
+ private def methodSig(ddef: DefDef): Type = {
// DEPMETTODO: do we need to skolemize value parameter symbols?
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
- tpt setPos meth.pos.focus
- }
- var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val site = clazz.thisType
- /** Called for all value parameter lists, right to left
- * @param vparams the symbols of one parameter list
- * @param restpe the result type (possibly a MethodType)
+ val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
+
+ val meth = owner
+ val methOwner = meth.owner
+ val site = methOwner.thisType
+
+ /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
+ * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
+ * into scope and returns the non-skolems.
*/
- def makeMethodType(vparams: List[Symbol], restpe: Type) = {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- if (owner.isJavaDefined)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
- else
- MethodType(vparams, restpe)
- }
+ val tparamSyms = typer.reenterTypeParams(tparams)
+
+ val tparamSkolems = tparams.map(_.symbol)
+
+ /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
+ * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ var vparamSymss = enterValueParams(vparamss)
+
+ /**
+     * Creates a method type using tparamSyms and vparamSymss as argument symbols and `restpe` as the result type.
+     * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
+     * so the resulting type is a valid external method type; it does not contain (references to) skolems.
+ */
def thisMethodType(restpe: Type) = {
val checkDependencies = new DependentTypeChecker(context)(this)
checkDependencies check vparamSymss
// DEPMETTODO: check not needed when they become on by default
checkDependencies(restpe)
- GenPolyType(
+ val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
+ // TODODEPMET: check that we actually don't need to do anything here
+ // new dependent method types: probably OK already, since 'enterValueParams' above
+ // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
+ // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
+ // so re-use / adapt that)
+ if (meth.isJavaDefined)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
+ else
+ MethodType(vparams, restpe)
+ }
+
+
+ val res = GenPolyType(
tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
if (vparamSymss.isEmpty) NullaryMethodType(restpe)
// vparamss refer (if they do) to skolemized tparams
else (vparamSymss :\ restpe) (makeMethodType)
)
+ res.substSym(tparamSkolems, tparamSyms)
}
- def transformedResult =
- thisMethodType(resultPt).substSym(tparams map (_.symbol), tparamSyms)
+ /**
+     * Creates a schematic method type which has WildcardTypes for unspecified
+ * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
+ * type schema is
+ *
+ * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
+ *
+     * where T is the non-skolem type parameter.
+ */
+ def methodTypeSchema(resTp: Type) = {
+      // for all params without an explicit type, set the type to WildcardType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ thisMethodType(resTp)
+ }
- // luc: added .substSym from skolemized to deSkolemized
- // site.memberType(sym): PolyType(tparams, MethodType(..., ...))
- // ==> all references to tparams are deSkolemized
- // thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized.
- // ==> the two didn't match
- //
- // for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
- // class A { def foo[T](a: T)(b: T = a) = a }
- // class B extends A { override def foo[U](a: U)(b: U) = b }
- def overriddenSymbol =
- intersectionType(clazz.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches transformedResult)
+ def overriddenSymbol(resTp: Type) = {
+ intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
+ sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
}
- // TODO: see whether this or something similar would work instead.
- //
+ }
+ // TODO: see whether this or something similar would work instead:
// def overriddenSymbol = meth.nextOverriddenSymbol
- // fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
- // try to complete from matching definition in base type
- mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
- val overridden = overriddenSymbol
- if (overridden != NoSymbol && !overridden.isOverloaded) {
- overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- resultPt = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
- case mt => mt
- }
+ /**
+ * If `meth` doesn't have an explicit return type, extracts the return type from the method
+     * overridden by `meth` (if there is a unique one). This type is later used as the expected
+ * type for computing the type of the rhs. The resulting type references type skolems for
+ * type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
+ *
+ * As a first side effect, this method assigns a MethodType constructed using this
+ * return type to `meth`. This allows omitting the result type for recursive methods.
+ *
+     * As another side effect, this method also assigns parameter types from the overridden
+ * method to parameters of `meth` that have missing types (the parser accepts missing
+ * parameter types under -Yinfer-argument-types).
+ */
+ def typesFromOverridden(methResTp: Type): Type = {
+ val overridden = overriddenSymbol(methResTp)
+ if (overridden == NoSymbol || overridden.isOverloaded) {
+ methResTp
+ } else {
+ overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
+ var overriddenTp = site.memberType(overridden) match {
+ case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
+ case mt => mt
+ }
for (vparams <- vparamss) {
- var pps = resultPt.params
+ var overriddenParams = overriddenTp.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- val paramtpe = pps.head.tpe
- vparam.symbol setInfo paramtpe
- vparam.tpt defineType paramtpe setPos vparam.pos.focus
+ val overriddenParamTp = overriddenParams.head.tpe
+              // references to type parameters in overriddenParamTp link to the type skolems, so the
+ // assigned type is consistent with the other / existing parameter types in vparamSymss.
+ vparam.symbol setInfo overriddenParamTp
+ vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- pps = pps.tail
+ overriddenParams = overriddenParams.tail
}
- resultPt = resultPt.resultType
+ overriddenTp = overriddenTp.resultType
}
- resultPt match {
- case NullaryMethodType(rtpe) => resultPt = rtpe
- case MethodType(List(), rtpe) => resultPt = rtpe
+
+ overriddenTp match {
+ case NullaryMethodType(rtpe) => overriddenTp = rtpe
+ case MethodType(List(), rtpe) => overriddenTp = rtpe
case _ =>
}
+
if (tpt.isEmpty) {
// provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(resultPt)
+ meth setInfo thisMethodType(overriddenTp)
+ overriddenTp
+ } else {
+ methResTp
}
}
}
- // Add a () parameter section if this overrides some method with () parameters.
- if (clazz.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
- _.info.isInstanceOf[MethodType])) {
+
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe
+ tpt setPos meth.pos.focus
+ }
+
+ val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
+ val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
+ typesFromOverridden(methResTp)
+ } else {
+ methResTp
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ if (methOwner.isClass && vparamss.isEmpty &&
+ overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
vparamSymss = ListOfNil
}
+
+ // issue an error for missing parameter types
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
MissingParameterOrValTypeError(vparam)
vparam.tpt defineType ErrorType
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
+
+ addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
// (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
- if (fastTrack contains ddef.symbol) ddef.symbol setFlag MACRO
+ if (fastTrack contains meth) meth setFlag MACRO
// macro defs need to be typechecked in advance
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (ddef.symbol.isTermMacro) {
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- typer.computeMacroDefType(ddef, pt)
+ if (meth.isTermMacro) {
+ typer.computeMacroDefType(ddef, resTpFromOverride)
}
- thisMethodType({
+ val res = thisMethodType({
val rt = (
if (!tpt.isEmpty) {
- typer.typedType(tpt).tpe
+ methResTp
} else {
- // replace deSkolemized symbols with skolemized ones
- // (for resultPt computed by looking at overridden symbol, right?)
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- assignTypeToTree(ddef, typer, pt)
- }
- )
+            // the return type is inferred; we don't just use resTpFromOverride. Here, C.f has type String:
+ // trait T { def f: Object }; class C <: T { def f = "" }
+ // using resTpFromOverride as expected type allows for the following (C.f has type A):
+ // trait T { def f: A }; class C <: T { implicit def b2a(t: B): A = ???; def f = new B }
+ assignTypeToTree(ddef, typer, resTpFromOverride)
+ })
// #2382: return type of default getters are always @uncheckedVariance
if (meth.hasDefault)
rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
else rt
})
+ pluginsTypeSig(res, typer, ddef, methResTp)
}
/**
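To make the comment about inferred result types concrete, here is a small sketch of the user-visible effect (class and member names are illustrative): the overridden member's type only serves as the expected type, and the type inferred from the rhs is what the member gets.

    // Hedged sketch: with the result type omitted, Impl.render is inferred as
    // String, not copied verbatim from Shows.render (Object).
    trait Shows { def render: Object }
    class Impl extends Shows { def render = "text" }
    object ShowsDemo {
      val s: String = (new Impl).render   // compiles because render: String
    }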
@@ -1060,9 +1164,9 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val clazz = meth.owner
+ val methOwner = meth.owner
val isConstr = meth.isConstructor
- val overridden = if (isConstr || !clazz.isClass) NoSymbol else overriddenSymbol
+ val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
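As background for addDefaultGetters, a hedged sketch of the shape it synthesizes; the name greet$default$1 follows the usual encoding and is shown only for illustration:

    // Hedged sketch: each defaulted parameter gets a synthetic getter on the owner.
    class Greeter {
      def greet(name: String = "world"): String = "hello, " + name
      // conceptually also generates: def greet$default$1: String = "world"
    }
    object GreeterDemo {
      val s = (new Greeter).greet()   // the compiler inserts greet(greet$default$1)
    }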
@@ -1112,7 +1216,7 @@ trait Namers extends MethodSynthesis {
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionSymbolOf(clazz, context)
+ val module = companionSymbolOf(methOwner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
module.attachments.get[ConstructorDefaultsAttachment] match {
@@ -1158,7 +1262,7 @@ trait Namers extends MethodSynthesis {
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
- clazz.resetFlag(INTERFACE) // there's a concrete member now
+ methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
// save the default getters as attachments in the method symbol. if compiling the
@@ -1183,15 +1287,31 @@ trait Namers extends MethodSynthesis {
}
}
+ private def valDefSig(vdef: ValDef) = {
+ val ValDef(_, _, tpt, rhs) = vdef
+ val result = if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ }
+ else assignTypeToTree(vdef, typer, WildcardType)
+ } else {
+ typer.typedType(tpt).tpe
+ }
+ pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+
+ }
+
//@M! an abstract type definition (abstract type member/type parameter)
// may take type parameters, which are in scope in its bounds
- private def typeDefSig(tpsym: Symbol, tparams: List[TypeDef], rhs: Tree) = {
+ private def typeDefSig(tdef: TypeDef) = {
+ val TypeDef(_, _, tparams, rhs) = tdef
// log("typeDefSig(" + tpsym + ", " + tparams + ")")
val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef)
val tp = typer.typedType(rhs).tpe match {
case TypeBounds(lt, rt) if (lt.isError || rt.isError) =>
TypeBounds.empty
- case tp @ TypeBounds(lt, rt) if (tpsym hasFlag JAVA) =>
+ case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) =>
TypeBounds(lt, objToAny(rt))
case tp =>
tp
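valDefSig keeps the old inline behaviour: a missing type with a missing rhs is an error, otherwise the rhs type is computed and widened. A small sketch of that widening, assuming the usual 2.10-era rules:

    // Hedged sketch: inferred val types are widened away from singleton/constant types.
    object WidenDemo {
      val a = 1          // inferred as Int, not the constant type Int(1)
      final val b = 2    // a final val without an annotation keeps the constant type
      var c = "s"        // inferred as String
    }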
@@ -1213,9 +1333,32 @@ trait Namers extends MethodSynthesis {
// However, separate compilation requires the symbol info to be
// loaded to do this check, but loading the info will probably
// lead to spurious cyclic errors. So omit the check.
- GenPolyType(tparamSyms, tp)
+ val res = GenPolyType(tparamSyms, tp)
+ pluginsTypeSig(res, typer, tdef, WildcardType)
}
+ private def importSig(imp: Import) = {
+ val Import(expr, selectors) = imp
+ val expr1 = typer.typedQualifier(expr)
+ typer checkStable expr1
+ if (expr1.symbol != null && expr1.symbol.isRootPackage)
+ RootImportError(imp)
+
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(imp) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
+ }
+
+
/** Given a case class
* case class C[Ts] (ps: Us)
* Add the following methods to toScope:
@@ -1239,6 +1382,11 @@ trait Namers extends MethodSynthesis {
caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym
}
+ /**
+   * typeSig is invoked by monoTypeCompleters. It returns the type of a definition which
+ * is then assigned to the corresponding symbol (typeSig itself does not need to assign
+ * the type to the symbol, but it can if necessary).
+ */
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
/** For definitions, transform Annotation trees to AnnotationInfos, assign
@@ -1271,84 +1419,33 @@ trait Namers extends MethodSynthesis {
}
val sym: Symbol = tree.symbol
- // @Lukas: I am not sure this is the right way to do things.
- // We used to only decorate the module class with annotations, which is
- // clearly wrong. Now we decorate both the class and the object.
- // But maybe some annotations are only meant for one of these but not for the other?
- //
- // TODO: meta-annotations to indicate class vs. object.
+
+ // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
annotate(sym)
if (sym.isModule) annotate(sym.moduleClass)
def getSig = tree match {
- case cdef @ ClassDef(_, name, tparams, impl) =>
- val clazz = tree.symbol
- val result = createNamer(tree).classSig(tparams, impl)
- clazz setInfo result
- if (clazz.isDerivedValueClass) {
- log("Ensuring companion for derived value class " + name + " at " + cdef.pos.show)
- clazz setFlag FINAL
- // Don't force the owner's info lest we create cycles as in SI-6357.
- enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
- }
- result
-
- case ModuleDef(_, _, impl) =>
- val clazz = sym.moduleClass
- clazz setInfo createNamer(tree).templateSig(impl)
- clazz.tpe
-
- case ddef @ DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
- // TODO: cleanup parameter list
- createNamer(tree).methodSig(ddef, mods, tparams, vparamss, tpt, rhs)
-
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val isBeforeSupercall = (
- (sym hasFlag PARAM | PRESUPER)
- && !mods.isJavaDefined
- && sym.owner.isConstructor
- )
- val typer1 = typer.constrTyperIf(isBeforeSupercall)
- if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- MissingParameterOrValTypeError(tpt)
- ErrorType
- }
- else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType)
- }
- else typer1.typedType(tpt).tpe
-
- case TypeDef(_, _, tparams, rhs) =>
- createNamer(tree).typeDefSig(sym, tparams, rhs) //@M!
-
- case Import(expr, selectors) =>
- val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
- if (expr1.symbol != null && expr1.symbol.isRootPackage)
- RootImportError(tree)
-
- if (expr1.isErrorTyped)
- ErrorType
- else {
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
- }
- }
+ case cdef: ClassDef =>
+ createNamer(tree).classSig(cdef)
+
+ case mdef: ModuleDef =>
+ createNamer(tree).moduleSig(mdef)
+
+ case ddef: DefDef =>
+ createNamer(tree).methodSig(ddef)
- val result =
- try getSig
- catch typeErrorHandler(tree, ErrorType)
+ case vdef: ValDef =>
+ createNamer(tree).valDefSig(vdef)
- result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
- case _ => result
+ case tdef: TypeDef =>
+ createNamer(tree).typeDefSig(tdef) //@M!
+
+ case imp: Import =>
+ importSig(imp)
}
+
+ try getSig
+ catch typeErrorHandler(tree, ErrorType)
}
def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
@@ -1508,14 +1605,25 @@ trait Namers extends MethodSynthesis {
}
}
- /** A class representing a lazy type with known type parameters.
+ /**
+ * A class representing a lazy type with known type parameters. `ctx` is the namer context in which the
+ * `owner` is defined.
+ *
+ * Constructing a PolyTypeCompleter for a DefDef creates type skolems for the type parameters and
+ * assigns them to the `tparams` trees.
*/
- class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
- private val ownerSym = owner.symbol
- override val typeParams = tparams map (_.symbol) //@M
- override val tree = restp.tree
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
+ // @M. If `owner` is an abstract type member, `typeParams` are all NoSymbol (see comment in `completerOf`),
+ // otherwise, the non-skolemized (external) type parameter symbols
+ override val typeParams = tparams map (_.symbol)
+
+ /* The definition tree (poly ClassDef, poly DefDef or HK TypeDef) */
+ override val tree = restp.tree
+
+ private val defnSym = tree.symbol
- if (ownerSym.isTerm) {
+ if (defnSym.isTerm) {
+ // for polymorphic DefDefs, create type skolems and assign them to the tparam trees.
val skolems = deriveFreshSkolems(tparams map (_.symbol))
map2(tparams, skolems)(_ setSymbol _)
}
@@ -1523,8 +1631,8 @@ trait Namers extends MethodSynthesis {
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
- if (ownerSym.isAbstractType)
- newNamerFor(ctx, owner) enterSyms tparams //@M
+ if (defnSym.isAbstractType)
+ newNamerFor(ctx, tree) enterSyms tparams //@M
restp complete sym
}
}
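The skolem handling in PolyTypeCompleter is what lets the rhs of a polymorphic method be typed against fresh skolems while the published signature refers to the external type parameters; a sketch of the user-visible consequence (names are illustrative):

    // Hedged sketch: both signatures refer to their own external T, so the
    // override matches even though each rhs was typed against fresh skolems.
    class Box { def wrap[T](x: T): List[T] = List(x) }
    class LoudBox extends Box {
      override def wrap[T](x: T): List[T] = { println(x); List(x) }
    }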
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index be218fcb02..2340c78f8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -268,26 +268,32 @@ trait NamesDefaults { self: Analyzer =>
*
* For by-name parameters, create a value
* x$n: () => T = () => arg
+ *
+ * For Ident(<unapply-selector>) arguments, no ValDef is created (SI-3353).
*/
- def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
+ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
- val argTpe = (
- if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
- }
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
- (context.scope.enter(s), byName, repeated)
+ val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ case Ident(nme.SELECTOR_DUMMY) =>
+ None // don't create a local ValDef if the argument is <unapply-selector>
+ case _ =>
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
+ Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
- case ((sym, byName, repeated), arg) =>
+ case (None, _) => None
+ case (Some((sym, byName, repeated)), arg) =>
val body =
if (byName) {
val res = blockTyper.typed(Function(List(), arg))
@@ -303,7 +309,7 @@ trait NamesDefaults { self: Analyzer =>
blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
} else arg
}
- atPos(body.pos)(ValDef(sym, body).setType(NoType))
+ Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
}
}
@@ -329,27 +335,29 @@ trait NamesDefaults { self: Analyzer =>
// ValDef's in the block), change the arguments to these local values.
case Apply(expr, typedArgs) =>
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
- val ref = gen.mkAttributedRef(vDef.symbol)
- atPos(vDef.pos.focus) {
- // for by-name parameters, the local value is a nullary function returning the argument
- tpe.typeSymbol match {
- case ByNameParamClass => Apply(ref, Nil)
- case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
- case _ => ref
+ val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match {
+ case None => origArg
+ case Some(vDef) =>
+ val ref = gen.mkAttributedRef(vDef.symbol)
+ atPos(vDef.pos.focus) {
+ // for by-name parameters, the local value is a nullary function returning the argument
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
- }
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
+ val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
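The Option-valued argValDefs keeps the established desugaring for ordinary named arguments while skipping the temporary for the <unapply-selector> case; the ordinary case still looks roughly like this (x$1/x$2 are illustrative fresh names, not guaranteed output):

    // Hedged sketch: named arguments are lifted into temporaries so side effects
    // run in call-site order, then passed to the method in definition-site order.
    object NamedArgsDemo {
      def diff(a: Int, b: Int): Int = a - b
      val r = diff(b = { println("b first"); 2 }, a = { println("a second"); 5 })
      // roughly: { val x$1 = { println("b first"); 2 }
      //            val x$2 = { println("a second"); 5 }
      //            diff(x$2, x$1) }                      // r == 3
    }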
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index b0745b4c09..4b53802d95 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -409,15 +409,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- val (typeTestTreeMaker, patBinderOrCasted) =
- if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
- (List(treeMaker), treeMaker.nextBinder)
- } else {
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
// no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
// SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
// TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
@@ -426,10 +420,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
*/
- (Nil, patBinder setInfo extractor.paramType)
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+          // check whether the type test implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+ // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
}
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
}
@@ -622,8 +627,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// to which type should the previous binder be casted?
def paramType : Type
- // binder has been casted to paramType if necessary
- def treeMaker(binder: Symbol, pos: Position): TreeMaker
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
@@ -637,6 +647,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case bp => bp
}
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
def subPatTypes: List[Type] =
if(isSeq) {
val TypeRef(pre, SeqClass, args) = seqTp
@@ -731,17 +746,25 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
- // binder has type paramType
- def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
val paramAccessors = binder.constrParamAccessors
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
val mutableBinders =
- if (paramAccessors exists (_.isMutable))
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
// checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders)
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
@@ -763,11 +786,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def resultType = tpe.finalResultType
def isSeq = extractorCall.symbol.name == nme.unapplySeq
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
override protected def seqTree(binder: Symbol): Tree =
@@ -824,6 +857,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
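PatternBoundToUnderscore feeds ignoredSubPatBinders, so wildcard-bound components never get a stored local; a minimal sketch of a pattern where only one binder needs to survive into the case body (class and method names are illustrative):

    // Hedged sketch: only `hi` is referenced after matching; the binder for the
    // `_` component falls into ignoredSubPatBinders and is never stored.
    object UnderscoreDemo {
      case class Span(lo: Int, hi: Int)
      def upper(s: Span): Int = s match {
        case Span(_, hi) => hi
      }
    }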
@@ -991,10 +1034,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
// unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
// mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
def emitVars = storedBinders.nonEmpty
@@ -1015,10 +1065,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
import CODE._
- def bindSubPats(in: Tree): Tree = if (!emitVars) in
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
else {
- val (subPatBindersStored, subPatRefsStored) = stored.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new collection.mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
}
}
@@ -1038,7 +1100,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val subPatRefs: List[Tree],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
- val prevBinder: Symbol) extends FunTreeMaker with PreserveSubPatBinders {
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
@@ -1081,27 +1147,35 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders {
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
import CODE._
val nextBinder = prevBinder // just passing through
// mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant)
- override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
}
override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
}
- // typetag-based tests are inserted by the type checker
- def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
-
object TypeTestTreeMaker {
// factored out so that we can consistently generate other representations of the tree that implements the test
// (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
@@ -1115,12 +1189,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result
def eqTest(pat: Tree, testedBinder: Symbol): Result
def and(a: Result, b: Result): Result
+ def tru: Result
}
object treeCondStrategy extends TypeTestCondStrategy { import CODE._
type Result = Tree
def and(a: Result, b: Result): Result = a AND b
+ def tru = TRUE_typed
def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
@@ -1151,6 +1227,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
def eqTest(pat: Tree, testedBinder: Symbol): Result = false
def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
}
}
@@ -1220,10 +1309,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// I think it's okay:
// - the isInstanceOf test includes a test for the element type
// - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
case _ => default
}
@@ -1235,6 +1330,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
def isPureTypeTest = renderCondition(pureTypeTestChecker)
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
}
@@ -1733,6 +1830,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = TrueCond
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
@@ -3694,11 +3792,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// nextBinder: T
// next == MatchMonad[U]
// returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- VAL(nextBinder) === res,
- next
- ))
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
// guardTree: Boolean
// next: MatchMonad[T]
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index fab53de0f0..b9fdd7280e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -11,6 +11,9 @@ import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.nsc.settings.AnyScalaVersion
+import scala.tools.nsc.settings.NoScalaVersion
/** <p>
* Post-attribution checking and transformation.
@@ -1369,10 +1372,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation)
- unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
- sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
- )
+ if (sym.hasMigrationAnnotation) {
+ val changed = try
+ settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
+ catch {
+ case e : NumberFormatException =>
+ unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
+ true
+ }
+ if (changed)
+ unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ }
}
private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
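checkMigration now compares the version the user says they are migrating from against the version recorded in the @migration annotation. A hedged sketch of that ordering test with plain tuples; shouldWarn and the (major, minor) encoding are illustrative stand-ins, not the compiler's ScalaVersion API:

    // Hedged sketch: warn only when migrating from a release strictly older
    // than the one in which the member changed semantics.
    object MigrationDemo {
      def shouldWarn(migratingFrom: (Int, Int), changedIn: (Int, Int)): Boolean = {
        val ((fMaj, fMin), (cMaj, cMin)) = (migratingFrom, changedIn)
        fMaj < cMaj || (fMaj == cMaj && fMin < cMin)
      }
      val warn  = shouldWarn((2, 9), (2, 10))    // true: 2.9 predates the change
      val quiet = shouldWarn((2, 10), (2, 10))   // false: already on the new semantics
    }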
@@ -1581,7 +1592,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
- if (settings.Xmigration28.value)
+      if (settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
@@ -1680,8 +1691,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
- if (currentOwner.isDerivedValueClass)
- currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index a68a084d8f..026c130a87 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -453,12 +453,12 @@ trait Typers extends Modes with Adaptations with Tags {
def reenterValueParams(vparamss: List[List[ValDef]]) {
for (vparams <- vparamss)
for (vparam <- vparams)
- vparam.symbol = context.scope enter vparam.symbol
+ context.scope enter vparam.symbol
}
def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
for (tparam <- tparams) yield {
- tparam.symbol = context.scope enter tparam.symbol
+ context.scope enter tparam.symbol
tparam.symbol.deSkolemize
}
@@ -872,7 +872,9 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
+ // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
}
else
@@ -1052,15 +1054,21 @@ trait Typers extends Modes with Adaptations with Tags {
def insertApply(): Tree = {
assert(!inHKMode(mode), modeString(mode)) //@M
- val qual = adaptToName(tree, nme.apply) match {
- case id @ Ident(_) =>
- val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
- else if (id.symbol.owner.isClass)
- context.enclosingSubClassContext(id.symbol.owner).prefix
- else NoPrefix
- stabilize(id, pre, EXPRmode | QUALmode, WildcardType)
- case sel @ Select(qualqual, _) =>
- stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType)
+ val adapted = adaptToName(tree, nme.apply)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+        // TODO reconcile the overlap between Typers#stabilize and TreeGen.stabilize
+ val qual = adapted match {
+ case This(_) =>
+ gen.stabilize(adapted)
+ case Ident(_) =>
+ val owner = adapted.symbol.owner
+ val pre =
+ if (owner.isPackageClass) owner.thisType
+ else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
+ else NoPrefix
+ stabilize0(pre)
+ case Select(qualqual, _) =>
+ stabilize0(qualqual.tpe)
case other =>
other
}
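For context, insertApply is the adaptation that lets a stable value with an apply member be used in function position by rewriting qual(args) into qual.apply(args); the rewrite above additionally handles a `this` qualifier by stabilizing it via gen.stabilize instead of falling through to the catch-all case. An illustration of the two source shapes involved (illustrative names, not from the patch):

    class Box {
      def apply(i: Int): Int = i * 2
      def twice(i: Int): Int = this(i)   // `this(i)` is typed as `this.apply(i)`
    }
    object Use {
      val b   = new Box
      val six = b(3)                     // typed as `b.apply(3)`
    }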
@@ -1071,8 +1079,8 @@ trait Typers extends Modes with Adaptations with Tags {
// begin adapt
tree.tpe match {
- case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
- adaptAnnotations(tree, mode, pt)
+ case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+ adaptAnnotations(tree, this, mode, pt)
case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
@@ -1176,8 +1184,8 @@ trait Typers extends Modes with Adaptations with Tags {
Select(tree, "to" + sym.name)
}
}
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
@@ -2058,13 +2066,23 @@ trait Typers extends Modes with Adaptations with Tags {
* @return ...
*/
def typedValDef(vdef: ValDef): ValDef = {
-// attributes(vdef)
+ val sym = vdef.symbol
+ val valDefTyper = {
+ val maybeConstrCtx =
+ if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
+ else context
+ newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
+ }
+ valDefTyper.typedValDefImpl(vdef)
+ }
+
+ // use typedValDef instead. this version is called after creating a new context for the ValDef
+ private def typedValDefImpl(vdef: ValDef) = {
val sym = vdef.symbol.initialize
- val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
val typedMods = typedModifiers(vdef.mods)
sym.annotations.map(_.completeInfo)
- var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
+ val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
@@ -2092,7 +2110,7 @@ trait Typers extends Modes with Adaptations with Tags {
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
+ transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
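The rewritten typedValDef decides up front whether the definition needs a constructor context: that is the case for constructor parameters and for early-initialized vals, both of which conceptually belong to the primary constructor rather than to the class body. The two shapes, written as ordinary source (illustrative example only):

    trait Greeting {
      val name: String
      val message = "hello, " + name
    }
    // `p` is a constructor parameter and `name` an early definition; both are
    // ValDefs that are now typed by a typer carrying a constructor context.
    class Greeter(p: Int) extends { val name = "world" } with Greeting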
@@ -2391,13 +2409,12 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeDef(tdef: TypeDef): TypeDef =
- typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){
- _.typedTypeDef0(tdef)
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
+ _.typedTypeDefImpl(tdef)
}
- // call typedTypeDef instead
- // a TypeDef with type parameters must always be type checked in a new scope
- private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ // use typedTypeDef instead. this version is called after creating a new context for the TypeDef
+ private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
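typedTypeDefImpl keeps the existing rule, enforced by typerWithCondLocalContext above, that a TypeDef with type parameters is checked in a fresh scope, so its parameters are re-entered locally and do not leak into the enclosing scope. A tiny example of such a definition:

    class Wrapper {
      type Pair[A] = (A, A)   // `A` is visible only inside this TypeDef's own scope
    }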
@@ -4452,8 +4469,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (typed(expr).tpe.typeSymbol != UnitClass)
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
- treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- .setType(adaptTypeOfReturn(expr1, restpt.tpe, NothingClass.tpe))
+ val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
+ val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ res.setType(tp)
}
}
}
@@ -5357,10 +5375,14 @@ trait Typers extends Modes with Adaptations with Tags {
typed(docdef.definition, mode, pt)
}
+ /**
+ * The typer with the correct context for a method definition. If the method is a default getter for
+ * a constructor default, the resulting typer has a constructor context (fixes SI-5543).
+ */
def defDefTyper(ddef: DefDef) = {
- val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
- newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(flag)
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
}
def typedAlternative(alt: Alternative) = {
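defDefTyper now names its condition: the typer gets a constructor context when the method being typed is the default getter of a constructor default argument (the SI-5543 fix described in the new doc comment). As a reminder of the desugaring involved, a constructor default becomes a synthetic getter on the companion object, roughly as sketched here (the `<init>$default$1` name is the standard encoding; the exact trees are illustrative):

    class Config(val retries: Int = 3)
    // is compiled approximately as if the companion object contained
    //   def <init>$default$1: Int = 3
    // and it is that synthetic getter which is now typed in a constructor context.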
@@ -5647,20 +5669,21 @@ trait Typers extends Modes with Adaptations with Tags {
lastTreeToTyper = tree
indentTyping()
- var alreadyTyped = false
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
+
val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
+ (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- alreadyTyped = tree.tpe ne null
+ val alreadyTyped = tree.tpe ne null
var tree1: Tree = if (alreadyTyped) tree else {
printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
@@ -5669,7 +5692,7 @@ trait Typers extends Modes with Adaptations with Tags {
"context.owner" -> context.owner
)
)
- typed1(tree, mode, dropExistential(pt))
+ typed1(tree, mode, dropExistential(ptPlugins))
}
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
@@ -5683,12 +5706,12 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, ptPlugins, tree)
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
) //DEBUG
}
if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -5703,7 +5726,7 @@ trait Typers extends Modes with Adaptations with Tags {
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+pt)
+ Console.println("exception when typing "+tree+", pt = "+ptPlugins)
if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
throw ex
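Taken together, the Typers hunks above thread the expected type and the resulting type through the new analyzer-plugin hooks (pluginsPt, pluginsTyped, pluginsTypedReturn, and the typer-aware canAdaptAnnotations/adaptAnnotations) rather than calling addAnnotations directly. Below is a minimal sketch of a no-op plugin wired into two of these hooks; the AnalyzerPlugin trait, the hook signatures, and addAnalyzerPlugin are assumed to be the ones introduced in AnalyzerPlugins.scala and to match the call sites above:

    import scala.tools.nsc.Global

    class TypingHooks(val global: Global) {
      import global._
      import analyzer._

      object NoOpPlugin extends AnalyzerPlugin {
        // Refine the expected type before typed1 runs (the pluginsPt call site above).
        override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
        // Adjust the type assigned to an already-typed tree (the pluginsTyped call sites above).
        override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
      }

      addAnalyzerPlugin(NoOpPlugin)
    }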
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 5782d7bbca..577aa087ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -79,8 +79,9 @@ trait Unapplies extends ast.TreeDSL
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
- private def classType(cdef: ClassDef, tparams: List[TypeDef], symbolic: Boolean = true): Tree = {
- val tycon = if (symbolic) REF(cdef.symbol) else Ident(cdef.name)
+ private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = {
+ // SI-7033 Unattributed to avoid forcing `cdef.symbol.info`.
+ val tycon = Ident(cdef.symbol)
if (tparams.isEmpty) tycon else AppliedTypeTree(tycon, tparams map toIdent)
}
@@ -112,7 +113,7 @@ trait Unapplies extends ast.TreeDSL
def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
+ Modifiers(OVERRIDE | FINAL | SYNTHETIC),
nme.toString_,
Nil,
ListOfNil,
@@ -133,10 +134,10 @@ trait Unapplies extends ast.TreeDSL
/** The apply method corresponding to a case class
*/
- def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef, symbolic: Boolean): DefDef = {
+ def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
val cparamss = constrParamss(cdef)
- def classtpe = classType(cdef, tparams, symbolic)
+ def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
DefDef(mods, name, tparams, cparamss, classtpe,
New(classtpe, mmap(cparamss)(gen.paramToArg)))
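factoryMeth is what synthesizes the companion apply of a case class; with the symbolic flag gone, the class type is always written via the Ident that classType leaves unattributed, so building the factory no longer forces the class's info (SI-7033). For reference, the tree it produces corresponds to source of roughly this shape:

    case class Person(name: String, age: Int)
    // the companion object gets, in effect:
    //   def apply(name: String, age: Int): Person = new Person(name, age)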
@@ -145,7 +146,7 @@ trait Unapplies extends ast.TreeDSL
/** The apply method corresponding to a case class
*/
- def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef, symbolic = true)
+ def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef)
/** The unapply method corresponding to a case class
*/