Diffstat (limited to 'src')
-rw-r--r--  src/build/pack.xml | 22
-rw-r--r--  src/compiler/scala/reflect/reify/Errors.scala | 5
-rw-r--r--  src/compiler/scala/reflect/reify/phases/Reshape.scala | 1
-rw-r--r--  src/compiler/scala/reflect/reify/utils/Extractors.scala | 65
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 5
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 3
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 57
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 62
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 1
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala | 33
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/base/comment/Body.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Index.scala | 19
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 29
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 43
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/CompilerControl.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Global.scala | 86
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/Picklers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/javac/JavaParsers.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/matching/Patterns.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 310
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Erasure.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 71
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 35
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Logic.scala | 644
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 709
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala | 258
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala | 615
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala | 674
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 614
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala | 256
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/Solving.scala | 242
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 35
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 11
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 18
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 3876
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 62
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 42
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 201
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/util/SimpleTracer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/util/TreeSet.scala | 20
-rw-r--r--  src/compiler/scala/tools/reflect/MacroImplementations.scala | 77
-rw-r--r--  src/library/scala/collection/immutable/ListMap.scala | 8
-rw-r--r--  src/library/scala/collection/mutable/ArrayBuffer.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/ArrayOps.scala | 23
-rw-r--r--  src/library/scala/collection/parallel/package.scala | 7
-rw-r--r--  src/library/scala/concurrent/Future.scala | 12
-rw-r--r--  src/library/scala/math/BigInt.scala | 2
-rw-r--r--  src/library/scala/math/Ordering.scala | 2
-rw-r--r--  src/library/scala/runtime/BoxedUnit.java | 2
-rwxr-xr-x  src/library/scala/xml/Utility.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/CompilerTest.scala | 2
-rw-r--r--  src/partest/scala/tools/partest/DirectTest.scala | 28
-rw-r--r--  src/partest/scala/tools/partest/nest/Diff.java | 873
-rw-r--r--  src/partest/scala/tools/partest/nest/DiffPrint.java | 606
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 41
-rw-r--r--  src/partest/scala/tools/partest/nest/PathSettings.scala | 3
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 5
-rw-r--r--  src/partest/scala/tools/partest/nest/RunnerManager.scala | 3
-rw-r--r--  src/reflect/scala/reflect/api/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/api/TypeTags.scala | 38
-rw-r--r--  src/reflect/scala/reflect/api/Types.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/ClassfileConstants.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala | 77
-rw-r--r--  src/reflect/scala/reflect/internal/Flags.scala | 14
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 17
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 19
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 5
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 38
-rw-r--r--  src/reflect/scala/reflect/internal/transform/UnCurry.scala | 11
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 4
-rw-r--r--  src/reflect/scala/reflect/runtime/JavaUniverse.scala | 2
98 files changed, 5228 insertions, 6265 deletions
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 79611b55a2..20c4034107 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -112,8 +112,10 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
+ <!-- be sure to use a relative symlink to make the distribution portable,
+ `resource` is relative to directory of `link` -->
<symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
- resource="${dists.dir}/archives/scala-${version.number}-sources.tgz"
+ resource="scala-${version.number}-sources.tgz"
overwrite="yes"/>
</target>
@@ -179,41 +181,41 @@ MAIN DISTRIBUTION PACKAGING
<target name="pack-maven.srcs" depends="pack-maven.libs">
<!-- Add missing src jars. -->
- <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
basedir="${src.dir}/jline/src/main/java">
<include name="**/*"/>
</jar>
<!-- Continuations plugin -->
- <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
basedir="${src.dir}/continuations/plugin">
<include name="**/*"/>
</jar>
</target>
<target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
- <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
basedir="${build-docs.dir}/jline">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
basedir="${build-docs.dir}/compiler">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
basedir="${build-docs.dir}/scalap">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
- basedir="${build-docs.dir}/scala-partest">
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
+ basedir="${build-docs.dir}/partest">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 7c66d5b9eb..2e57bc59a8 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -27,6 +27,11 @@ trait Errors {
throw new ReificationException(defaultErrorPosition, msg)
}
+ def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
+ val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
+ throw new ReificationException(ctt.pos, msg)
+ }
+
def CannotReifyWeakType(details: Any) = {
val msg = "cannot create a TypeTag" + details + ": use WeakTypeTag instead"
throw new ReificationException(defaultErrorPosition, msg)
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 7406f5d02d..a320718084 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -181,6 +181,7 @@ trait Reshape {
private def toPreTyperCompoundTypeTree(ctt: CompoundTypeTree): Tree = {
val CompoundTypeTree(tmpl @ Template(parents, self, stats)) = ctt
+ if (stats.nonEmpty) CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt)
assert(self eq emptyValDef, self)
val att = tmpl.attachments.get[CompoundTypeTreeOriginalAttachment]
val CompoundTypeTreeOriginalAttachment(parents1, stats1) = att.getOrElse(CompoundTypeTreeOriginalAttachment(parents, stats))
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 134ae13890..59cd4e5047 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -164,51 +164,30 @@ trait Extractors {
}
}
- object FreeDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
- case FreeTermDef(uref, name, binding, flags, origin) =>
- Some((uref, name, binding, flags, origin))
- case FreeTypeDef(uref, name, binding, flags, origin) =>
- Some((uref, name, binding, flags, origin))
- case _ =>
- None
- }
- }
-
- object FreeTermDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
- case
- ValDef(_, name, _, Apply(
- Select(Select(uref1 @ Ident(_), build1), newFreeTerm),
- List(
- _,
- _,
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
- Literal(Constant(origin: String)))))
- if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
- case _ =>
- None
- }
- }
-
- object FreeTypeDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
- case
- ValDef(_, name, _, Apply(
- Select(Select(uref1 @ Ident(_), build1), newFreeType),
- List(
- _,
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
- Literal(Constant(origin: String)))))
- if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
- case _ =>
- None
+ sealed abstract class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
+ def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = {
+ def acceptFreeTermFactory(name: Name) = {
+ (acceptTerms && name == nme.newFreeTerm) ||
+ (acceptTypes && name == nme.newFreeType)
+ }
+ tree match {
+ case
+ ValDef(_, name, _, Apply(
+ Select(Select(uref1 @ Ident(_), build1), freeTermFactory),
+ _ :+
+ Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+
+ Literal(Constant(origin: String))))
+ if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ Some(uref1, name, reifyBinding(tree), flags, origin)
+ case _ =>
+ None
+ }
}
}
+ object FreeDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = true)
+ object FreeTermDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = false)
+ object FreeTypeDef extends FreeDefExtractor(acceptTerms = false, acceptTypes = true)
object FreeRef {
def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
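Editor's note: the change above collapses three near-identical extractors (FreeDef, FreeTermDef, FreeTypeDef) into one sealed base class parameterized by which free-def factories it accepts. A minimal standalone sketch of that pattern follows; the names and the toy String payload are illustrative, not the reifier's actual trees.

    sealed abstract class DefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
      // Shared unapply logic; only the accept* flags differ between the concrete objects.
      def unapply(s: String): Option[String] =
        if (acceptTerms && s.startsWith("term:"))      Some(s.stripPrefix("term:"))
        else if (acceptTypes && s.startsWith("type:")) Some(s.stripPrefix("type:"))
        else None
    }
    object AnyDef  extends DefExtractor(acceptTerms = true,  acceptTypes = true)
    object TermDef extends DefExtractor(acceptTerms = true,  acceptTypes = false)
    object TypeDef extends DefExtractor(acceptTerms = false, acceptTypes = true)

    object ExtractorDemo extends App {
      // "type:Int" is matched by AnyDef and TypeDef, but not by TermDef.
      "type:Int" match {
        case TermDef(n) => println(s"term $n")
        case TypeDef(n) => println(s"type $n")   // prints: type Int
      }
    }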
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 411a27b9c7..bd6cf561b9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -14,8 +14,9 @@ set _LINE_TOOLCP=
:another_param
-if "%1%"=="-toolcp" (
- set _LINE_TOOLCP=%2%
+rem Use "%~1" to handle spaces in paths. See http://ss64.com/nt/syntax-args.html
+if "%~1"=="-toolcp" (
+ set _LINE_TOOLCP=%~2
shift
shift
goto another_param
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 116684c161..aea3e0d930 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -23,6 +23,7 @@ import plugins.Plugins
import ast._
import ast.parser._
import typechecker._
+import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
@@ -1179,7 +1180,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
("\n" + info1) :: info2 :: info3 mkString "\n\n"
}
- catch { case x: Exception => errorMessage }
+ catch { case _: Exception | _: TypeError => errorMessage }
/** The id of the currently active run
*/
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index e635c5e87d..6e39fc9aa1 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -56,6 +56,11 @@ trait DocComments { self: Global =>
else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
}
+ def fillDocComment(sym: Symbol, comment: DocComment) {
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ }
+
/** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
* missing sections of an inherited doc comment.
* If a symbol does not have a doc comment but some overridden version of it does,
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 2ad762fd55..0a12737572 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -327,22 +327,61 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
- else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol)))) {
- val dupl = tpt.duplicate
- dupl.tpe = null
- dupl
+ else {
+ val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
+ val isInferred = tpt.wasEmpty
+ if (refersToLocalSymbols || isInferred) {
+ val dupl = tpt.duplicate
+ dupl.tpe = null
+ dupl
+ } else {
+ tpt
+ }
}
- else tree
+ // If one of the type arguments of a TypeApply gets reset to an empty TypeTree, then this means that:
+ // 1) It isn't empty now (tpt.tpe != null), but it was empty before (tpt.wasEmpty).
+ // 2) Thus, its argument got inferred during a preceding typecheck.
+ // 3) Thus, all its arguments were inferred (because scalac can only infer all or nothing).
+ // Therefore, we can safely erase the TypeApply altogether and have it inferred once again in a subsequent typecheck.
+ // UPD: Actually there's another reason for erasing a type behind the TypeTree
+ // is when this type refers to symbols defined in the tree being processed.
+ // These symbols will be erased, because we can't leave alive a type referring to them.
+ // Here we can only hope that everything will work fine afterwards.
case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
transform(fn)
- case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
- tree
case EmptyTree =>
tree
case _ =>
val dupl = tree.duplicate
- if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel))
- dupl.symbol = NoSymbol
+ // Typically the resetAttrs transformer cleans both symbols and types.
+ // However there are exceptions when we cannot erase symbols due to idiosyncrasies of the typer.
+ // vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
+ //
+ // The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
+ // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+ // There are other non-idempotencies, but they are not worked around yet.
+ //
+ // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
+ //
+      // First of all, why do we need resetAttrs? For one, it's absolutely required to move trees around.
+ // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
+ // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
+ // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
+ // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
+ //
+ // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
+ // in one lexical scope, it can get resolved to something else.
+ //
+ // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
+ // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
+ // That's what localOnly and vetoScope are for.
+ if (dupl.hasSymbol) {
+ val sym = dupl.symbol
+ val vetoScope = localOnly && !(locals contains sym)
+ val vetoLabel = keepLabels && sym.isLabel
+ val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
+ if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+ }
dupl.tpe = null
dupl
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 6f79f639b9..b8791c15dc 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1107,32 +1107,33 @@ self =>
* | symbol
* | null
* }}}
- * @note The returned tree does not yet have a position
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false): Tree = {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
- if (in.token == SYMBOLLIT)
- Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else if (in.token == INTERPOLATIONID)
- interpolatedString(inPattern = inPattern)
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT | STRINGPART => in.strVal.intern()
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
- null
- })
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
+ atPos(start) {
+ def finish(value: Any): Tree = {
+ val t = Literal(Constant(value))
+ in.nextToken()
+ t
+ }
+ if (in.token == SYMBOLLIT)
+ Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+ else if (in.token == INTERPOLATIONID)
+ interpolatedString(inPattern = inPattern)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ =>
+ syntaxErrorOrIncomplete("illegal literal", true)
+ null
+ })
+ }
}
private def stringOp(t: Tree, op: TermName) = {
@@ -1332,11 +1333,10 @@ self =>
def parseWhile = {
val start = in.offset
atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.WHILE_PREFIX)
val cond = condExpr()
newLinesOpt()
val body = expr()
- makeWhile(lname, cond, body)
+ makeWhile(start, cond, body)
}
}
parseWhile
@@ -1510,7 +1510,7 @@ self =>
atPos(in.offset) {
val name = nme.toUnaryName(rawIdent())
if (name == nme.UNARY_- && isNumericLit)
- simpleExprRest(atPos(in.offset)(literal(isNegated = true)), canApply = true)
+ simpleExprRest(literal(isNegated = true), canApply = true)
else
Select(stripParens(simpleExpr()), name)
}
@@ -1535,7 +1535,7 @@ self =>
def simpleExpr(): Tree = {
var canApply = true
val t =
- if (isLiteral) atPos(in.offset)(literal())
+ if (isLiteral) literal()
else in.token match {
case XMLSTART =>
xmlLiteral()
@@ -1924,7 +1924,7 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
case Ident(nme.MINUS) =>
- return atPos(start) { literal(isNegated = true, inPattern = true) }
+ return literal(isNegated = true, inPattern = true, start = start)
case _ =>
}
case _ =>
@@ -1942,7 +1942,7 @@ self =>
atPos(start, start) { Ident(nme.WILDCARD) }
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL =>
- atPos(start) { literal(inPattern = true) }
+ literal(inPattern = true)
case LPAREN =>
atPos(start)(makeParens(noSeq.patterns()))
case XMLSTART =>
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 79f0bcf149..1aa50be83a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -142,24 +142,16 @@ trait Scanners extends ScannersCommon {
/** Should doc comments be built? */
def buildDocs: Boolean = forScaladoc
- /** buffer for the documentation comment
+ /** holder for the documentation comment
*/
- var docBuffer: StringBuilder = null
- var docPos: Position = null
+ var docComment: DocComment = null
- /** Return current docBuffer and set docBuffer to null */
def flushDoc: DocComment = {
- val ret = if (docBuffer != null) DocComment(docBuffer.toString, docPos) else null
- docBuffer = null
+ val ret = docComment
+ docComment = null
ret
}
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
protected def foundComment(value: String, start: Int, end: Int) = ()
protected def foundDocComment(value: String, start: Int, end: Int) = ()
@@ -236,11 +228,11 @@ trait Scanners extends ScannersCommon {
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docBuffer = null
+ docComment = null
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docBuffer = null
+ docComment = null
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -433,6 +425,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\"') {
nextRawChar()
if (ch == '\"') {
+ offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
@@ -442,6 +435,7 @@ trait Scanners extends ScannersCommon {
strVal = ""
}
} else {
+ offset += 1
getStringPart(multiLine = false)
sepRegions = STRINGLIT :: sepRegions // indicate single line string part
}
@@ -546,7 +540,7 @@ trait Scanners extends ScannersCommon {
nextChar()
} while ((ch != CR) && (ch != LF) && (ch != SU))
} else {
- docBuffer = null
+ docComment = null
var openComments = 1
appendToComment()
nextChar()
@@ -554,24 +548,23 @@ trait Scanners extends ScannersCommon {
var buildingDocComment = false
if (ch == '*' && buildDocs) {
buildingDocComment = true
- docBuffer = new StringBuilder("/**")
}
while (openComments > 0) {
do {
do {
if (ch == '/') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
if (ch == '*') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
openComments += 1
}
}
if (ch != '*' && ch != SU) {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
}
} while (ch != '*' && ch != SU)
while (ch == '*') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
}
} while (ch != '/' && ch != SU)
if (ch == '/') nextChar()
@@ -1310,7 +1303,8 @@ trait Scanners extends ScannersCommon {
}
override def foundDocComment(value: String, start: Int, end: Int) {
- docPos = new RangePosition(unit.source, start, start, end)
+ val docPos = new RangePosition(unit.source, start, start, end)
+ docComment = new DocComment(value, docPos)
unit.comment(docPos, value)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 7969bb9c20..b5771454f8 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -251,8 +251,13 @@ abstract class TreeBuilder {
else CompoundTypeTree(Template(tps, emptyValDef, Nil))
/** Create tree representing a while loop */
- def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
+ val lname = freshTermName(nme.WHILE_PREFIX)
+ def default = wrappingPos(List(cond, body)) match {
+ case p if p.isDefined => p.endOrPoint
+ case _ => startPos
+ }
+ val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
@@ -589,7 +594,7 @@ abstract class TreeBuilder {
if (contextBounds.isEmpty) vparamss
else {
val mods = Modifiers(if (owner.isTypeName) PARAMACCESSOR | LOCAL | PRIVATE else PARAM)
- def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)
+ def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT | SYNTHETIC, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)
val evidenceParams = contextBounds map makeEvidenceParam
val vparamssLast = if(vparamss.nonEmpty) vparamss.last else Nil
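Editor's note: makeWhile above desugars a `while` loop into a LabelDef whose body re-invokes the label while the condition holds. A hedged sketch of what that encoding amounts to at runtime, modelled with an ordinary tail-recursive function rather than the compiler's Trees API:

    object WhileSketch extends App {
      import scala.annotation.tailrec

      // The synthetic while$N label behaves like a nullary tail-recursive function.
      def whileLoop(cond: => Boolean)(body: => Unit): Unit = {
        @tailrec def whileLabel(): Unit =
          if (cond) { body; whileLabel() } else ()
        whileLabel()
      }

      var i = 0
      whileLoop(i < 3) { println(i); i += 1 }   // prints 0, 1, 2
    }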
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 7ba212f42e..b74770f051 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -171,6 +171,7 @@ trait Members {
var returnType: TypeKind = _
var recursive: Boolean = false
var bytecodeHasEHs = false // set by ICodeReader only, used by Inliner to prevent inlining (SI-6188)
+ var bytecodeHasInvokeDynamic = false // set by ICodeReader only, used by Inliner to prevent inlining until we have proper invoke dynamic support
/** local variables and method parameters */
var locals: List[Local] = Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 8c9a72638d..a3a0edb35d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -409,6 +409,25 @@ trait Opcodes { self: ICodes =>
override def category = mthdsCat
}
+
+ /**
+   * A placeholder entry that allows us to parse class files with invoke dynamic
+ * instructions. Because the compiler doesn't yet really understand the
+ * behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
+ * this instruction will cause a failure. The only optimization that
+ * should ever look at non-Scala generated icode is the inliner, and it
+ * has been modified to not examine any method with invokeDynamic
+ * instructions. So if this poison pill ever causes problems then
+ * there's been a serious misunderstanding
+ */
+ // TODO do the real thing
+ case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+ private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
+ override def consumed = error
+ override def produced = error
+ override def producedTypes = error
+ override def category = error
+ }
case class BOX(boxType: TypeKind) extends Instruction {
assert(boxType.isValueType && (boxType ne UNIT)) // documentation
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 0abbe44b02..7c46d648fe 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -2143,8 +2143,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val start = st.pop()
seen ::= LocVarEntry(lv, start, end)
case _ =>
- // TODO SI-6049
- getCurrentCUnit().warning(iPos, "Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6049")
+ // TODO SI-6049 track down the cause for these.
+ debugwarn(s"$iPos: Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6191")
}
}
@@ -2259,16 +2259,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// info calls so that types are up to date; erasure may add lateINTERFACE to traits
hostSymbol.info ; methodOwner.info
- def isInterfaceCall(sym: Symbol) = (
- sym.isInterface && methodOwner != ObjectClass
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
|| sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
)
// whether to reference the type of the receiver or
- // the type of the method owner (if not an interface!)
+ // the type of the method owner
val useMethodOwner = (
style != Dynamic
- || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
|| hostSymbol.isBottomClass
+ || methodOwner == ObjectClass
)
val receiver = if (useMethodOwner) methodOwner else hostSymbol
val jowner = javaName(receiver)
@@ -2291,11 +2291,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
style match {
- case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
- case Dynamic if isInterfaceCall(receiver) => dbg("invokinterface"); jcode.invokeinterface(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
- case SuperCall(_) =>
+ case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
+ case Dynamic if needsInterfaceCall(receiver) => dbg("invokinterface"); jcode.invokeinterface(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
+ case SuperCall(_) =>
dbg("invokespecial")
jcode.invokespecial(jowner, jname, jtype)
initModule()
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 598965b982..36b294b289 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1196,16 +1196,16 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// info calls so that types are up to date; erasure may add lateINTERFACE to traits
hostSymbol.info ; methodOwner.info
- def isInterfaceCall(sym: Symbol) = (
- sym.isInterface && methodOwner != ObjectClass
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
|| sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
)
// whether to reference the type of the receiver or
- // the type of the method owner (if not an interface!)
+ // the type of the method owner
val useMethodOwner = (
style != Dynamic
- || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
|| hostSymbol.isBottomClass
+ || methodOwner == ObjectClass
)
val receiver = if (useMethodOwner) methodOwner else hostSymbol
val jowner = javaName(receiver)
@@ -1230,11 +1230,11 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
style match {
- case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
- case Dynamic if isInterfaceCall(receiver) => dbg("invokinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
- case SuperCall(_) =>
+ case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
+ case Dynamic if needsInterfaceCall(receiver) => dbg("invokinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
+ case SuperCall(_) =>
dbg("invokespecial")
jcode.emitINVOKESPECIAL(jowner, jname, jtype)
initModule()
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 521b6cc132..498db78636 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -961,6 +961,7 @@ abstract class Inliners extends SubComponent {
if(isInlineForbidden) { rs ::= "is annotated @noinline" }
if(inc.isSynchronized) { rs ::= "is synchronized method" }
if(inc.m.bytecodeHasEHs) { rs ::= "bytecode contains exception handlers / finally clause" } // SI-6188
+ if(inc.m.bytecodeHasInvokeDynamic) { rs ::= "bytecode contains invoke dynamic" }
if(rs.isEmpty) null else rs.mkString("", ", and ", "")
}
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
index 35390adcd9..cdcfeaae81 100755
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -10,15 +10,15 @@ import comment._
trait MemberLookupBase {
val global: Global
- val settings: doc.Settings
-
import global._
+
def internalLink(sym: Symbol, site: Symbol): Option[LinkTo]
def chooseLink(links: List[LinkTo]): LinkTo
def toString(link: LinkTo): String
+ def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal]
+ def warnNoLink: Boolean
import global._
- import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
import rootMirror.{RootPackage, EmptyPackage}
private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
@@ -83,7 +83,7 @@ trait MemberLookupBase {
}
links match {
case Nil =>
- if (!settings.docNoLinkWarnings.value)
+ if (warnNoLink)
reporter.warning(pos, "Could not find any member to link for \"" + query + "\".")
// (4) if we still haven't found anything, create a tooltip
Tooltip(query)
@@ -95,7 +95,7 @@ trait MemberLookupBase {
if (link == chosen) " [chosen]" else ""
toString(link) + chosenInfo + "\n"
}
- if (!settings.docNoLinkWarnings.value) {
+ if (warnNoLink) {
val allLinks = links.map(linkToString).mkString
reporter.warning(pos,
s"""The link target \"$query\" is ambiguous. Several members fit the target:
@@ -199,29 +199,6 @@ trait MemberLookupBase {
members.reverse
}
-
- def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
- val sym1 =
- if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
- else if (sym.isPackage)
- /* Get package object which has associatedFile ne null */
- sym.info.member(newTermName("package"))
- else sym
- Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
- val path = src.path
- settings.extUrlMapping get path map { url =>
- LinkToExternal(name, url + "#" + name)
- }
- } orElse {
- // Deprecated option.
- settings.extUrlPackageMapping find {
- case (pkg, _) => name startsWith pkg
- } map {
- case (_, url) => LinkToExternal(name, url + "#" + name)
- }
- }
- }
-
def externalSignature(sym: Symbol) = {
sym.info // force it, otherwise we see lazy types
(sym.nameString + sym.signatureString).replaceAll("\\s", "")
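Editor's note: this change removes the base trait's dependency on doc.Settings and replaces it with abstract hooks (findExternalLink, warnNoLink) that the concrete model factory implements. A small sketch of that hook-based shape, with made-up names:

    trait LookupBase {
      def warnNoLink: Boolean                          // hook: subclasses decide
      def findLink(name: String): Option[String]       // hook: subclasses decide
      def lookup(name: String): String =
        findLink(name) getOrElse {
          if (warnNoLink) Console.err.println(s"""Could not find any member to link for "$name".""")
          name
        }
    }

    object ModelLookup extends LookupBase {
      def warnNoLink = true
      def findLink(name: String) =
        if (name == "scala.List") Some("scala/collection/immutable/List.html") else None
    }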
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
index 02e662da85..eb0d751f3e 100755
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
@@ -75,16 +75,20 @@ object EntityLink {
def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
}
final case class HtmlTag(data: String) extends Inline {
- def canClose(open: HtmlTag) = {
- open.data.stripPrefix("<") == data.stripPrefix("</")
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+ private val (isEnd, tagName) = data match {
+ case Pattern(s1, s2) =>
+ (! s1.isEmpty, Some(s2.toLowerCase))
+ case _ =>
+ (false, None)
}
- def close = {
- if (data.indexOf("</") == -1)
- Some(HtmlTag("</" + data.stripPrefix("<")))
- else
- None
+ def canClose(open: HtmlTag) = {
+ isEnd && tagName == open.tagName
}
+
+ private val TagsNotToClose = Set("br", "img")
+ def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
}
/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
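Editor's note: the new HtmlTag implementation extracts the tag name once with a regex and reuses it for both canClose and close, skipping void tags such as <br> and <img>. A self-contained sketch of just the name-extraction step, reusing the same regex:

    object TagSketch extends App {
      private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r

      // Returns (isEndTag, lowercased tag name) for strings that look like a tag.
      def parse(data: String): (Boolean, Option[String]) = data match {
        case Pattern(slash, name) => (slash.nonEmpty, Some(name.toLowerCase))
        case _                    => (false, None)
      }

      println(parse("""<div class="x">"""))   // (false,Some(div))
      println(parse("</div>"))                // (true,Some(div))
      println(parse("not a tag"))             // (false,None)
    }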
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index ee78f4ea7a..6fdaaed75f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc.doc.html
import scala.xml.NodeSeq
+import scala.annotation.tailrec
/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
* (see method `HtmlPage.blockToHtml`).
@@ -209,9 +210,9 @@ private[html] object SyntaxHigh {
out.toString
}
- def parse(pre: String, i: Int): Int = {
+ @tailrec def parse(pre: String, i: Int): Unit = {
out append pre
- if (i == buf.length) return i
+ if (i == buf.length) return
buf(i) match {
case '\n' =>
parse("\n", i+1)
@@ -277,7 +278,6 @@ private[html] object SyntaxHigh {
} else
parse(buf(i).toChar.toString, i+1)
}
- i
}
parse("", 0)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index c76bdc58d9..8802d7c35c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -27,13 +27,19 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
val headers =
<xml:group>
<link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
</xml:group>
+ private val scripts = {
+ val sources =
+ (List("jquery.js", "jquery-ui.js", "jquery.layout.js", "scheduler.js", "index.js").map {
+ x => relativeLinkTo(List(x, "lib"))
+ }) :+ "index.js"
+
+ sources map {
+ src => <script defer="defer" type="text/javascript" src={src}></script>
+ }
+ }
+
val body =
<body>
<div id="library">
@@ -46,6 +52,7 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
<div id="content" class="ui-layout-center">
<iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
+ { scripts }
</body>
def letters: NodeSeq =
@@ -125,7 +132,7 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
</xml:group>
}
packageElem(universe.rootPackage)
- }</div></div><script src="index.js"></script>
+ }</div></div>
</div>
def packageQualifiedName(ety: DocTemplateEntity): String =
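Editor's note: the new scripts value moves the script tags out of the page headers and emits them with defer at the end of the body. A tiny sketch of generating such a node sequence from a list of sources; the paths here are illustrative.

    object ScriptsSketch extends App {
      import scala.xml.NodeSeq

      val sources = List("lib/jquery.js", "lib/index.js")
      val scripts: NodeSeq = NodeSeq.fromSeq(
        sources map { src => <script defer="defer" type="text/javascript" src={src}></script> }
      )
      println(scripts)
    }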
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index ea07ff29c4..63c77e7bb3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -40,14 +40,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<xml:group>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
<link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
- { if (universe.settings.docDiagrams.value) {
- <script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
- } else NodeSeq.Empty }
<script type="text/javascript">
if(top === self) {{
var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
@@ -61,6 +53,22 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</script>
</xml:group>
+ private val scripts = {
+ val sources = {
+ val default = List("jquery.js", "jquery-ui.js", "tools.tooltip.js", "template.js")
+ val forDiagrams = List("modernizr.custom.js", "diagrams.js")
+
+ (default ++ (if (universe.settings.docDiagrams.value) forDiagrams else Nil)) map {
+ x => x.replace('.', '-') -> relativeLinkTo(List(x, "lib"))
+ }
+ }
+
+ sources map {
+ case (id, src) =>
+ <script defer="defer" type="text/javascript" id={id} src={src}></script>
+ }
+ }
+
val valueMembers =
tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
@@ -215,7 +223,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
{ if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
- <h3>Shadowed Implict Value Members</h3>
+ <h3>Shadowed Implicit Value Members</h3>
<ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
@@ -280,8 +288,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else
<div id="footer"> { tpl.universe.settings.docfooter.value } </div>
}
-
-
+ { scripts }
</body>
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
index c7a767f992..23259a4ae8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -9,6 +9,7 @@ trait MemberLookup extends base.MemberLookupBase {
thisFactory: ModelFactory =>
import global._
+ import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
findTemplateMaybe(sym) match {
@@ -35,4 +36,28 @@ trait MemberLookup extends base.MemberLookupBase {
mbr.sym.signatureString + " in " + inTpl.sym.toString
case _ => link.toString
}
+
+ override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
+ val sym1 =
+ if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
+ else if (sym.isPackage)
+ /* Get package object which has associatedFile ne null */
+ sym.info.member(newTermName("package"))
+ else sym
+ Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
+ val path = src.path
+ settings.extUrlMapping get path map { url =>
+ LinkToExternal(name, url + "#" + name)
+ }
+ } orElse {
+ // Deprecated option.
+ settings.extUrlPackageMapping find {
+ case (pkg, _) => name startsWith pkg
+ } map {
+ case (_, url) => LinkToExternal(name, url + "#" + name)
+ }
+ }
+ }
+
+ override def warnNoLink = !settings.docNoLinkWarnings.value
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 0a469c9227..d9b173bc43 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -897,32 +897,36 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- /** */
def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
val aSym = annot.symbol
new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
lazy val annotationClass =
makeTemplate(annot.symbol)
- val arguments = { // lazy
- def noParams = annot.args map { _ => None }
- val params: List[Option[ValueParam]] = annotationClass match {
+ val arguments = {
+ val paramsOpt: Option[List[ValueParam]] = annotationClass match {
case aClass: DocTemplateEntity with Class =>
- (aClass.primaryConstructor map { _.valueParams.head }) match {
- case Some(vps) => vps map { Some(_) }
- case None => noParams
+ val constr = aClass.constructors collectFirst {
+ case c: MemberImpl if c.sym == annot.original.symbol => c
}
- case _ => noParams
+ constr flatMap (_.valueParams.headOption)
+ case _ => None
}
- assert(params.length == annot.args.length)
- (params zip annot.args) flatMap { case (param, arg) =>
- makeTree(arg) match {
- case Some(tree) =>
- Some(new ValueArgument {
- def parameter = param
+ val argTrees = annot.args map makeTree
+ paramsOpt match {
+ case Some (params) =>
+ params zip argTrees map { case (param, tree) =>
+ new ValueArgument {
+ def parameter = Some(param)
def value = tree
- })
- case None => None
- }
+ }
+ }
+ case None =>
+ argTrees map { tree =>
+ new ValueArgument {
+ def parameter = None
+ def value = tree
+ }
+ }
}
}
}
@@ -962,9 +966,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
sym.name == aSym.name &&
sym.isParamWithDefault
)
- (unit.body find (t => isCorrespondingParam(t.symbol))) match {
- case Some(ValDef(_,_,_,rhs)) => makeTree(rhs)
- case _ => None
+ unit.body find (t => isCorrespondingParam(t.symbol)) collect {
+ case ValDef(_,_,_,rhs) if rhs ne EmptyTree => makeTree(rhs)
}
case _ => None
}
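Editor's note: the default-value lookup above switches from matching on the Option returned by find to chaining collect, so the "found, but the right-hand side is empty" case simply falls through to None. A standalone sketch of that find-then-collect idiom with illustrative types:

    object CollectSketch extends App {
      case class Param(name: String, default: Option[String])
      val params = List(Param("x", None), Param("y", Some("42")))

      // collect on an Option keeps the value only when the partial function applies.
      def defaultOf(name: String): Option[String] =
        params find (_.name == name) collect { case Param(_, Some(rhs)) => rhs }

      println(defaultOf("y"))   // Some(42)
      println(defaultOf("x"))   // None: found, but it has no default
    }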
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
index bd7534ded4..fdad84d0bc 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -19,7 +19,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
val global: Global
import global._
- def makeTree(rhs: Tree): Option[TreeEntity] = {
+ def makeTree(rhs: Tree): TreeEntity = {
var expr = new StringBuilder
var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
@@ -80,17 +80,16 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
traverser.traverse(rhs)
- Some(new TreeEntity {
+ new TreeEntity {
val expression = expr.toString
val refEntity = refs
- })
+ }
}
- case pos: OffsetPosition =>
- Some(new TreeEntity {
+ case _ =>
+ new TreeEntity {
val expression = rhs.toString
val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
- })
- case _ => None
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index 73738ebd21..af82957a2e 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -164,17 +164,22 @@ trait CompilerControl { self: Global =>
/** Sets sync var `response` to doc comment information for a given symbol.
*
- * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
- * @param site The place where sym is observed.
- * @param source The source file that's supposed to contain the definition
- * @param response A response that will be set to the following:
- * If `source` contains a definition of a given symbol that has a doc comment,
- * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
- * Note: This operation does not automatically load `source`. If `source`
- * is unloaded, it stays that way.
+ * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param site The symbol where 'sym' is observed
+ * @param fragments All symbols that can contribute to the generated documentation
+ * together with their source files.
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition of a given symbol that has a doc comment,
+ * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
+ * Note: This operation does not automatically load sources that are not yet loaded.
*/
- def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) =
- postWorkItem(new AskDocCommentItem(sym, site, source, response))
+ def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
+ postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
+
+ @deprecated("Use method that accepts fragments", "2.10.2")
+ def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
+ askDocComment(sym, source, site, (sym,source)::Nil, response)
/** Sets sync var `response` to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
@@ -390,9 +395,9 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskDocCommentItem(val sym: Symbol, val site: Symbol, val source: SourceFile, response: Response[(String, String, Position)]) extends WorkItem {
- def apply() = self.getDocComment(sym, site, source, response)
- override def toString = "doc comment "+sym+" in "+source
+ case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+ def apply() = self.getDocComment(sym, source, site, fragments, response)
+ override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
def raiseMissing() =
response raise new MissingResponse
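Editor's note: askDocComment gains a fragments parameter while the old signature is kept as a deprecated forwarder, so existing callers keep compiling. A minimal sketch of that deprecate-and-forward pattern; the names are made up.

    class DocsApi {
      // New, richer signature.
      def comment(sym: String, fragments: List[String]): String =
        s"$sym (${fragments.size} fragments)"

      // Old signature retained for source compatibility; it just delegates.
      @deprecated("Use the overload that accepts fragments", "2.10.2")
      def comment(sym: String): String = comment(sym, Nil)
    }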
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 105b0e4833..c63cca9b88 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -73,7 +73,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
if (verboseIDE) println("[%s][%s]".format(projectName, msg))
override def forInteractive = true
- override def forScaladoc = settings.isScaladoc
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
@@ -705,8 +704,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* If we do just removeUnit, some problems with default parameters can ensue.
* Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
*/
- private def afterRunRemoveUnitOf(source: SourceFile) {
- toBeRemovedAfterRun += source.file
+ private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
+ toBeRemovedAfterRun ++= sources map (_.file)
}
/** A fully attributed tree located at position `pos` */
@@ -714,7 +713,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
case None =>
reloadSources(List(pos.source))
try typedTreeAt(pos)
- finally afterRunRemoveUnitOf(pos.source)
+ finally afterRunRemoveUnitsOf(List(pos.source))
case Some(unit) =>
informIDE("typedTreeAt " + pos)
parseAndEnter(unit)
@@ -764,14 +763,23 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
respond(response)(typedTree(source, forceReload))
}
+ private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
+ val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
+ sources filterNot (getUnit(_).isDefined) match {
+ case Nil =>
+ f(unitOfSrc)
+ case unknown =>
+ reloadSources(unknown)
+ try {
+ f(unitOfSrc)
+ } finally
+ afterRunRemoveUnitsOf(unknown)
+ }
+ }
+
private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
- getUnit(source) match {
- case None =>
- reloadSources(List(source))
- try f(getUnit(source).get)
- finally afterRunRemoveUnitOf(source)
- case Some(unit) =>
- f(unit)
+ withTempUnits(List(source)){ srcToUnit =>
+ f(srcToUnit(source))
}
/** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
@@ -835,48 +843,36 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+ unit.body foreachPartial {
+ case DocDef(comment, defn) if defn.symbol == sym =>
+ fillDocComment(defn.symbol, comment)
+ EmptyTree
+ case _: ValOrDefDef =>
+ EmptyTree
+ }
+ }
+
/** Implements CompilerControl.askDocComment */
- private[interactive] def getDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) {
- informIDE("getDocComment "+sym+" "+source)
+ private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
+ response: Response[(String, String, Position)]) {
+ informIDE(s"getDocComment $sym at $source, site $site")
respond(response) {
- withTempUnit(source){ u =>
- val mirror = findMirrorSymbol(sym, u)
+ withTempUnits(fragments.unzip._2){ units =>
+ for((sym, src) <- fragments) {
+ val mirror = findMirrorSymbol(sym, units(src))
+ if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
+ }
+ val mirror = findMirrorSymbol(sym, units(source))
if (mirror eq NoSymbol)
("", "", NoPosition)
else {
- forceDocComment(mirror, u)
- (expandedDocComment(mirror), rawDocComment(mirror), docCommentPos(mirror))
+ (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
}
}
}
- }
-
- private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
- // Either typer has been run and we don't find DocDef,
- // or we force the targeted typecheck here.
- // In both cases doc comment maps should be filled for the subject symbol.
- val docTree =
- unit.body find {
- case DocDef(_, defn) if defn.symbol eq sym => true
- case _ => false
- }
-
- for (t <- docTree) {
- debugLog("Found DocDef tree for "+sym)
- // Cannot get a typed tree at position since DocDef range is transparent.
- val prevPos = unit.targetPos
- val prevInterruptsEnabled = interruptsEnabled
- try {
- unit.targetPos = t.pos
- interruptsEnabled = true
- typeCheck(unit)
- } catch {
- case _: TyperResult => // ignore since we are after the side effect.
- } finally {
- unit.targetPos = prevPos
- interruptsEnabled = prevInterruptsEnabled
- }
- }
+ // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
+ newTyperRun()
}
def stabilizedType(tree: Tree): Type = tree match {
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index 84cb03c140..2b389158c3 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -166,8 +166,8 @@ trait Picklers { self: Global =>
.asClass (classOf[AskLinkPosItem])
implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
- (pkl[Symbol] ~ pkl[Symbol] ~ pkl[SourceFile])
- .wrapped { case sym ~ site ~ source => new AskDocCommentItem(sym, site, source, new Response) } { item => item.sym ~ item.site ~ item.source }
+ (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
+ .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
.asClass (classOf[AskDocCommentItem])
implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index 7bb2b7b7c8..f304eda753 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -8,7 +8,7 @@ import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
private[tests] trait PresentationCompilerInstance extends TestSettings {
protected val settings = new Settings
- protected val docSettings = new doc.Settings(_ => ())
+ protected val withDocComments = false
protected val compilerReporter: CompilerReporter = new InteractiveReporter {
override def compiler = PresentationCompilerInstance.this.compiler
@@ -16,7 +16,9 @@ private[tests] trait PresentationCompilerInstance extends TestSettings {
protected lazy val compiler: Global = {
prepareSettings(settings)
- new Global(settings, compilerReporter)
+ new Global(settings, compilerReporter) {
+ override def forScaladoc = withDocComments
+ }
}
/**
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index c4a672ac37..827ebe1678 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -57,15 +57,13 @@ trait ExprTyper {
// Typing it with a lazy val would give us the right type, but runs
// into compiler bugs with things like existentials, so we compile it
// behind a def and strip the NullaryMethodType which wraps the expr.
- val line = "def " + name + " = {\n" + code + "\n}"
+ val line = "def " + name + " = " + code
interpretSynthetic(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
- val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
- if (sym.info.typeSymbol eq UnitClass) NoSymbol
- else sym
+ sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
case _ => NoSymbol
}
}
@@ -82,7 +80,11 @@ trait ExprTyper {
case _ => NoSymbol
}
}
- beQuietDuring(asExpr()) orElse beQuietDuring(asDefn())
+ def asError(): Symbol = {
+ interpretSynthetic(code)
+ NoSymbol
+ }
+ beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
}
private var typeOfExpressionDepth = 0
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 43a8402fc7..0779e648cd 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -420,6 +420,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case FINAL =>
flags |= Flags.FINAL
in.nextToken
+ case DEFAULT =>
+ flags |= Flags.DEFAULTMETHOD
+ in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
in.nextToken
@@ -544,8 +547,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val vparams = formalParams()
if (!isVoid) rtpt = optArrayBrackets(rtpt)
optThrows()
+ val bodyOk = !inInterface || (mods hasFlag Flags.DEFAULTMETHOD)
val body =
- if (!inInterface && in.token == LBRACE) {
+ if (bodyOk && in.token == LBRACE) {
methodBody()
} else {
if (parentToken == AT && in.token == DEFAULT) {
@@ -800,13 +804,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val pos = in.currentPos
val name = identForType()
val (statics, body) = typeBody(AT, name)
- def getValueMethodType(tree: Tree) = tree match {
- case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate)
- case _ => None
- }
- var templ = makeTemplate(annotationParents, body)
- for (stat <- templ.body; tpt <- getValueMethodType(stat))
- templ = makeTemplate(annotationParents, makeConstructor(List(tpt)) :: templ.body)
+ val templ = makeTemplate(annotationParents, body)
addCompanionObject(statics, atPos(pos) {
ClassDef(mods, name, List(), templ)
})
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index f116a7c4c7..ef41246af9 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -402,7 +402,7 @@ trait Patterns extends ast.TreeDSL {
case _ => toPats(args)
}
- def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args.length)
+ def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args)
def resTypesString = resTypes match {
case Nil => "Boolean"
case xs => xs.mkString(", ")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 348c7f688f..9e5186b731 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -97,8 +97,15 @@ abstract class SymbolLoaders {
val clazz = enterClass(root, name, completer)
val module = enterModule(root, name, completer)
if (!clazz.isAnonymousClass) {
- assert(clazz.companionModule == module, module)
- assert(module.companionClass == clazz, clazz)
+ // Diagnostic for SI-7147
+ def msg: String = {
+ def symLocation(sym: Symbol) = if (sym == null) "null" else s"${sym.fullLocationString} (from ${sym.associatedFile})"
+ sm"""Inconsistent class/module symbol pair for `$name` loaded from ${symLocation(root)}.
+ |clazz = ${symLocation(clazz)}; clazz.companionModule = ${clazz.companionModule}
+ |module = ${symLocation(module)}; module.companionClass = ${module.companionClass}"""
+ }
+ assert(clazz.companionModule == module, msg)
+ assert(module.companionClass == clazz, msg)
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a517a33279..da117540b4 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -44,7 +44,7 @@ abstract class ClassfileParser {
def srcfile = srcfile0
- private def currentIsTopLevel = currentClass.toString.indexOf('$') < 0
+ private def currentIsTopLevel = !(currentClass.decodedName containsChar '$')
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
@@ -52,18 +52,14 @@ abstract class ClassfileParser {
private def handleMissing(e: MissingRequirementError) = {
if (settings.debug.value) e.printStackTrace
- throw new IOException("Missing dependency '" + e.req + "', required by " + in.file)
+ throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
}
private def handleError(e: Exception) = {
if (settings.debug.value) e.printStackTrace()
- throw new IOException("class file '%s' is broken\n(%s/%s)".format(
- in.file,
- e.getClass,
- if (e.getMessage eq null) "" else e.getMessage)
- )
+ throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
}
private def mismatchError(c: Symbol) = {
- throw new IOException("class file '%s' has location not matching its contents: contains ".format(in.file) + c)
+ throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
}
private def parseErrorHandler[T]: PartialFunction[Throwable, T] = {
@@ -72,8 +68,8 @@ abstract class ClassfileParser {
}
@inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
busy match {
- case Some(`sym`) => throw new IOException("unsatisfiable cyclic dependency in '%s'".format(sym))
- case Some(sym1) => throw new IOException("illegal class file dependency between '%s' and '%s'".format(sym, sym1))
+ case Some(`sym`) => throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
+ case Some(sym1) => throw new IOException(s"illegal class file dependency between '$sym' and '$sym1'")
case _ => ()
}
@@ -137,10 +133,13 @@ abstract class ClassfileParser {
(in.nextByte.toInt: @switch) match {
case CONSTANT_UTF8 | CONSTANT_UNICODE =>
in.skip(in.nextChar)
- case CONSTANT_CLASS | CONSTANT_STRING =>
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE =>
in.skip(2)
+ case CONSTANT_METHODHANDLE =>
+ in.skip(3)
case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
- | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT =>
+ | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
+ | CONSTANT_INVOKEDYNAMIC =>
in.skip(4)
case CONSTANT_LONG | CONSTANT_DOUBLE =>
in.skip(8)
@@ -229,8 +228,6 @@ abstract class ClassfileParser {
forceMangledName(tpe0.typeSymbol.name, false)
val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
-// println("new tpe: " + tpe + " at phase: " + phase)
-
if (name == nme.MODULE_INSTANCE_FIELD) {
val index = in.getChar(start + 1)
val name = getExternalName(in.getChar(starts(index) + 1))
@@ -241,14 +238,12 @@ abstract class ClassfileParser {
} else {
val origName = nme.originalName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
-// println("\t" + owner.info.member(name).tpe.widen + " =:= " + tpe)
f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
if (f == NoSymbol)
f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
// if it's an impl class, try to find it's static member inside the class
if (ownerTpe.typeSymbol.isImplClass) {
-// println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe)
f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
@@ -259,11 +254,13 @@ abstract class ClassfileParser {
f setInfo tpe
log("created fake member " + f.fullName)
}
-// println("\townerTpe.decls: " + ownerTpe.decls)
-// println("Looking for: " + name + ": " + tpe + " inside: " + ownerTpe.typeSymbol + "\n\tand found: " + ownerTpe.members)
}
}
- assert(f != NoSymbol, "could not find\n " + name + ": " + tpe + "\ninside:\n " + ownerTpe.members.mkString(", "))
+ assert(f != NoSymbol,
+ s"could not find $name: $tpe in $ownerTpe" + (
+ if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
+ )
+ )
values(index) = f
}
f
@@ -436,63 +433,58 @@ abstract class ClassfileParser {
sym
}
- /** Return the class symbol of the given name. */
- def classNameToSymbol(name: Name): Symbol = {
- def loadClassSymbol(name: Name): Symbol = {
- val file = global.classPath findSourceFile ("" +name) getOrElse {
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- warning("Class " + name + " not found - continuing with a stub.")
- return NoSymbol.newClass(name.toTypeName)
- }
- val completer = new global.loaders.ClassfileLoader(file)
- var owner: Symbol = rootMirror.RootClass
- var sym: Symbol = NoSymbol
- var ss: Name = null
- var start = 0
- var end = name indexOf '.'
-
- while (end > 0) {
- ss = name.subName(start, end)
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newPackage(ss) setInfo completer
- sym.moduleClass setInfo completer
- owner.info.decls enter sym
- }
- owner = sym.moduleClass
- start = end + 1
- end = name.indexOf('.', start)
- }
- ss = name.subName(0, start)
- owner.info.decls lookup ss orElse {
- sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
- debuglog("loaded "+sym+" from file "+file)
- sym
+ private def loadClassSymbol(name: Name): Symbol = {
+ val file = global.classPath findSourceFile ("" +name) getOrElse {
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ warning(s"Class $name not found - continuing with a stub.")
+ return NoSymbol.newClass(name.toTypeName)
+ }
+ val completer = new global.loaders.ClassfileLoader(file)
+ var owner: Symbol = rootMirror.RootClass
+ var sym: Symbol = NoSymbol
+ var ss: Name = null
+ var start = 0
+ var end = name indexOf '.'
+
+ while (end > 0) {
+ ss = name.subName(start, end)
+ sym = owner.info.decls lookup ss
+ if (sym == NoSymbol) {
+ sym = owner.newPackage(ss) setInfo completer
+ sym.moduleClass setInfo completer
+ owner.info.decls enter sym
}
+ owner = sym.moduleClass
+ start = end + 1
+ end = name.indexOf('.', start)
}
-
- def lookupClass(name: Name) = try {
- if (name.pos('.') == name.length)
- definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
- else
- rootMirror.getClass(name) // see tickets #2464, #3756
- } catch {
- case _: FatalError => loadClassSymbol(name)
+ ss = name.subName(0, start)
+ owner.info.decls lookup ss orElse {
+ sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
+ debuglog("loaded "+sym+" from file "+file)
+ sym
}
+ }
+ /** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
+ * The method called "getClassByName" should either return the class or not.
+ */
+ private def lookupClass(name: Name) = (
+ if (name containsChar '.')
+ rootMirror getClassByName name // see tickets #2464, #3756
+ else
+ definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
+ )
- innerClasses.get(name) match {
- case Some(entry) =>
- //println("found inner class " + name)
- val res = innerClasses.classSymbol(entry.externalName)
- //println("\trouted to: " + res)
- res
- case None =>
- //if (name.toString.contains("$")) println("No inner class: " + name + innerClasses + " while parsing " + in.file.name)
- lookupClass(name)
- }
+ /** Return the class symbol of the given name. */
+ def classNameToSymbol(name: Name): Symbol = {
+ if (innerClasses contains name)
+ innerClasses innerSymbol name
+ else
+ try lookupClass(name)
+ catch { case _: FatalError => loadClassSymbol(name) }
}
var sawPrivateConstructor = false
@@ -588,11 +580,9 @@ abstract class ClassfileParser {
def addEnclosingTParams(clazz: Symbol) {
var sym = clazz.owner
while (sym.isClass && !sym.isModuleClass) {
-// println("adding tparams of " + sym)
- for (t <- sym.tpe.typeArgs) {
-// println("\tadding " + (t.typeSymbol.name + "->" + t.typeSymbol))
+ for (t <- sym.tpe.typeArgs)
classTParams = classTParams + (t.typeSymbol.name -> t.typeSymbol)
- }
+
sym = sym.owner
}
}
@@ -646,7 +636,7 @@ abstract class ClassfileParser {
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
- val newParams = innerClasses.get(currentClass) match {
+ val newParams = innerClasses getEntry currentClass match {
case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
/* About `clazz.owner.isPackage` below: SI-5957
* For every nested java class A$B, there are two symbols in the scala compiler.
@@ -742,13 +732,10 @@ abstract class ClassfileParser {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
- val res = newExistentialType(eparams, t)
- if (settings.debug.value && settings.verbose.value)
- println("raw type " + classSym + " -> " + res)
- res
+ newExistentialType(eparams, t)
}
case tp =>
- assert(sig.charAt(index) != '<', tp)
+ assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
}
@@ -867,8 +854,6 @@ abstract class ClassfileParser {
val sig = pool.getExternalName(in.nextChar)
val newType = sigToType(sym, sig)
sym.setInfo(newType)
- if (settings.debug.value && settings.verbose.value)
- println("" + sym + "; signature = " + sig + " type = " + newType)
}
else in.skip(attrLen)
case tpnme.SyntheticATTR =>
@@ -885,10 +870,10 @@ abstract class ClassfileParser {
val c = pool.getConstant(in.nextChar)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
- else println("failure to convert " + c + " to " + symtype); //debug
+ else debugwarn(s"failure to convert $c to $symtype")
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
- debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute")
+ debugwarn(s"symbol ${sym.fullName} has pickled signature in attribute")
unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
@@ -933,6 +918,12 @@ abstract class ClassfileParser {
case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf
}
srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
+ case tpnme.CodeATTR =>
+ if (sym.owner.isInterface) {
+ sym setFlag DEFAULTMETHOD
+ log(s"$sym in ${sym.owner} is a java8+ default method.")
+ }
+ in.skip(attrLen)
case _ =>
in.skip(attrLen)
}
@@ -1020,16 +1011,18 @@ abstract class ClassfileParser {
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
- } catch {
- case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
- case ex: Throwable =>
+ }
+ catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case ex: java.lang.Error => throw ex
+ case ex: Throwable =>
// We want to be robust when annotations are unavailable, so the very least
// we can do is warn the user about the exception
// There was a reference to ticket 1135, but that is outdated: a reference to a class not on
// the classpath would *not* end up here. A class not found is signaled
// with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
// and that should never be swallowed silently.
- warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ warning(s"Caught: $ex while parsing annotations in ${in.file}")
if (settings.debug.value) ex.printStackTrace()
None // ignore malformed annotations
@@ -1044,6 +1037,9 @@ abstract class ClassfileParser {
for (n <- 0 until nClasses) {
// FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
val cls = pool.getClassSymbol(in.nextChar.toInt)
+ // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
+ // and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
+ cls.initialize
sym.addThrowsAnnotation(cls)
}
}
@@ -1104,7 +1100,7 @@ abstract class ClassfileParser {
unlinkIfPresent(cName.toTypeName)
}
- for (entry <- innerClasses.values) {
+ for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
@@ -1142,14 +1138,9 @@ abstract class ClassfileParser {
case tpnme.InnerClassesATTR if !isScala =>
val entries = in.nextChar.toInt
for (i <- 0 until entries) {
- val innerIndex = in.nextChar.toInt
- val outerIndex = in.nextChar.toInt
- val nameIndex = in.nextChar.toInt
- val jflags = in.nextChar.toInt
- if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) {
- val entry = InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
- innerClasses += (pool.getClassName(innerIndex) -> entry)
- }
+ val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+ if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
+ innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
}
case _ =>
in.skip(attrLen)
@@ -1163,72 +1154,69 @@ abstract class ClassfileParser {
def externalName = pool getClassName external
def outerName = pool getClassName outer
def originalName = pool getName name
+ def isStatic = ClassfileParser.this.isStatic(jflags)
+ def isModule = originalName.isTermName
+ def scope = if (isStatic) staticScope else instanceScope
+ def enclosing = if (isStatic) enclModule else enclClass
+
+ // The name of the outer class, without its trailing $ if it has one.
+ private def strippedOuter = nme stripModuleSuffix outerName
+ private def isInner = innerClasses contains strippedOuter
+ private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
+ private def enclModule = enclClass.companionModule
+
+ private def staticWord = if (isStatic) "static " else ""
+ override def toString = s"$staticWord$originalName in $outerName ($externalName)"
+ }
- override def toString =
- originalName + " in " + outerName + "(" + externalName +")"
+ /** Return the Symbol of the top level class enclosing `name`,
+ * or the symbol of `name` itself if no enclosing classes are found.
+ */
+ def topLevelClass(name: Name): Symbol = innerClasses getEntry name match {
+ case Some(entry) => topLevelClass(entry.outerName)
+ case _ => classNameToSymbol(name)
}
- object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
- /** Return the Symbol of the top level class enclosing `name`,
- * or 'name's symbol if no entry found for `name`.
- */
- def topLevelClass(name: Name): Symbol = {
- val tlName = if (isDefinedAt(name)) {
- var entry = this(name)
- while (isDefinedAt(entry.outerName))
- entry = this(entry.outerName)
- entry.outerName
- } else
- name
- classNameToSymbol(tlName)
+ /** Return the class symbol for the given name. It looks it up in its outer class.
+ * Forces all outer class symbols to be completed.
+ *
+ * If the given name is not an inner class, it returns the symbol found in `definitions`.
+ */
+ object innerClasses {
+ private val inners = mutable.HashMap[Name, InnerClassEntry]()
+
+ def contains(name: Name) = inners contains name
+ def getEntry(name: Name) = inners get name
+ def entries = inners.values
+
+ def add(entry: InnerClassEntry): Unit = {
+ inners get entry.externalName foreach (existing =>
+ debugwarn(s"Overwriting inner class entry! Was $existing, now $entry")
+ )
+ inners(entry.externalName) = entry
}
-
- /** Return the class symbol for `externalName`. It looks it up in its outer class.
- * Forces all outer class symbols to be completed.
- *
- * If the given name is not an inner class, it returns the symbol found in `definitions`.
- */
- def classSymbol(externalName: Name): Symbol = {
- /** Return the symbol of `innerName`, having the given `externalName`. */
- def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
- def getMember(sym: Symbol, name: Name): Symbol =
- if (static)
- if (sym == clazz) staticScope.lookup(name)
- else sym.companionModule.info.member(name)
- else
- if (sym == clazz) instanceScope.lookup(name)
- else sym.info.member(name)
-
- innerClasses.get(externalName) match {
- case Some(entry) =>
- val outerName = nme.stripModuleSuffix(entry.outerName)
- val sym = classSymbol(outerName)
- val s =
- // if loading during initialization of `definitions` typerPhase is not yet set.
- // in that case we simply load the member at the current phase
- if (currentRun.typerPhase != null)
- beforeTyper(getMember(sym, innerName.toTypeName))
- else
- getMember(sym, innerName.toTypeName)
-
- assert(s ne NoSymbol,
- "" + ((externalName, outerName, innerName, sym.fullLocationString)) + " / " +
- " while parsing " + ((in.file, busy)) +
- sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members
- )
- s
-
- case None =>
- classNameToSymbol(externalName)
- }
- }
-
- get(externalName) match {
- case Some(entry) =>
- innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
- case None =>
- classNameToSymbol(externalName)
- }
+ def innerSymbol(externalName: Name): Symbol = this getEntry externalName match {
+ case Some(entry) => innerSymbol(entry)
+ case _ => NoSymbol
+ }
+ // if loading during initialization of `definitions` typerPhase is not yet set.
+ // in that case we simply load the member at the current phase
+ @inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
+ if (currentRun.typerPhase eq null) body else beforeTyper(body)
+
+ private def innerSymbol(entry: InnerClassEntry): Symbol = {
+ val name = entry.originalName.toTypeName
+ val enclosing = entry.enclosing
+ def getMember = (
+ if (enclosing == clazz) entry.scope lookup name
+ else enclosing.info member name
+ )
+ enteringTyperIfPossible(getMember)
+ /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ * revealed it had been going off all the time, but had been swallowed by a catch t: Throwable
+ * in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
+ * thought it wasn't triggering, I removed it entirely.
+ */
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 13c0d8993a..c304c18c4f 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -506,6 +506,13 @@ abstract class ICodeReader extends ClassfileParser {
code.emit(UNBOX(toTypeKind(m.info.resultType)))
else
code.emit(CALL_METHOD(m, Static(false)))
+ case JVM.invokedynamic =>
+ // TODO: this is just a placeholder. A real implementation must parse the class constant entry
+ debuglog("Found JVM invokedynamic instruction, inserting placeholder ICode INVOKE_DYNAMIC.")
+ containsInvokeDynamic = true
+ val poolEntry = in.nextChar
+ in.skip(2)
+ code.emit(INVOKE_DYNAMIC(poolEntry))
case JVM.new_ =>
code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
@@ -644,6 +651,7 @@ abstract class ICodeReader extends ClassfileParser {
var containsDUPX = false
var containsNEW = false
var containsEHs = false
+ var containsInvokeDynamic = false
def emit(i: Instruction) {
instrs += ((pc, i))
@@ -662,6 +670,7 @@ abstract class ICodeReader extends ClassfileParser {
val code = new Code(method)
method.setCode(code)
method.bytecodeHasEHs = containsEHs
+ method.bytecodeHasInvokeDynamic = containsInvokeDynamic
var bb = code.startBlock
def makeBasicBlocks: mutable.Map[Int, BasicBlock] =
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index e8b0cd2696..c3aded2b2d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -293,7 +293,6 @@ abstract class Pickler extends SubComponent {
putTree(definition)
*/
case Template(parents, self, body) =>
- writeNat(parents.length)
putTrees(parents)
putTree(self)
putTrees(body)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 889d309ba9..ead6ef288c 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -477,6 +477,7 @@ abstract class Erasure extends AddInterfaces
def checkPair(member: Symbol, other: Symbol) {
val otpe = specialErasure(root)(other.tpe)
val bridgeNeeded = afterErasure (
+ !member.isMacro &&
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
{ var e = bridgesScope.lookupEntry(member.name)
@@ -780,7 +781,7 @@ abstract class Erasure extends AddInterfaces
else if (tree.symbol == Any_isInstanceOf)
adaptMember(atPos(tree.pos)(Select(qual, Object_isInstanceOf)))
else if (tree.symbol.owner == AnyClass)
- adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, name))))
+ adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, tree.symbol.name))))
else {
var qual1 = typedQualifier(qual)
if ((isPrimitiveValueType(qual1.tpe) && !isPrimitiveValueMember(tree.symbol)) ||
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 2f28a16416..970519ab7c 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -45,26 +45,24 @@ abstract class ExplicitOuter extends InfoTransform
private def isInner(clazz: Symbol) =
!clazz.isPackageClass && !clazz.outerClass.isStaticOwner
- private def haveSameOuter(parent: Type, clazz: Symbol) = parent match {
- case TypeRef(pre, sym, _) =>
- val owner = clazz.owner
+ private def haveSameOuter(parent: Type, clazz: Symbol) = {
+ val owner = clazz.owner
+ val parentSym = parent.typeSymbol
- //println(s"have same outer $parent $clazz $sym ${sym.owner} $owner $pre")
-
- sym.isClass && owner.isClass &&
- (owner isSubClass sym.owner) &&
- owner.thisType =:= pre
-
- case _ => false
+ parentSym.isClass && owner.isClass &&
+ (owner isSubClass parentSym.owner) &&
+ owner.thisType =:= parent.prefix
}
/** Does given clazz define an outer field? */
def hasOuterField(clazz: Symbol) = {
- val parents = clazz.info.parents
+ val parent = clazz.info.firstParent
- isInner(clazz) && !clazz.isTrait && {
- parents.isEmpty || !haveSameOuter(parents.head, clazz)
- }
+ // space optimization: inherit the $outer pointer from the parent class if
+ // we know that it will point to the correct instance.
+ def canReuseParentOuterField = !parent.typeSymbol.isJavaDefined && haveSameOuter(parent, clazz)
+
+ isInner(clazz) && !clazz.isTrait && !canReuseParentOuterField
}
private def outerField(clazz: Symbol): Symbol = {
@@ -112,6 +110,29 @@ abstract class ExplicitOuter extends InfoTransform
sym setInfo clazz.outerClass.thisType
}
+ /**
+ * Will the outer accessor of the `clazz` subsume the outer accessor of
+ * `mixin`?
+ *
+ * This arises when an inner object mixes in its companion trait.
+ *
+ * {{{
+ * class C {
+ * trait T { C.this } // C$T$$$outer$ : C
+ * object T extends T { C.this } // C$T$$$outer$ : C.this.type
+ * }
+ * }}}
+ *
+ * See SI-7242.
+ }}
+ */
+ private def skipMixinOuterAccessor(clazz: Symbol, mixin: Symbol) = {
+ // Reliant on the current scheme for name expansion, the expanded name
+ // of the outer accessors in a trait and its companion object are the same.
+ // If the assumption is one day falsified, run/t7424.scala will let us know.
+ clazz.fullName == mixin.fullName
+ }
+
/** <p>
* The type transformation method:
* </p>
@@ -177,10 +198,14 @@ abstract class ExplicitOuter extends InfoTransform
for (mc <- clazz.mixinClasses) {
val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
- if (decls1 eq decls) decls1 = decls.cloneScope
- val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
- newAcc setInfo (clazz.thisType memberType mixinOuterAcc)
- decls1 enter newAcc
+ if (skipMixinOuterAccessor(clazz, mc))
+ debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc")
+ else {
+ if (decls1 eq decls) decls1 = decls.cloneScope
+ val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
+ newAcc setInfo (clazz.thisType memberType mixinOuterAcc)
+ decls1 enter newAcc
+ }
}
}
}
@@ -390,6 +415,7 @@ abstract class ExplicitOuter extends InfoTransform
val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass
def mixinPrefix = (currentClass.thisType baseType mixinClass).prefix
assert(outerAcc != NoSymbol, "No outer accessor for inner mixin " + mixinClass + " in " + currentClass)
+ assert(outerAcc.alternatives.size == 1, s"Multiple outer accessors match inner mixin $mixinClass in $currentClass : ${outerAcc.alternatives.map(_.defString)}")
// I added the mixinPrefix.typeArgs.nonEmpty condition to address the
// crash in SI-4970. I feel quite sure this can be improved.
val path = (
@@ -492,7 +518,7 @@ abstract class ExplicitOuter extends InfoTransform
}
if (!currentClass.isTrait)
for (mc <- currentClass.mixinClasses)
- if (outerAccessor(mc) != NoSymbol)
+ if (outerAccessor(mc) != NoSymbol && !skipMixinOuterAccessor(currentClass, mc))
newDefs += mixinOuterAccessorDef(mc)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 232148676c..39716d4ed0 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -78,7 +78,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
/** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
+ val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol]
/** Map a method symbol to a list of its specialized overloads in the same class. */
private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil
@@ -796,7 +796,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
- else {
+ else if (sym.hasDefault) {
+ /* Specializing default getters is useless, also see SI-7329. */
+ sym.resetFlag(SPECIALIZED)
+ Nil
+ } else {
// debuglog("normalizeMember: " + sym.fullNameAsName('.').decode)
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
@@ -976,27 +980,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
typeEnv(om) = env
addConcreteSpecMethod(overriding)
- info(om) = (
- if (overriding.isDeferred) { // abstract override
- debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
- Forward(overriding)
- }
- else {
- // if the override is a normalized member, 'om' gets the
- // implementation from its original target, and adds the
- // environment of the normalized member (that is, any
- // specialized /method/ type parameter bindings)
- val impl = info get overriding match {
- case Some(NormalizedMember(target)) =>
- typeEnv(om) = env ++ typeEnv(overriding)
- target
- case _ =>
- overriding
- }
- info(overriding) = Forward(om setPos overriding.pos)
- SpecialOverride(impl)
+ if (overriding.isDeferred) { // abstract override
+ debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
+ info(om) = Forward(overriding)
+ }
+ else {
+ // if the override is a normalized member, 'om' gets the
+ // implementation from its original target, and adds the
+ // environment of the normalized member (that is, any
+ // specialized /method/ type parameter bindings)
+ info get overriding match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(overriding)
+ info(om) = Forward(target)
+ case _ =>
+ info(om) = SpecialOverride(overriding)
}
- )
+ info(overriding) = Forward(om setPos overriding.pos)
+ }
newOverload(overriding, om, env)
ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
@@ -1776,21 +1777,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Create specialized class definitions */
def implSpecClasses(trees: List[Tree]): List[Tree] = {
- val buf = new mutable.ListBuffer[Tree]
- for (tree <- trees)
- tree match {
- case ClassDef(_, _, _, impl) =>
- tree.symbol.info // force specialization
- for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
- val parents = specCls.info.parents.map(TypeTree)
- buf +=
- ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
- .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
- debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
- }
- case _ =>
- }
- buf.toList
+ trees flatMap {
+ case tree @ ClassDef(_, _, _, impl) =>
+ tree.symbol.info // force specialization
+ for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
+ debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
+ val parents = specCls.info.parents.map(TypeTree)
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
+ }
+ case _ => Nil
+ } sortBy (_.name.decoded)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index a767850cba..938499261e 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -220,7 +220,10 @@ abstract class TailCalls extends Transform {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
accessed += ctx.label
- typedPos(fun.pos)(Apply(Ident(ctx.label), noTailTransform(recv) :: transformArgs))
+ typedPos(fun.pos) {
+ val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
+ Apply(Ident(ctx.label), noTailTransform(recv) :: args)
+ }
}
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
@@ -280,7 +283,7 @@ abstract class TailCalls extends Transform {
typedPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
- LabelDef(newCtx.label, newThis :: vpSyms, newRHS)
+ LabelDef(newCtx.label, newThis :: vpSyms, mkAttributedCastHack(newRHS, newCtx.label.tpe.resultType))
))
}
else {
@@ -377,6 +380,13 @@ abstract class TailCalls extends Transform {
super.transform(tree)
}
}
+
+ // Workaround for SI-6900. Uncurry installs an InfoTransformer and a tree Transformer.
+ // These leave us with conflicting view on method signatures; the parameter symbols in
+ // the MethodType can be clones of the ones originally found on the parameter ValDef, and
+ // consequently appearing in the typechecked RHS of the method.
+ private def mkAttributedCastHack(tree: Tree, tpe: Type) =
+ gen.mkAttributedCast(tree, tpe)
}
// collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e9f403aea0..430129aaff 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -492,10 +492,14 @@ abstract class UnCurry extends InfoTransform
}
else {
log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
+ def canUseDirectly(recv: Tree) = (
+ recv.tpe.typeSymbol.isSubClass(FunctionClass(0))
+ && treeInfo.isExprSafeToInline(recv)
+ )
arg match {
// don't add a thunk for by-name argument if argument already is an application of
// a Function0. We can then remove the application and use the existing Function0.
- case Apply(Select(recv, nme.apply), Nil) if recv.tpe.typeSymbol isSubClass FunctionClass(0) =>
+ case Apply(Select(recv, nme.apply), Nil) if canUseDirectly(recv) =>
recv
case _ =>
newFunction0(arg)
@@ -619,7 +623,7 @@ abstract class UnCurry extends InfoTransform
val fn1 = withInPattern(false)(transform(fn))
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args.length))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -814,7 +818,8 @@ abstract class UnCurry extends InfoTransform
*
* This transformation erases the dependent method types by:
* - Widening the formal parameter type to existentially abstract
- * over the prior parameters (using `packSymbols`)
+ * over the prior parameters (using `packSymbols`). This transformation
+ * is performed in the `InfoTransform`er [[scala.reflect.internal.transform.UnCurry]].
* - Inserting casts in the method body to cast to the original,
* precise type.
*
@@ -842,15 +847,14 @@ abstract class UnCurry extends InfoTransform
*/
def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
import dd.{ vparamss, rhs }
- val vparamSyms = vparamss flatMap (_ map (_.symbol))
-
val paramTransforms: List[ParamTransform] =
- vparamss.flatten.map { p =>
- val declaredType = p.symbol.info
- // existentially abstract over value parameters
- val packedType = typer.packSymbols(vparamSyms, declaredType)
- if (packedType =:= declaredType) Identity(p)
+ map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) =>
+ val packedType = infoParam.info
+ if (packedType =:= p.symbol.info) Identity(p)
else {
+ // The Uncurry info transformer existentially abstracted over value parameters
+ // from the previous parameter lists.
+
// Change the type of the param symbol
p.symbol updateInfo packedType
@@ -862,8 +866,8 @@ abstract class UnCurry extends InfoTransform
// the method body to refer to this, rather than the parameter.
val tempVal: ValDef = {
val tempValName = unit freshTermName (p.name + "$")
- val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
- atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info)))
}
Packed(newParam, tempVal)
}
@@ -881,13 +885,6 @@ abstract class UnCurry extends InfoTransform
Block(tempVals, rhsSubstituted)
}
- // update the type of the method after uncurry.
- dd.symbol updateInfo {
- val GenPolyType(tparams, tp) = dd.symbol.info
- logResult("erased dependent param types for ${dd.symbol.info}") {
- GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
- }
- }
(allParams :: Nil, rhs1)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
new file mode 100644
index 0000000000..dbe08315f4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -0,0 +1,644 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab._
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.HashSet
+
+
+trait Logic extends Debugging {
+ import PatternMatchingStats._
+
+ private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
+ private def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
+ def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ if (cols.isEmpty || cols.tails.isEmpty) cols map toString
+ else {
+ val colLens = cols map (c => toString(c).length)
+ val maxLen = max(colLens)
+ val avgLen = colLens.sum/colLens.length
+ val goalLen = maxLen min avgLen*2
+ def pad(s: String) = {
+ val toAdd = ((goalLen - s.length) max 0) + 2
+ (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
+ }
+ cols map (x => pad(toString(x)))
+ }
+ }
+
+ def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ val maxLen = max(xss map (_.length))
+ val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
+ padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
+ }
+
+ // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
+ // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
+ // propositional logic with constants and equality
+ trait PropositionalLogic {
+ type Type
+ type Tree
+
+ class Prop
+ case class Eq(p: Var, q: Const) extends Prop
+
+ type Const
+
+ type TypeConst <: Const
+ def TypeConst: TypeConstExtractor
+ trait TypeConstExtractor {
+ def apply(tp: Type): Const
+ }
+
+ type ValueConst <: Const
+ def ValueConst: ValueConstExtractor
+ trait ValueConstExtractor {
+ def apply(p: Tree): Const
+ }
+
+ val NullConst: Const
+
+ type Var <: AbsVar
+ val Var: VarExtractor
+ trait VarExtractor {
+ def apply(x: Tree): Var
+ def unapply(v: Var): Some[Tree]
+ }
+
+ // resets hash consing -- only supposed to be called by TreeMakersToProps
+ def prepareNewAnalysis(): Unit
+
+ trait AbsVar {
+ // indicate we may later require a prop for V = C
+ def registerEquality(c: Const): Unit
+
+ // call this to indicate null is part of the domain
+ def registerNull(): Unit
+
+ // can this variable be null?
+ def mayBeNull: Boolean
+
+ // compute the domain and return it (call registerNull first!)
+ def domainSyms: Option[Set[Sym]]
+
+ // the symbol for this variable being equal to its statically known type
+ // (only available if registerEquality has been called for that type before)
+ def symForStaticTp: Option[Sym]
+
+ // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
+ // registerEquality(c) must have been called prior to this call
+ // in fact, all equalities relevant to this variable must have been registered
+ def propForEqualsTo(c: Const): Prop
+
+ // populated by registerEquality
+ // once implications has been called, must not call registerEquality anymore
+ def implications: List[(Sym, List[Sym], List[Sym])]
+ }
+
+ // would be nice to statically check whether a prop is equational or pure,
+ // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
+ case class And(a: Prop, b: Prop) extends Prop
+ case class Or(a: Prop, b: Prop) extends Prop
+ case class Not(a: Prop) extends Prop
+
+ case object True extends Prop
+ case object False extends Prop
+
+ // symbols are propositions
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
+ override def toString = variable +"="+ const +"#"+ id
+ }
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: HashSet[Sym] = new HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
+
+ def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
+ def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
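+ // e.g., /\(List(a, b, c)) folds to And(And(a, b), c); by convention /\(Nil) is True and \/(Nil) is False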
+
+ trait PropTraverser {
+ def apply(x: Prop): Unit = x match {
+ case And(a, b) => apply(a); apply(b)
+ case Or(a, b) => apply(a); apply(b)
+ case Not(a) => apply(a)
+ case Eq(a, b) => applyVar(a); applyConst(b)
+ case _ =>
+ }
+ def applyVar(x: Var): Unit = {}
+ def applyConst(x: Const): Unit = {}
+ }
+
+ def gatherVariables(p: Prop): Set[Var] = {
+ val vars = new mutable.HashSet[Var]()
+ (new PropTraverser {
+ override def applyVar(v: Var) = vars += v
+ })(p)
+ vars.toSet
+ }
+
+ trait PropMap {
+ def apply(x: Prop): Prop = x match { // TODO: mapConserve
+ case And(a, b) => And(apply(a), apply(b))
+ case Or(a, b) => Or(apply(a), apply(b))
+ case Not(a) => Not(apply(a))
+ case p => p
+ }
+ }
+
+ // to govern how much time we spend analyzing matches for unreachability/exhaustivity
+ object AnalysisBudget {
+ import scala.tools.cmd.FromString.IntFromString
+ val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
+
+ abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
+
+ object exceeded extends Exception(
+ s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)")
+
+ }
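+ // usage sketch, following the advice in the `exceeded` message above (the file name is hypothetical):
+ // scalac -Dscalac.patmat.analysisBudget=512 Foo.scala // double the default budget of 256
+ // scalac -Dscalac.patmat.analysisBudget=off Foo.scala // lift the limit entirely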
+
+ // convert finite domain propositional logic with subtyping to pure boolean propositional logic
+ // a type test or a value equality test are modelled as a variable being equal to some constant
+ // a variable V may be assigned multiple constants, as long as they do not contradict each other
+ // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments
+ // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain
+ // in a prelude (the equality axioms)
+ // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain
+ // 2. for each variable V in props, and each constant C it is compared to,
+ // compute which assignments imply each other (as in the example above: V = 1 implies V = Int)
+ // and which assignments are mutually exclusive (V = String implies -(V = Int))
+ //
+ // note that this is a conservative approximation: V = Constant(A) and V = Constant(B)
+ // are considered mutually exclusive (and thus both cases are considered reachable in {case A => case B =>}),
+ // even though A may be equal to B (and thus the second case is not "dynamically reachable")
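+ // a hypothetical sketch of the kind of match being modelled here:
+ // (x: Any) match {
+ // case 1 => // V = ConstantType(1), which also implies V = Int
+ // case 2 => // V = ConstantType(2), treated as mutually exclusive with V = ConstantType(1)
+ // case _: Int => // V = Int
+ // }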
+ //
+ // TODO: for V1 representing x1 and V2 standing for x1.head, encode that
+ // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
+ // may throw an AnalysisBudget.Exception
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
+
+ val vars = new scala.collection.mutable.HashSet[Var]
+
+ object gatherEqualities extends PropTraverser {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) =>
+ vars += v
+ v.registerEquality(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ object rewriteEqualsToProp extends PropMap {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) => v.propForEqualsTo(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ props foreach gatherEqualities.apply
+ if (modelNull) vars foreach (_.registerNull)
+
+ val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
+
+ val eqAxioms = formulaBuilder
+ @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
+
+ debug.patmat("removeVarEq vars: "+ vars)
+ vars.foreach { v =>
+ // if v.domainSyms.isEmpty, we must consider the domain to be infinite
+ // otherwise, since the domain fully partitions the type of the value,
+ // exactly one of the types (and whatever it implies, imposed separately) must be chosen
+ // consider X ::= A | B | C, and A => B
+ // coverage is formulated as: A \/ B \/ C and the implications are
+ v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
+
+ // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
+ // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
+ v.symForStaticTp foreach { symForStaticTp =>
+ if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
+ else addAxiom(symForStaticTp)
+ }
+
+ v.implications foreach { case (sym, implied, excluded) =>
+ // when sym is true, what must hold...
+ implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
+ // ... and what must not?
+ excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
+ }
+ }
+
+ debug.patmat("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
+ debug.patmat("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
+
+ (toFormula(eqAxioms), pure)
+ }
+
+
+ // an interface that should be suitable for feeding a SAT solver when the time comes
+ type Formula
+ type FormulaBuilder
+
+ // creates an empty formula builder to which more formulae can be added
+ def formulaBuilder: FormulaBuilder
+
+ // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
+ // toFormula(f) == andFormula(f1, andFormula(..., fN))
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit
+ def toFormula(buff: FormulaBuilder): Formula
+
+ // the conjunction of formulae `a` and `b`
+ def andFormula(a: Formula, b: Formula): Formula
+
+ // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
+ def simplifyFormula(a: Formula): Formula
+
+ // may throw an AnalysisBudget.Exception
+ def propToSolvable(p: Prop): Formula = {
+ val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
+ andFormula(eqAxioms, pure)
+ }
+
+ // may throw an AnalysisBudget.Exception
+ def eqFreePropToSolvable(p: Prop): Formula
+ def cnfString(f: Formula): String
+
+ type Model = Map[Sym, Boolean]
+ val EmptyModel: Model
+ val NoModel: Model
+
+ def findModelFor(f: Formula): Model
+ def findAllModelsFor(f: Formula): List[Model]
+ }
+}
+
+trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
+ trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis {
+ type Type = global.Type
+ type Tree = global.Tree
+
+ // resets hash consing -- only supposed to be called by TreeMakersToProps
+ def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() }
+
+ object Var extends VarExtractor {
+ private var _nextId = 0
+ def nextId = {_nextId += 1; _nextId}
+
+ def resetUniques() = {_nextId = 0; uniques.clear()}
+ private val uniques = new mutable.HashMap[Tree, Var]
+ def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
+ def unapply(v: Var) = Some(v.path)
+ }
+ class Var(val path: Tree, staticTp: Type) extends AbsVar {
+ private[this] val id: Int = Var.nextId
+
+ // private[this] var canModify: Option[Array[StackTraceElement]] = None
+ private[this] def ensureCanModify() = {} //if (canModify.nonEmpty) debug.patmat("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
+
+ private[this] def observed() = {} //canModify = Some(Thread.currentThread.getStackTrace)
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private[this] val symForEqualsTo = new mutable.HashMap[Const, Sym]
+
+ // when looking at the domain, we only care about types we can check at run time
+ val staticTpCheckable: Type = checkableType(staticTp)
+
+ private[this] var _mayBeNull = false
+ def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def mayBeNull: Boolean = _mayBeNull
+
+ // case None => domain is unknown,
+ // case Some(List(tps: _*)) => domain is exactly tps
+ // we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
+ // once we go to run-time checks (on Const's), convert them to checkable types
+ // TODO: there seems to be a bug for singleton domains (variable does not show up in model)
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
+ }
+ }
+
+ val allConsts =
+ if (mayBeNull) {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ } else
+ subConsts
+
+ observed; allConsts
+ }
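+
+ // illustration (not part of the original sources):
+ // for a variable of static type Boolean, domain is Some of the constants for ConstantType(true) and ConstantType(false)
+ // (plus NullConst when the variable may be null); for an unsealed static type, domain is None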
+
+ // populate equalitySyms
+ // don't care about the result, but want only one fresh symbol per distinct constant c
+ def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+
+ // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
+ // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
+ def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+
+ // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
+ /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
+ *
+ * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
+ * and the constant pattern yields a ValueConst C
+ *
+ * for exhaustivity, we really only need implication (e.g., V = 1 implies that V = 1 /\ V = Int, if both tests occur in the match,
+ * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
+ */
+ lazy val implications = {
+ /** when we know V = C, which other equalities must hold
+ *
+ * in general, equality to some type implies equality to its supertypes
+ * (this multi-valued kind of equality is necessary for unreachability)
+ * note that we use subtyping as a model for implication between instanceof tests
+ * i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ * unfortunately this is not true in general (see e.g. SI-6022)
+ */
+ def implies(lower: Const, upper: Const): Boolean =
+ // values and null
+ lower == upper ||
+ // type implication
+ (lower != NullConst && !upper.isValue &&
+ instanceOfTpImplies(if (lower.isValue) lower.wideTp else lower.tp, upper.tp))
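+
+ // illustration (not part of the original sources): under this model,
+ // implies(ValueConst(1), TypeConst(Int)) holds (a value implies the test on its widened class)
+ // implies(TypeConst(Some[Int]), TypeConst(Option[Int])) holds (a subtype test implies the supertype test)
+ // implies(ValueConst(1), ValueConst(2)) does not hold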
+
+ // if(r) debug.patmat("implies : "+(lower, lower.tp, upper, upper.tp))
+ // else debug.patmat("NOT implies: "+(lower, upper))
+
+
+ /** Does V=A preclude V=B?
+ *
+ * (0) A or B must be in the domain to draw any conclusions.
+ *
+ * For example, knowing that the scrutinee is *not* true does not
+ * statically exclude it from being `X`, because that is an opaque
+ * Boolean.
+ *
+ * val X = true
+ * (true: Boolean) match { case true => case X <reachable> }
+ *
+ * (1) V = null excludes assignment to any other constant (modulo point #0). This includes
+ * both values and type tests (which are both modelled here as `Const`)
+ * (2) V = A and V = B, for A and B domain constants, are mutually exclusive unless A == B
+ *
+ * (3) We only reason about type tests as being excluded by null assignments, otherwise we
+ * only consider value assignments.
+ * TODO: refine this, a == 0 excludes a: String, or `a: Int` excludes `a: String`
+ * (since no value can be of both types. See also SI-7211)
+ *
+ * NOTE: V = 1 does not preclude V = Int, or V = Any, it could be said to preclude
+ * V = String, but we don't model that.
+ */
+ def excludes(a: Const, b: Const): Boolean = {
+ val bothInDomain = domain exists (d => d(a) && d(b))
+ val eitherIsNull = a == NullConst || b == NullConst
+ val bothAreValues = a.isValue && b.isValue
+ bothInDomain && (eitherIsNull || bothAreValues) && (a != b)
+ }
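+
+ // illustration (not part of the original sources):
+ // for a Boolean-typed variable (domain {true, false}), excludes(ValueConst(true), ValueConst(false)) holds
+ // excludes(ValueConst(1), TypeConst(String)) does not hold: type tests are only excluded by null (see the TODO above)
+ // excludes(NullConst, c) holds for any other domain constant c, provided null is in the domain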
+
+ // if(r) debug.patmat("excludes : "+(a, a.tp, b, b.tp))
+ // else debug.patmat("NOT excludes: "+(a, b))
+
+/*
+[ HALF BAKED FANCINESS: //!equalitySyms.exists(common => implies(common.const, a) && implies(common.const, b)))
+ when type tests are involved, we reason (conservatively) under a closed world assumption,
+ since we are really only trying to counter the effects of the symbols that we introduce to model type tests
+ we don't aim to model the whole subtyping hierarchy, simply to encode enough about subtyping to do unreachability properly
+
+ consider the following hierarchy:
+
+ trait A
+ trait B
+ trait C
+ trait AB extends B with A
+
+ // two types are mutually exclusive if there is no equality symbol whose constant implies both
+ object Test extends App {
+ def foo(x: Any) = x match {
+ case _ : C => println("C")
+ case _ : AB => println("AB")
+ case _ : (A with B) => println("AB'")
+ case _ : B => println("B")
+ case _ : A => println("A")
+ }
+
+ of course this kind of reasoning is not true in general,
+ but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
+*/
+
+ val excludedPair = new mutable.HashSet[ExcludedPair]
+
+ case class ExcludedPair(a: Const, b: Const) {
+ override def equals(o: Any) = o match {
+ case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
+ case _ => false
+ }
+ // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
+ override def hashCode = a.hashCode ^ b.hashCode
+ }
+
+ equalitySyms map { sym =>
+ // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
+ // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
+ val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
+ val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const))
+ val implied = notExcluded filter (b => implies(sym.const, b.const))
+
+ debug.patmat("eq axioms for: "+ sym.const)
+ debug.patmat("excluded: "+ excluded)
+ debug.patmat("implied: "+ implied)
+
+ excluded foreach { excludedSym => excludedPair += ExcludedPair(sym.const, excludedSym.const)}
+
+ (sym, implied, excluded)
+ }
+ }
+
+ // accessing after calling registerNull will result in inconsistencies
+ lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+
+ lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+
+ // don't call until all equalities have been registered and registerNull has been called (if needed)
+ def describe = {
+ def domain_s = domain match {
+ case Some(d) => d mkString (" ::= ", " | ", "// "+ symForEqualsTo.keys)
+ case _ => symForEqualsTo.keys mkString (" ::= ", " | ", " | ...")
+ }
+ s"$this: ${staticTp}${domain_s} // = $path"
+ }
+ override def toString = "V"+ id
+ }
+
+
+ import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
+ import global.definitions.{AnyClass, UnitClass}
+
+
+ // all our variables range over types
+ // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
+ // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
+ object Const {
+ def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
+
+ private var _nextTypeId = 0
+ def nextTypeId = {_nextTypeId += 1; _nextTypeId}
+
+ private var _nextValueId = 0
+ def nextValueId = {_nextValueId += 1; _nextValueId}
+
+ private val uniques = new mutable.HashMap[Type, Const]
+ private[TreesAndTypesDomain] def unique(tp: Type, mkFresh: => Const): Const =
+ uniques.get(tp).getOrElse(
+ uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
+ case Some((_, c)) =>
+ debug.patmat("unique const: "+ (tp, c))
+ c
+ case _ =>
+ val fresh = mkFresh
+ debug.patmat("uniqued const: "+ (tp, fresh))
+ uniques(tp) = fresh
+ fresh
+ })
+
+ private val trees = mutable.HashSet.empty[Tree]
+
+ // hashconsing trees (modulo value-equality)
+ private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type =
+ // a new type for every unstable symbol -- only stable values are uniqued
+ // technically, an unreachable value may change between cases
+ // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding
+ // (and thuuuuus, the latter case must be considered reachable)
+ if (!t.symbol.isStable) t.tpe.narrow
+ else trees find (a => a.correspondsStructure(t)(sameValue)) match {
+ case Some(orig) =>
+ debug.patmat("unique tp for tree: "+ (orig, orig.tpe))
+ orig.tpe
+ case _ =>
+ // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
+ val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
+ debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ trees += treeWithNarrowedType
+ treeWithNarrowedType.tpe
+ }
+ }
+
+ sealed abstract class Const {
+ def tp: Type
+ def wideTp: Type
+
+ def isAny = wideTp.typeSymbol == AnyClass
+ def isValue: Boolean //= tp.isStable
+
+ // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
+ // the equals inherited from AnyRef does just this
+ }
+
+ // find most precise super-type of tp that is a class
+ // we skip non-class types (singleton types, abstract types) so that we can
+ // correctly compute how types relate in terms of the values they rule out
+ // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
+ // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
+ // (At least conceptually: `true` is an instance of class `Boolean`)
+ private def widenToClass(tp: Type): Type =
+ if (tp.typeSymbol.isClass) tp
+ else tp.baseType(tp.baseClasses.head)
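+
+ // illustration (hypothetical, not part of the original sources): for an abstract type `T <: Fruit`
+ // (with Fruit a class), T's symbol is not a class, so widenToClass(T) yields T's base type Fruit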
+
+ object TypeConst extends TypeConstExtractor {
+ def apply(tp: Type) = {
+ if (tp =:= NullTp) NullConst
+ else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
+ else Const.unique(tp, new TypeConst(tp))
+ }
+ def unapply(c: TypeConst): Some[Type] = Some(c.tp)
+ }
+
+ // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
+ sealed class TypeConst(val tp: Type) extends Const {
+ assert(!(tp =:= NullTp))
+ /*private[this] val id: Int = */ Const.nextTypeId
+
+ val wideTp = widenToClass(tp)
+ def isValue = false
+ override def toString = tp.toString //+"#"+ id
+ }
+
+ // p is a unique type or a constant value
+ object ValueConst extends ValueConstExtractor {
+ def fromType(tp: Type) = {
+ assert(tp.isInstanceOf[SingletonType])
+ val toString = tp match {
+ case ConstantType(c) => c.escapedStringValue
+ case _ if tp.typeSymbol.isModuleClass => tp.typeSymbol.name.toString
+ case _ => tp.toString
+ }
+ Const.unique(tp, new ValueConst(tp, tp.widen, toString))
+ }
+ def apply(p: Tree) = {
+ val tp = p.tpe.normalize
+ if (tp =:= NullTp) NullConst
+ else {
+ val wideTp = widenToClass(tp)
+
+ val narrowTp =
+ if (tp.isInstanceOf[SingletonType]) tp
+ else p match {
+ case Literal(c) =>
+ if (c.tpe.typeSymbol == UnitClass) c.tpe
+ else ConstantType(c)
+ case Ident(_) if p.symbol.isStable =>
+ // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
+ // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
+ singleType(tp.prefix, p.symbol)
+ case _ =>
+ Const.uniqueTpForTree(p)
+ }
+
+ val toString =
+ if (hasStableSymbol(p)) p.symbol.name.toString // tp.toString
+ else p.toString //+"#"+ id
+
+ Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst
+ }
+ }
+ }
+ sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
+ // debug.patmat("VC"+(tp, wideTp, toString))
+ assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
+ /*private[this] val id: Int = */Const.nextValueId
+ def isValue = true
+ }
+
+
+ lazy val NullTp = ConstantType(Constant(null))
+ case object NullConst extends Const {
+ def tp = NullTp
+ def wideTp = NullTp
+
+ def isValue = true
+ override def toString = "null"
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
new file mode 100644
index 0000000000..9558542533
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -0,0 +1,709 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+
+trait TreeAndTypeAnalysis extends Debugging {
+ import global.{Tree, Type, Symbol, definitions, analyzer,
+ ConstantType, Literal, Constant, appliedType, WildcardType, TypeRef, ModuleClassSymbol,
+ nestedMemberType, TypeMap, Ident}
+
+ import definitions._
+ import analyzer.Typer
+
+
+ // we use subtyping as a model for implication between instanceof tests
+ // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ // unfortunately this is not true in general:
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
+ val tpValue = tp.typeSymbol.isPrimitiveValueClass
+
+ // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
+ // (and the subtype is respectively a value type or not a value type)
+ // this allows us to reuse subtyping as a model for implication between instanceOf tests
+ // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
+ val tpImpliedNormalizedToAny =
+ if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+ else tpImplied
+
+ tp <:< tpImpliedNormalizedToAny
+ }
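+
+ // illustration (not part of the original sources):
+ // instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe) -- true: AnyRef is widened to Any for the non-value type Product (SI-6022)
+ // instanceOfTpImplies(IntClass.tpe, AnyValClass.tpe) -- true: AnyVal is widened to Any for the value type Int
+ // instanceOfTpImplies(StringClass.tpe, IntClass.tpe) -- false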
+
+ // TODO: improve, e.g., for constants
+ def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
+ case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
+ case _ => false
+ })
+
+ trait CheckableTreeAndTypeAnalysis {
+ val typer: Typer
+
+ // TODO: domain of other feasibly enumerable built-in types (char?)
+ def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ tp.typeSymbol match {
+ // TODO case _ if tp.isTupleType => // recurse into component types?
+ case UnitClass =>
+ Some(List(UnitClass.tpe))
+ case BooleanClass =>
+ Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
+ // TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
+ case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
+ debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ None
+ case sym =>
+ val subclasses = (
+ sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be covered, and they cannot otherwise be created.
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
+ debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses))
+
+ val tpApprox = typer.infer.approximateAbstracts(tp)
+ val pre = tpApprox.prefix
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ val validSubTypes = (subclasses flatMap {sym =>
+ // have to filter out children which cannot match: see ticket #3683 for an example
+ // compare to the fully known type `tp` (modulo abstract types),
+ // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
+ // however, must approximate abstract types in
+
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
+ val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
+ // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
+ if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
+ else None
+ })
+ debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ Some(validSubTypes)
+ }
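+
+ // illustration (hypothetical hierarchy, not part of the original sources):
+ // sealed trait Fruit; case class Apple() extends Fruit; case object Orange extends Fruit
+ // enumerateSubtypes(Fruit) ~> Some(List(Apple, Orange.type)) (modulo prefix and abstract-type approximation)
+ // enumerateSubtypes(Boolean) ~> Some(List(ConstantType(true), ConstantType(false)))
+ // enumerateSubtypes(Int) ~> None (primitive value class, so not enumerable here)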
+
+ // approximate a type to the static type that is fully checkable at run time,
+ // hiding statically known but dynamically uncheckable information using existential quantification
+ // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
+ def checkableType(tp: Type): Type = {
+ // TODO: this is extremely rough...
+ // replace type args by wildcards, since they can't be checked (don't use existentials: overkill)
+ // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
+ // similar to typer.infer.approximateAbstracts
+ object typeArgsToWildcardsExceptArray extends TypeMap {
+ // SI-6771 dealias would be enough today, but future proofing with the dealiasWiden.
+ // See neg/t6771b.scala for elaboration
+ def apply(tp: Type): Type = tp.dealiasWiden match {
+ case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
+ TypeRef(pre, sym, args map (_ => WildcardType))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ val res = typeArgsToWildcardsExceptArray(tp)
+ debug.patmat("checkable "+(tp, res))
+ res
+ }
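+
+ // illustration (not part of the original sources):
+ // checkableType(List[Int]) ~> List[_] (type arguments cannot be checked at run time)
+ // checkableType(Array[Int]) ~> Array[Int] (arrays keep their element type, which is checkable)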
+
+ // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
+ // we consider tuple types with at least one component of a checkable type as a checkable type
+ def uncheckableType(tp: Type): Boolean = {
+ def tupleComponents(tp: Type) = tp.normalize.typeArgs
+ val checkable = (
+ (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
+ || enumerateSubtypes(tp).nonEmpty)
+ // if (!checkable) debug.patmat("deemed uncheckable: "+ tp)
+ !checkable
+ }
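+
+ // illustration (not part of the original sources):
+ // uncheckableType(Option[Int]) == false (Option is sealed, so its subtypes are enumerable)
+ // uncheckableType(Any) == true (unsealed: we cannot enumerate its subtypes)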
+ }
+}
+
+trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking {
+ import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope}
+ import global.definitions.{ListClass, NilModule}
+
+ /**
+ * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
+ *
+ */
+ trait MatchApproximator extends TreeMakers with TreesAndTypesDomain {
+ object Test {
+ var currId = 0
+ }
+ case class Test(prop: Prop, treeMaker: TreeMaker) {
+ // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ var reuses: Option[Test] = None
+ def registerReuseBy(later: Test): Unit = {
+ assert(later.reuses.isEmpty, later.reuses)
+ // reusedBy += later
+ later.reuses = Some(this)
+ }
+
+ val id = { Test.currId += 1; Test.currId}
+ override def toString =
+ "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ }
+
+
+ class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
+ override def uniqueNonNullProp(p: Tree): Prop = True
+ }
+
+ // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
+ class TreeMakersToProps(val root: Symbol) {
+ prepareNewAnalysis() // reset hash consing for Var and Const
+
+ private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+
+ def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
+ uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
+
+ // overridden in TreeMakersToPropsIgnoreNullChecks
+ def uniqueNonNullProp (testedPath: Tree): Prop =
+ uniqueNonNullProps getOrElseUpdate(testedPath, Not(Eq(Var(testedPath), NullConst)))
+
+ def uniqueTypeProp(testedPath: Tree, pt: Type): Prop =
+ uniqueTypeProps getOrElseUpdate((testedPath, pt), Eq(Var(testedPath), TypeConst(checkableType(pt))))
+
+ // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
+ private val pointsToBound = mutable.HashSet(root)
+ private val trees = mutable.HashSet.empty[Tree]
+
+ // the substitution that renames variables to variables in pointsToBound
+ private var normalize: Substitution = EmptySubstitution
+ private var substitutionComputed = false
+
+ // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
+ // in the end, instead of having x1, x1.hd, x2, x2.hd, ... flying around,
+ // we want something like x1, x1.hd, x1.hd.tl, x1.hd.tl.hd, so that we can easily recognize when
+ // we're testing the same variable
+ // TODO check:
+ // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
+ private var accumSubst: Substitution = EmptySubstitution
+
+ // hashconsing trees (modulo value-equality)
+ def unique(t: Tree, tpOverride: Type = NoType): Tree =
+ trees find (a => a.correspondsStructure(t)(sameValue)) match {
+ case Some(orig) =>
+ // debug.patmat("unique: "+ (t eq orig, orig))
+ orig
+ case _ =>
+ trees += t
+ if (tpOverride != NoType) t setType tpOverride
+ else t
+ }
+
+ def uniqueTp(tp: Type): Type = tp match {
+ // typerefs etc are already hashconsed
+ case _ : UniqueType => tp
+ case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
+ case _ => tp
+ }
+
+ // produce the unique tree used to refer to this binder
+ // the type of the binder passed to the first invocation
+ // determines the type of the tree that'll be returned for that binder as of then
+ final def binderToUniqueTree(b: Symbol) =
+ unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+
+ // note that the sequencing of operations is important: must visit in same order as match execution
+ // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
+ abstract class TreeMakerToProp extends (TreeMaker => Prop) {
+ // requires(if (!substitutionComputed))
+ def updateSubstitution(subst: Substitution): Unit = {
+ // find part of substitution that replaces bound symbols by new symbols, and reverse that part
+ // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
+ val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
+ case (f, t) =>
+ t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ }
+ val (boundFrom, boundTo) = boundSubst.unzip
+ val (unboundFrom, unboundTo) = unboundSubst.unzip
+
+ // reverse substitution that would otherwise replace a variable we already encountered by a new variable
+ // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
+ normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
+ // debug.patmat ("normalize subst: "+ normalize)
+
+ val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
+ pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
+ // debug.patmat("pointsToBound: "+ pointsToBound)
+
+ accumSubst >>= okSubst
+ // debug.patmat("accumSubst: "+ accumSubst)
+ }
+
+ def handleUnknown(tm: TreeMaker): Prop
+
+ /** apply itself must render a faithful representation of the TreeMaker
+ *
+ * Concretely, True must only be used to represent a TreeMaker that is sure to match and that does not do any computation at all
+ * e.g., doCSE relies on apply itself being sound in this sense (since it drops TreeMakers that are approximated to True -- SI-6077)
+ *
+ * handleUnknown may be customized by the caller to approximate further
+ *
+ * TODO: don't ignore outer-checks
+ */
+ def apply(tm: TreeMaker): Prop = {
+ if (!substitutionComputed) updateSubstitution(tm.subPatternsAsSubstitution)
+
+ tm match {
+ case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
+ object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
+ type Result = Prop
+ def and(a: Result, b: Result) = And(a, b)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType)
+ def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
+ val p = binderToUniqueTree(b); And(uniqueNonNullProp(p), uniqueTypeProp(p, uniqueTp(pt)))
+ }
+ def nonNullTest(testedBinder: Symbol) = uniqueNonNullProp(binderToUniqueTree(testedBinder))
+ def equalsTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat))
+ def eqTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = True
+ }
+ ttm.renderCondition(condStrategy)
+ case EqualityTestTreeMaker(prevBinder, patTree, _) => uniqueEqualityProp(binderToUniqueTree(prevBinder), unique(patTree))
+ case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map this)))
+ case ProductExtractorTreeMaker(testedBinder, None) => uniqueNonNullProp(binderToUniqueTree(testedBinder))
+ case SubstOnlyTreeMaker(_, _) => True
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => True
+ case ConstantType(Constant(false)) => False
+ case _ => handleUnknown(tm)
+ }
+ case ExtractorTreeMaker(_, _, _) |
+ ProductExtractorTreeMaker(_, _) |
+ BodyTreeMaker(_, _) => handleUnknown(tm)
+ }
+ }
+ }
+
+
+ private val irrefutableExtractor: PartialFunction[TreeMaker, Prop] = {
+ // the extra condition is None, the extractor's result indicates it always succeeds,
+ // (the potential type-test for the argument is represented by a separate TypeTestTreeMaker)
+ case IrrefutableExtractorTreeMaker(_, _) => True
+ }
+
+ // special-case: interpret pattern `List()` as `Nil`
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
+ private val rewriteListPattern: PartialFunction[TreeMaker, Prop] = {
+ case p @ ExtractorTreeMaker(_, _, testedBinder)
+ if testedBinder.tpe.typeSymbol == ListClass && p.checkedLength == Some(0) =>
+ uniqueEqualityProp(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ }
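+
+ // illustration (not part of the original sources): with this rewrite,
+ // (xs: List[Int]) match { case List() => 0 case x :: rest => 1 }
+ // is approximated as if the first case tested xs == Nil, so together with `::` the match can be proven exhaustive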
+ val fullRewrite = (irrefutableExtractor orElse rewriteListPattern)
+ val refutableRewrite = irrefutableExtractor
+
+ @inline def onUnknown(handler: TreeMaker => Prop) = new TreeMakerToProp {
+ def handleUnknown(tm: TreeMaker) = handler(tm)
+ }
+
+ // used for CSE -- rewrite all unknowns to False (the most conservative option)
+ object conservative extends TreeMakerToProp {
+ def handleUnknown(tm: TreeMaker) = False
+ }
+
+ final def approximateMatch(cases: List[List[TreeMaker]], treeMakerToProp: TreeMakerToProp = conservative) ={
+ val testss = cases.map { _ map (tm => Test(treeMakerToProp(tm), tm)) }
+ substitutionComputed = true // a second call to approximateMatch should not re-compute the substitution (would be wrong)
+ testss
+ }
+ }
+
+ def approximateMatchConservative(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] =
+ (new TreeMakersToProps(root)).approximateMatch(cases)
+
+ // turns a case (represented as a list of abstract tests)
+ // into a proposition that is satisfiable if the case may match
+ protected final def caseWithoutBodyToProp(tests: List[Test]): Prop =
+ /\(tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker]).map(t => t.prop))
+
+ def showTreeMakers(cases: List[List[TreeMaker]]) = {
+ debug.patmat("treeMakers:")
+ debug.patmat(alignAcrossRows(cases, ">>"))
+ }
+
+ def showTests(testss: List[List[Test]]) = {
+ debug.patmat("tests: ")
+ debug.patmat(alignAcrossRows(testss, "&"))
+ }
+ }
+
+}
+
+trait MatchAnalysis extends MatchApproximation {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, NoSymbol, Ident, Select}
+ import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol}
+
+ trait MatchAnalyzer extends MatchApproximator {
+ def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
+ def warn(pos: Position, ex: AnalysisBudget.Exception, kind: String) = uncheckedWarning(pos, s"Cannot check match for $kind.\n${ex.advice}")
+
+ // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
+ // right now hackily implement this by pruning counter-examples
+ // unreachability would also benefit from a more faithful representation
+
+
+ // reachability (dead code)
+
+ // computes the first 0-based case index that is unreachable (if any)
+ // a case is unreachable if it implies its preceding cases
+ // call C the formula that is satisfiable if the considered case matches
+ // call P the formula that is satisfiable if the cases preceding it match
+ // the case is reachable if there is a model for -P /\ C,
+ // thus, the case is unreachable if there is no model for -(-P /\ C),
+ // or, equivalently, P \/ -C, or C => P
+ def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null
+
+ // use the same approximator so we share variables,
+ // but need different conditions depending on whether we're conservatively looking for failure or success
+ // don't rewrite List-like patterns, as List() and Nil need to be distinguished for unreachability
+ val approx = new TreeMakersToProps(prevBinder)
+ def approximate(default: Prop) = approx.approximateMatch(cases, approx.onUnknown { tm =>
+ approx.refutableRewrite.applyOrElse(tm, (_: TreeMaker) => default )
+ })
+
+ val propsCasesOk = approximate(True) map caseWithoutBodyToProp
+ val propsCasesFail = approximate(False) map (t => Not(caseWithoutBodyToProp(t)))
+
+ try {
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
+ val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
+
+ val prefix = formulaBuilder
+ addFormula(prefix, eqAxioms)
+
+ var prefixRest = symbolicCasesFail
+ var current = symbolicCasesOk
+ var reachable = true
+ var caseIndex = 0
+
+ debug.patmat("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
+ debug.patmat("equality axioms:\n"+ cnfString(eqAxiomsOk))
+
+ // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
+ // termination: prefixRest.length decreases by 1
+ while (prefixRest.nonEmpty && reachable) {
+ val prefHead = prefixRest.head
+ caseIndex += 1
+ prefixRest = prefixRest.tail
+ if (prefixRest.isEmpty) reachable = true
+ else {
+ addFormula(prefix, prefHead)
+ current = current.tail
+ val model = findModelFor(andFormula(current.head, toFormula(prefix)))
+
+ // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
+ // if (NoModel ne model) debug.patmat("reached: "+ modelString(model))
+
+ reachable = NoModel ne model
+ }
+ }
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
+
+ if (reachable) None else Some(caseIndex)
+ } catch {
+ case ex: AnalysisBudget.Exception =>
+ warn(prevBinder.pos, ex, "unreachability")
+ None // CNF budget exceeded
+ }
+ }
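+
+ // illustration (not part of the original sources):
+ // (x: Option[Int]) match { case Some(_) => case None => case _ => }
+ // the third case implies the disjunction of the first two (C => P), so unreachableCase returns Some(2)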
+
+ // exhaustivity
+
+ def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
+ // customize TreeMakersToProps (which turns a tree of tree makers into a more abstract DAG of tests)
+ // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
+ // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive
+ // - back off (to avoid crying exhaustive too often) when:
+ // - there are guards -->
+ // - there are extractor calls (that we can't secretly/soundly rewrite)
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null
+ var backoff = false
+
+ val approx = new TreeMakersToPropsIgnoreNullChecks(prevBinder)
+ val symbolicCases = approx.approximateMatch(cases, approx.onUnknown { tm =>
+ approx.fullRewrite.applyOrElse[TreeMaker, Prop](tm, {
+ case BodyTreeMaker(_, _) => True // irrelevant -- will be discarded by symbolCase later
+ case _ => // debug.patmat("backing off due to "+ tm)
+ backoff = true
+ False
+ })
+ }) map caseWithoutBodyToProp
+
+ if (backoff) Nil else {
+ val prevBinderTree = approx.binderToUniqueTree(prevBinder)
+
+ // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
+ // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
+ // val nonNullScrutineeCond =
+ // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
+ // if (isTupleType(prevBinder.tpe))
+ // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullProp(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(And)
+ // else
+ // NonNullProp(prevBinderTree)
+ // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
+
+ // when does the match fail?
+ val matchFails = Not(\/(symbolicCases))
+
+ // debug output:
+ debug.patmat("analysing:")
+ showTreeMakers(cases)
+
+ // debug.patmat("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
+ // debug.patmat("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
+
+ try {
+ // find the models (under which the match fails)
+ val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
+
+ val scrutVar = Var(prevBinderTree)
+ val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+
+ val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
+ pruned
+ } catch {
+ case ex : AnalysisBudget.Exception =>
+ warn(prevBinder.pos, ex, "exhaustivity")
+ Nil // CNF budget exceeded
+ }
+ }
+ }
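+
+ // illustration (not part of the original sources): for
+ // (x: Option[Int]) match { case Some(1) => }
+ // the solver finds models under which the match fails, rendered (roughly) as the counter-examples
+ // "None" and "Some((x: Int forSome x not in 1))"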
+
+ object CounterExample {
+ def prune(examples: List[CounterExample]): List[CounterExample] = {
+ val distinct = examples.filterNot(_ == NoExample).toSet
+ distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+ }
+ }
+
+ // a way to construct a value that will make the match fail: a constructor invocation, a constant, an object of some type)
+ class CounterExample {
+ protected[MatchAnalyzer] def flattenConsArgs: List[CounterExample] = Nil
+ def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
+ }
+ case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
+ case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
+ case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ // require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
+ override def toString = {
+ val negation =
+ if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
+ else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
+ "(x: "+ eqTo +" forSome x not in "+ negation +")"
+ }
+ }
+ case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ protected[MatchAnalyzer] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
+ case hd :: tl :: Nil => hd :: tl.flattenConsArgs
+ case _ => Nil
+ }
+ protected[MatchAnalyzer] lazy val elems = flattenConsArgs
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case other@ListExample(_) =>
+ this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+
+ override def toString = elems.mkString("List(", ", ", ")")
+ }
+ case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = ctorArgs.mkString("(", ", ", ")")
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case TupleExample(otherArgs) =>
+ this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+ }
+ case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
+ }
+
+ case object WildcardExample extends CounterExample { override def toString = "_" }
+ case object NoExample extends CounterExample { override def toString = "??" }
+
+ def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
+ model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
+ val (trues, falses) = xs.partition(_._2)
+ (trues map (_._1.const), falses map (_._1.const))
+ // should never be more than one value in trues...
+ }
+
+ def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
+ varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
+ val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
+ v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
+ }.mkString("\n")
+
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ // x1 = ...
+ // x1.hd = ...
+ // x1.tl = ...
+ // x1.hd.hd = ...
+ // ...
+ val varAssignment = modelToVarAssignment(model)
+
+ debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+
+ // chop a path into a list of symbols
+ def chop(path: Tree): List[Symbol] = path match {
+ case Ident(_) => List(path.symbol)
+ case Select(pre, name) => chop(pre) :+ path.symbol
+ case _ =>
+ // debug.patmat("don't know how to chop "+ path)
+ Nil
+ }
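+
+ // illustration (not part of the original sources): chop(x1.hd.tl) yields List(x1, hd, tl),
+ // i.e. the symbols along the selection path, in root-to-leaf order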
+
+ // turn the variable assignments into a tree
+ // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
+ // a node is a variable example (which is later turned into a counter example)
+ object VariableAssignment {
+ private def findVar(path: List[Symbol]) = path match {
+ case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
+ case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
+ }
+
+ private val uniques = new mutable.HashMap[Var, VariableAssignment]
+ private def unique(variable: Var): VariableAssignment =
+ uniques.getOrElseUpdate(variable, {
+ val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
+ VariableAssignment(variable, eqTo.toList, neqTo.toList, mutable.HashMap.empty)
+ })
+
+ def apply(variable: Var): VariableAssignment = {
+ val path = chop(variable.path)
+ val pre = path.init
+ val field = path.last
+
+ val newCtor = unique(variable)
+
+ if (pre.isEmpty) newCtor
+ else {
+ findVar(pre) foreach { preVar =>
+ val outerCtor = this(preVar)
+ outerCtor.fields(field) = newCtor
+ }
+ newCtor
+ }
+ }
+ }
+
+ // node in the tree that describes how to construct a counter-example
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
+ // need to prune since the model now incorporates all super types of a constant (needed for reachability)
+ private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
+ private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
+ private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
+ private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
+ private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+
+
+ def allFieldAssignmentsLegal: Boolean =
+ (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
+
+ private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => c.isAny }
+
+ // NoExample if the constructor call is ill-typed
+ // (thus statically impossible -- can we incorporate this into the formula?)
+ // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
+ def toCounterExample(beBrief: Boolean = false): CounterExample =
+ if (!allFieldAssignmentsLegal) NoExample
+ else {
+ debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ val res = prunedEqualTo match {
+ // a definite assignment to a value
+ case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
+
+ // constructor call
+ // or we did not gather any information about equality but we have information about the fields
+ // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
+ case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
+ ( uniqueEqualTo.nonEmpty
+ || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
+
+ def args(brevity: Boolean = beBrief) = {
+ // figure out the constructor arguments from the field assignment
+ val argLen = (caseFieldAccs.length min ctorParams.length)
+
+ (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ }
+
+ cls match {
+ case ConsClass => ListExample(args())
+ case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ => ConstructorExample(cls, args())
+ }
+
+ // a definite assignment to a type
+ case List(eq) if fields.isEmpty => TypeExample(eq)
+
+ // negative information
+ case Nil if nonTrivialNonEqualTo.nonEmpty =>
+ // negation tends to get pretty verbose
+ if (beBrief) WildcardExample
+ else {
+ val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
+ NegativeExample(eqTo, nonTrivialNonEqualTo)
+ }
+
+ // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
+ // TODO: improve reasoning -- in the meantime, a false negative is better than an annoying false positive
+ case _ => NoExample
+ }
+ debug.patmat("described as: "+ res)
+ res
+ }
+
+ override def toString = toCounterExample().toString
+ }
+
+ // slurp in information from other variables
+ varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
+
+ // this is the variable we want a counter example for
+ VariableAssignment(scrutVar).toCounterExample()
+ }
+
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {
+ if (!suppression.unreachable) {
+ unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
+ reportUnreachable(cases(caseIndex).last.pos)
+ }
+ }
+ if (!suppression.exhaustive) {
+ val counterExamples = exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty)
+ reportMissingCases(prevBinder.pos, counterExamples)
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
new file mode 100644
index 0000000000..57fab4eafa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -0,0 +1,258 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.SYNTHETIC
+import scala.language.postfixOps
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Factory methods used by TreeMakers to make the actual trees.
+ *
+ * We have two modes in which to emit trees: optimized (the default)
+ * and pure (aka "virtualized": match is parametric in its monad).
+ */
+trait MatchCodeGen extends Interface {
+ import PatternMatchingStats._
+ import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol,
+ appliedType, NoType, MethodType, newTermName, Name,
+ Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef}
+ import definitions._
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // generate actual trees
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait CodegenCore extends MatchMonadInterface {
+ private var ctr = 0
+ def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
+
+ // assert(owner ne null); assert(owner ne NoSymbol)
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
+ NoSymbol.newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
+
+ def newSynthCaseLabel(name: String) =
+ NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+ // codegen relevant to the structure of the translation (how extractors are combined)
+ trait AbsCodegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
+
+ // local / context-free
+ def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
+ def mkZero(tp: Type): Tree
+ def tupleSel(binder: Symbol)(i: Int): Tree
+ }
+
+ // structure
+ trait Casegen extends AbsCodegen { import CODE._
+ def one(res: Tree): Tree
+
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+ def ifThenElseZero(c: Tree, thenp: Tree): Tree = IF (c) THEN thenp ELSE zero
+ protected def zero: Tree
+ }
+
+ def codegen: AbsCodegen
+
+ abstract class CommonCodegen extends AbsCodegen { import CODE._
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+ def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+
+ // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
+
+ // duplicated out of frustration with cast generation
+ def mkZero(tp: Type): Tree = {
+ tp.typeSymbol match {
+ case UnitClass => Literal(Constant())
+ case BooleanClass => Literal(Constant(false))
+ case FloatClass => Literal(Constant(0.0f))
+ case DoubleClass => Literal(Constant(0.0d))
+ case ByteClass => Literal(Constant(0.toByte))
+ case ShortClass => Literal(Constant(0.toShort))
+ case IntClass => Literal(Constant(0))
+ case LongClass => Literal(Constant(0L))
+ case CharClass => Literal(Constant(0.toChar))
+ case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ }
+ }
+ }
+ }
+
+ trait PureMatchMonadInterface extends MatchMonadInterface {
+ val matchStrategy: Tree
+
+ def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
+ def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
+ protected def matchMonadSym = oneSig.finalResultType.typeSymbol
+
+ import CODE._
+ def _match(n: Name): SelectStart = matchStrategy DOT n
+
+ private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ }
+
+ trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
+ def codegen: AbsCodegen = pureCodegen
+
+ object pureCodegen extends CommonCodegen with Casegen { import CODE._
+ //// methods in MatchingStrategy (the monad companion) -- used directly in translation
+ // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
+ // TODO: consider catchAll, or virtualized matching will break in exception handlers
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
+
+ // __match.one(`res`)
+ def one(res: Tree): Tree = (_match(vpmName.one)) (res)
+ // __match.zero
+ protected def zero: Tree = _match(vpmName.zero)
+ // __match.guard(`c`, `then`)
+ def guard(c: Tree, thenp: Tree): Tree = _match(vpmName.guard) APPLY (c, thenp)
+
+ //// methods in the monad instance -- used directly in translation
+ // `prev`.flatMap(`b` => `next`)
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
+ // `thisCase`.orElse(`elseCase`)
+ def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
+ // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ }
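+
+ // illustration (not part of the original sources), composing the shapes documented above:
+ // in pure/virtualized mode a two-case match is emitted roughly as
+ // __match.runOrElse(scrut)(x1 => caseBody1 orElse caseBody2)
+ // where each case body is built from __match.guard / flatMap calls on the user-supplied strategy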
+ }
+
+ trait OptimizedMatchMonadInterface extends MatchMonadInterface {
+ override def inMatchMonad(tp: Type): Type = optionType(tp)
+ override def pureType(tp: Type): Type = tp
+ override protected def matchMonadSym = OptionClass
+ }
+
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ override def codegen: AbsCodegen = optimizedCodegen
+
+ // when we know we're targeting Option, do some inlining the optimizer won't do
+ // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // this is a special instance of the advanced inlining optimization that takes a method call on
+ // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+ object optimizedCodegen extends CommonCodegen { import CODE._
+
+ /** Inline runOrElse and get rid of Option allocations
+ *
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
+ * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
+ * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
+ */
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
+ val matchEnd = newSynthCaseLabel("matchEnd")
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
+ matchEnd setInfo MethodType(List(matchRes), restpe)
+
+ def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
+ var _currCase = newCaseSym
+
+ val caseDefs = cases map { (mkCase: Casegen => Tree) =>
+ val currCase = _currCase
+ val nextCase = newCaseSym
+ _currCase = nextCase
+
+ LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
+ }
+
+ // must compute catchAll after caseLabels (side-effects nextCase)
+ // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+ // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+ val catchAllDef = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+
+ LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
+ } toList // at most 1 element
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+
+ // the generated block is taken apart in TailCalls under the following assumptions
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ Block(
+ scrutDef ++ caseDefs ++ catchAllDef,
+ LabelDef(matchEnd, List(matchRes), REF(matchRes))
+ )
+ }
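+
+ // illustration (not part of the original sources): for two cases plus a synthetic default, the generated
+ // block has roughly the shape
+ // val x1 = scrut
+ // case1(){ ...; matchEnd(res1); ...; case2() }
+ // case2(){ ...; matchEnd(res2); ...; case3() }
+ // case3(){ matchEnd(default(x1)) }
+ // matchEnd(x: T){ x }
+ // failing a case jumps to the next label; succeeding jumps straight to matchEnd with the result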
+
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+ // only used to wrap the RHS of a body
+ // res: T
+ // returns MatchMonad[T]
+ def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply
+ protected def zero: Tree = nextCase APPLY ()
+
+ // prev: MatchMonad[T]
+ // b: T
+ // next: MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+
+ BLOCK(
+ VAL(prevSym) === prev,
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ )
+ }
+
+ // cond: Boolean
+ // res: T
+ // nextBinder: T
+ // next == MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
+
+ // guardTree: Boolean
+ // next: MatchMonad[T]
+ // returns MatchMonad[T]
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ ifThenElseZero(guardTree, next)
+
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ condSym === mkTRUE,
+ nextBinder === res,
+ next
+ ))
+ }
+
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
new file mode 100644
index 0000000000..c570dd8572
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -0,0 +1,615 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.MUTABLE
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Optimize and analyze matches based on their TreeMaker-representation.
+ *
+ * The patmat translation doesn't rely on this, so it could be disabled in principle.
+ * - well, not quite: the backend crashes if we emit duplicates in switches (e.g. SI-7290)
+ */
+// TODO: split out match analysis
+trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos,
+ ConstantType, Literal, Constant, gen, EmptyTree, distinctBy,
+ Typed, treeInfo, nme, Ident,
+ Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw}
+
+ import global.definitions._
+
+
+ ////
+ trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator {
+ /** a flow-sensitive, generalised, common sub-expression elimination
+ * reuse knowledge from performed tests
+ * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
+ * when a sub-expression is shared, it is stored in a mutable variable
+ * the variable is floated up so that its scope includes all of the program that shares it
+ * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
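+ * for example, in `x match { case A(1) => ...; case A(2) => ... }` both cases start with the
+ * same type test on the scrutinee; the shared test is evaluated once, its outcome (and the
+ * casted binder) are stored in hoisted mutable variables, and the second case re-reads them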
+ */
+ def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ debug.patmat("before CSE:")
+ showTreeMakers(cases)
+
+ val testss = approximateMatchConservative(prevBinder, cases)
+
+ // interpret:
+ val dependencies = new mutable.LinkedHashMap[Test, Set[Prop]]
+ val tested = new mutable.HashSet[Prop]
+
+ // TODO: use SAT solver instead of hashconsing props and approximating implication by subset/equality
+ def storeDependencies(test: Test) = {
+ val cond = test.prop
+
+ def simplify(c: Prop): Set[Prop] = c match {
+ case And(a, b) => simplify(a) ++ simplify(b)
+ case Or(_, _) => Set(False) // TODO: make more precise
+ case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
+ case _ => Set(c)
+ }
+ val conds = simplify(cond)
+
+ if (conds(False)) false // stop when we encounter a definite "no" or a "not sure"
+ else {
+ val nonTrivial = conds filterNot (_ == True)
+ if (nonTrivial nonEmpty) {
+ tested ++= nonTrivial
+
+ // is there an earlier test that checks our condition and whose dependencies are implied by ours?
+ dependencies find {
+ case (priorTest, deps) =>
+ ((simplify(priorTest.prop) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
+ (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
+ ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
+ } foreach {
+ case (priorTest, _) =>
+ // if so, note the dependency in both tests
+ priorTest registerReuseBy test
+ }
+
+ dependencies(test) = tested.toSet // copies
+ }
+ true
+ }
+ }
+
+
+ testss foreach { tests =>
+ tested.clear()
+ tests dropWhile storeDependencies
+ }
+ debug.patmat("dependencies: "+ dependencies)
+
+ // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
+ // then, collapse these contiguous sequences of reusing tests
+ // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
+ // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
+ val reused = new mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ var okToCall = false
+ val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
+
+ // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
+ // once this has been computed, we'll know which tree makers are reused,
+ // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
+ val collapsed = testss map { tests =>
+ // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
+ // if there's no sharing, simply map to the tree makers corresponding to the tests
+ var currDeps = Set[Prop]()
+ val (sharedPrefix, suffix) = tests span { test =>
+ (test.prop == True) || (for(
+ reusedTest <- test.reuses;
+ nextDeps <- dependencies.get(reusedTest);
+ diff <- (nextDeps -- currDeps).headOption;
+ _ <- Some(currDeps = nextDeps))
+ yield diff).nonEmpty
+ }
+
+ val collapsedTreeMakers =
+ if (sharedPrefix.isEmpty) None
+ else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
+ for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
+ case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
+ case _ =>
+ }
+
+ debug.patmat("sharedPrefix: "+ sharedPrefix)
+ debug.patmat("suffix: "+ sharedPrefix)
+ // if the shared prefix contains interesting conditions (!= True)
+ // and the last of such interesting shared conditions reuses another treemaker's test
+ // replace the whole sharedPrefix by a ReusingCondTreeMaker
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.prop == True).headOption;
+ lastReused <- lastShared.reuses)
+ yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
+ }
+
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains True-tests, which are dropped above)
+ }
+ okToCall = true // TODO: remove (debugging)
+
+ // replace original treemakers that are reused (as determined when computing collapsed),
+ // by ReusedCondTreeMakers
+ val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
+ debug.patmat("after CSE:")
+ showTreeMakers(reusedMakers)
+ reusedMakers
+ }
+
+ object ReusedCondTreeMaker {
+ def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
+ }
+ class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+ lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val treesToHoist: List[Tree] = {
+ nextBinder setFlag MUTABLE
+ List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ }
+
+ // TODO: finer-grained duplication
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+
+ override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+ }
+
+ case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
+ val pos = sharedPrefix.last.treeMaker.pos
+
+ lazy val localSubstitution = {
+ // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
+ var mostRecentReusedMaker: ReusedCondTreeMaker = null
+ def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
+ val (from, to) = sharedPrefix.flatMap { dropped =>
+ dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
+ case reusedMaker: ReusedCondTreeMaker =>
+ mostRecentReusedMaker = reusedMaker
+ case _ =>
+ }
+
+ // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
+ // and thus assumed in scope, either because it binds it or because it refers to it
+ dropped.treeMaker match {
+ case dropped: FunTreeMaker =>
+ mapToStored(dropped.nextBinder)
+ case _ => Nil
+ }
+ }.unzip
+ val rerouteToReusedBinders = Substitution(from, to)
+
+ val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
+
+ collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
+ }
+
+ lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
+ // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
+ }
+ override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+ }
+ }
+
+
+ //// DCE
+// trait DeadCodeElimination extends TreeMakers {
+// // TODO: non-trivial dead-code elimination
+// // e.g., the following match should compile to a simple instanceof:
+// // case class Ident(name: String)
+// // for (Ident(name) <- ts) println(name)
+// def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+// // do minimal DCE
+// cases
+// }
+// }
+
+ //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
+ trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ import treeInfo.isGuardedCase
+
+ abstract class SwitchMaker {
+ abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
+ val SwitchableTreeMaker: SwitchableTreeMakerExtractor
+
+ def alternativesSupported: Boolean
+
+ // when collapsing guarded switch cases we may sometimes need to jump to the default case
+ // however, that's not supported in exception handlers, so if we would need a jump but cannot emit one, don't emit a switch at all
+ // TODO: make more fine-grained, as we don't always need to jump
+ def canJump: Boolean
+
+ /** Should exhaustivity analysis be skipped? */
+ def unchecked: Boolean
+
+
+ def isDefault(x: CaseDef): Boolean
+ def defaultSym: Symbol
+ def defaultBody: Tree
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
+
+ private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ if (xs exists (_.isEmpty)) None else Some(xs.flatten)
+
+ object GuardAndBodyTreeMakers {
+ def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
+ tms match {
+ case (btm@BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
+ case (gtm@GuardTreeMaker(guard)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
+ case _ => None
+ }
+ }
+ }
+
+ private val defaultLabel: Symbol = newSynthCaseLabel("default")
+
+ /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
+ *
+ * Cases with patterns A and B switch on the same constant iff every value x that matches A also matches B, and vice versa.
+ * (This roughly corresponds to equality on trees modulo alpha renaming and reordering of alternatives.)
+ *
+ * The rewrite only applies if some of the cases are guarded (this must be checked before invoking this method).
+ *
+ * The rewrite goes through the switch top-down and merges each case with the subsequent cases it is implied by
+ * (i.e. it matches if they match, not taking guards into account)
+ *
+ * If there are no unreachable cases, all cases can be uniquely assigned to a partition of such 'overlapping' cases,
+ * save for the default case (thus we jump to it rather than copying it several times).
+ * (The cases in a partition are implied by the principal element of the partition.)
+ *
+ * The overlapping cases are merged into one case with their guards pushed into the body as follows
+ * (with P the principal element of the overlapping patterns Pi):
+ *
+ * `{case Pi if(G_i) => B_i }*` is rewritten to `case P => {if(G_i) B_i}*`
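+ * for instance (schematically), `case 1 if ga => a; case 1 if gb => b` collapses to
+ * `case 1 => if (ga) a else if (gb) b else default`, where `default` either jumps to the
+ * default case's label or inlines its body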
+ *
+ * The rewrite fails (and returns Nil) when:
+ * (1) there is a subsequence of overlapping cases that has an unguarded case in the middle;
+ * only the last case of each subsequence of overlapping cases may be unguarded (this is implied by unreachability)
+ *
+ * (2) there are overlapping cases that differ (tested by `caseEquals`)
+ * cases with patterns A and B are overlapping if for SOME value x, A matches x implies B matches x OR vice versa <-- note the difference with case equality defined above
+ * for example `case 'a' | 'b' =>` and `case 'b' =>` are different and overlapping (overlapping and equality disregard guards)
+ *
+ * The second component of the returned tuple indicates whether we'll need to emit a labeldef to jump to the default case.
+ */
+ private def collapseGuardedCases(cases: List[CaseDef]): (List[CaseDef], Boolean) = {
+ // requires(same.forall(caseEquals(same.head)))
+ // requires(same.nonEmpty, same)
+ def collapse(same: List[CaseDef], isDefault: Boolean): CaseDef = {
+ val commonPattern = same.head.pat
+ // jump to default case (either the user-supplied one or the synthetic one)
+ // unless we're collapsing the default case: then we re-use the same body as the synthetic catchall (throwing a matcherror, rethrowing the exception)
+ val jumpToDefault: Tree =
+ if (isDefault || !canJump) defaultBody
+ else Apply(Ident(defaultLabel), Nil)
+
+ val guardedBody = same.foldRight(jumpToDefault){
+ // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
+ // --> replace jumpToDefault by the un-guarded case's body
+ case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
+ case (cd@CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els)
+ }
+
+ // if the cases that we're going to collapse bind variables,
+ // must replace them by the single binder introduced by the collapsed case
+ val binders = same.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
+ val (pat, guardedBodySubst) =
+ if (binders.isEmpty) (commonPattern, guardedBody)
+ else {
+ // create a single fresh binder to subsume the old binders (and their types)
+ // TODO: I don't think the binder's types can actually be different (due to checks in caseEquals)
+ // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
+ // TODO: reuse name exactly if there's only one binder in binders
+ val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)), binders.head.name.toString)
+
+ // the patterns in same are equal (according to caseEquals)
+ // we can thus safely pick the first one arbitrarily, provided we correct binding
+ val origPatWithoutBind = commonPattern match {
+ case Bind(b, orig) => orig
+ case o => o
+ }
+ // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
+ val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
+ (Bind(binder, origPatWithoutBind), unifiedBody)
+ }
+
+ atPos(commonPattern.pos)(CaseDef(pat, EmptyTree, guardedBodySubst))
+ }
+
+ // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
+ var remainingCases = cases
+ val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
+
+ // when some of the collapsed cases (except for the default case itself) did not include an un-guarded case
+ // we'll need to emit a labeldef for the default case
+ var needDefault = false
+
+ while (remainingCases.nonEmpty) {
+ val currCase = remainingCases.head
+ val currIsDefault = isDefault(CaseDef(currCase.pat, EmptyTree, EmptyTree))
+ val (impliesCurr, others) =
+ // the default case is implied by all cases, no need to partition (and remainingCases better all be default cases as well)
+ if (currIsDefault) (remainingCases.tail, Nil)
+ else remainingCases.tail partition (caseImplies(currCase))
+
+ val unguardedComesLastOrAbsent =
+ (!isGuardedCase(currCase) && impliesCurr.isEmpty) || { val LastImpliesCurr = impliesCurr.length - 1
+ impliesCurr.indexWhere(oc => !isGuardedCase(oc)) match {
+ // if all cases are guarded we will have to jump to the default case in the final else
+ // (except if we're collapsing the default case itself)
+ case -1 =>
+ if (!currIsDefault) needDefault = true
+ true
+
+ // last case is not guarded, no need to jump to the default here
+ // note: must come after case -1 => (since LastImpliesCurr may be -1)
+ case LastImpliesCurr => true
+
+ case _ => false
+ }}
+
+ if (unguardedComesLastOrAbsent /*(1)*/ && impliesCurr.forall(caseEquals(currCase)) /*(2)*/) {
+ collapsed += (
+ if (impliesCurr.isEmpty && !isGuardedCase(currCase)) currCase
+ else collapse(currCase :: impliesCurr, currIsDefault)
+ )
+
+ remainingCases = others
+ } else { // fail
+ collapsed.clear()
+ remainingCases = Nil
+ }
+ }
+
+ (collapsed.toList, needDefault)
+ }
+
+ private def caseEquals(x: CaseDef)(y: CaseDef) = patternEquals(x.pat)(y.pat)
+ private def patternEquals(x: Tree)(y: Tree): Boolean = (x, y) match {
+ case (Alternative(xs), Alternative(ys)) =>
+ xs.forall(x => ys.exists(patternEquals(x))) &&
+ ys.forall(y => xs.exists(patternEquals(y)))
+ case (Alternative(pats), _) => pats.forall(p => patternEquals(p)(y))
+ case (_, Alternative(pats)) => pats.forall(q => patternEquals(x)(q))
+ // regular switch
+ case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
+ case (Ident(nme.WILDCARD), Ident(nme.WILDCARD)) => true
+ // type-switch for catch
+ case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)), Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => tpX.tpe =:= tpY.tpe
+ case _ => false
+ }
+
+ // if y matches then x matches for sure (thus, if x comes before y, y is unreachable)
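+ // e.g. a wildcard pattern implies any pattern, and `'a' | 'b'` implies `'a'`:
+ // whatever `'a'` matches is also matched by `'a' | 'b'`, so a `case 'a'` that follows it is unreachable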
+ private def caseImplies(x: CaseDef)(y: CaseDef) = patternImplies(x.pat)(y.pat)
+ private def patternImplies(x: Tree)(y: Tree): Boolean = (x, y) match {
+ // since alternatives are flattened, must treat them as separate cases
+ case (Alternative(pats), _) => pats.exists(p => patternImplies(p)(y))
+ case (_, Alternative(pats)) => pats.exists(q => patternImplies(x)(q))
+ // regular switch
+ case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
+ case (Ident(nme.WILDCARD), _) => true
+ // type-switch for catch
+ case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)),
+ Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => instanceOfTpImplies(tpY.tpe, tpX.tpe)
+ case _ => false
+ }
+
+ private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
+
+ // must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
+ private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
+ var cases = cs
+ var unreachable: Option[CaseDef] = None
+
+ while (cases.nonEmpty && unreachable.isEmpty) {
+ val currCase = cases.head
+ if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
+ unreachable = Some(cases.tail.head)
+ else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
+ unreachable = cases.tail.find(caseImplies(currCase))
+ else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
+ unreachable = Some(currCase)
+
+ cases = cases.tail
+ }
+
+ unreachable
+ }
+
+ // empty list ==> failure
+ def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] =
+ // generate if-then-else for 1 case switch (avoids verify error... can't imagine a one-case switch being faster than if-then-else anyway)
+ if (cases.isEmpty || cases.tail.isEmpty) Nil
+ else {
+ val caseDefs = cases map { case (scrutSym, makers) =>
+ makers match {
+ // default case
+ case GuardAndBodyTreeMakers(guard, body) =>
+ Some(defaultCase(scrutSym, guard, body))
+ // constant (or typetest for typeSwitch)
+ case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
+ Some(CaseDef(pattern, guard, body))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, pos) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
+ val switchableAlts = altss map {
+ case SwitchableTreeMaker(pattern) :: Nil =>
+ Some(pattern)
+ case _ =>
+ None
+ }
+
+ // succeed if they were all switchable
+ sequence(switchableAlts) map { switchableAlts =>
+ def extractConst(t: Tree) = t match {
+ case Literal(const) => const
+ case _ => t
+ }
+ // SI-7290 Discard duplicate alternatives that would crash the backend
+ val distinctAlts = distinctBy(switchableAlts)(extractConst)
+ if (distinctAlts.size < switchableAlts.size) {
+ val duplicated = switchableAlts.groupBy(extractConst).flatMap(_._2.drop(1).take(1)) // report the first duplicated
+ global.currentUnit.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}")
+ }
+ CaseDef(Alternative(distinctAlts), guard, body)
+ }
+ case _ =>
+ // debug.patmat("can't emit switch for "+ makers)
+ None //failure (can't translate pattern to a switch)
+ }
+ }
+
+ val caseDefsWithGuards = sequence(caseDefs) match {
+ case None => return Nil
+ case Some(cds) => cds
+ }
+
+ // a switch with duplicate cases yields a verify error,
+ // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
+ // (even though the verify error would disappear, the behaviour would change)
+ val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
+
+ if (!allReachable) Nil
+ else if (noGuards(caseDefsWithGuards)) {
+ if (isDefault(caseDefsWithGuards.last)) caseDefsWithGuards
+ else caseDefsWithGuards :+ defaultCase()
+ } else {
+ // collapse identical cases with different guards, push guards into body for all guarded cases
+ // this translation is only sound if there are no unreachable (duplicate) cases
+ // it should only be run if there are guarded cases, and on failure it returns Nil
+ val (collapsed, needDefaultLabel) = collapseGuardedCases(caseDefsWithGuards)
+
+ if (collapsed.isEmpty || (needDefaultLabel && !canJump)) Nil
+ else {
+ def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
+ if (needDefaultLabel) deriveCaseDef(cd){ b =>
+ // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
+ defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
+ LabelDef(defaultLabel, Nil, b)
+ } else cd
+
+ val last = collapsed.last
+ if (isDefault(last)) {
+ if (!needDefaultLabel) collapsed
+ else collapsed.init :+ wrapInDefaultLabelDef(last)
+ } else collapsed :+ wrapInDefaultLabelDef(defaultCase())
+ }
+ }
+ }
+ }
+
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
+ val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val alternativesSupported = true
+ val canJump = true
+
+ // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))`
+ // The tree itself can be a literal, an ident, a selection, ...
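+ // e.g. (assuming the usual constant folding) given `final val Two = 2`, the pattern `case Two =>`
+ // is an Ident whose type is ConstantType(Constant(2)), so it can be switched on just like the literal `2`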
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match {
+ case ConstantType(const) if const.isIntRange =>
+ Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
+ case _ => None
+ }}
+
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
+ case _ => None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ def defaultSym: Symbol = scrutSym
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (DEFAULT IF guard) ==> body
+ }}
+ }
+
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
+ // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
+ if (regularSwitchMaker.switchableTpe(dealiasWiden(scrutSym.tpe))) {
+ val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
+ if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
+ else {
+ // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
+ val scrutToInt: Tree =
+ if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ else (REF(scrutSym) DOT (nme.toInt))
+ Some(BLOCK(
+ VAL(scrutSym) === scrut,
+ Match(scrutToInt, caseDefsWithDefault) // a switch
+ ))
+ }
+ } else None
+ }
+
+ // for the catch-cases in a try/catch
+ private object typeSwitchMaker extends SwitchMaker {
+ val unchecked = false
+ val alternativesSupported = false // TODO: needs either back-end support or flattening of alternatives during typer
+ val canJump = false
+
+ // TODO: there are more treemaker-sequences that can be handled by type tests
+ // analyze the result of approximateTreeMaker rather than the TreeMaker itself
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case tm@TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
+ case _ =>
+ None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ def defaultBody: Tree = Throw(CODE.REF(defaultSym))
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+ }}
+ }
+
+ // TODO: drop null checks
+ override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
+ val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else Some(caseDefsWithDefault)
+ }
+ }
+
+ trait MatchOptimizer extends OptimizedCodegen
+ with SwitchEmission
+ with CommonSubconditionElimination {
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
+ // TODO: do CSE on result of doDCE(prevBinder, cases, pt)
+ val optCases = doCSE(prevBinder, cases, pt)
+ val toHoist = (
+ for (treeMakers <- optCases)
+ yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
+ ).flatten.flatten.toList
+ (optCases, toHoist)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
new file mode 100644
index 0000000000..90c52e3eb6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -0,0 +1,674 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
+ */
+trait MatchTranslation { self: PatternMatching =>
+ import PatternMatchingStats._
+ import global.{phase, currentRun, Symbol,
+ Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
+ Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
+ Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
+ Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
+ elimAnonymousClass, asCompactDebugString, hasLength}
+ import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
+ repeatedToSeq, isRepeatedParamType, getProductArgs}
+ import global.analyzer.{ErrorUtils, formalTypes}
+
+ trait MatchTranslator extends TreeMakers {
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope" ? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+ // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
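+ // e.g. `val name = "a"; x match { case name => ... }` binds a fresh variable `name` that matches
+ // anything, instead of comparing against the existing `name`; the fix is to write case `name` =>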
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
+ private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
+ private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
+ private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
+ private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
+ private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+
+ /** Implement a pattern match by turning its cases (including the implicit failure case)
+ * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
+ *
+ * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
+ * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
+ *
+ * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
+ * thus, you must typecheck the result (and that will in turn translate nested matches)
+ * this could probably be optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
+ */
+ def translateMatch(match_ : Match): Tree = {
+ val Match(selector, cases) = match_
+
+ val (nonSyntheticCases, defaultOverride) = cases match {
+ case init :+ last if treeInfo isSyntheticDefaultCase last =>
+ (init, Some(((scrut: Tree) => last.body)))
+ case _ =>
+ (cases, None)
+ }
+
+ checkMatchVariablePatterns(nonSyntheticCases)
+
+ // we don't transform after uncurry
+ // (that would require more sophistication when generating trees,
+ // and the only place that emits Matches after typers is for exception handling anyway)
+ if (phase.id >= currentRun.uncurryPhase.id)
+ devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases")
+
+ debug.patmat("translating "+ cases.mkString("{", "\n", "}"))
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
+
+ val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
+
+ val origPt = match_.tpe
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ val ptUnCPS =
+ if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
+ removeCPSAdaptAnnotations(origPt)
+ else origPt
+
+ // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
+ // pt is the skolemized version
+ val pt = repeatedToSeq(ptUnCPS)
+
+ // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
+ val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+ // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
+ val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
+ combined
+ }
+
+ // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
+ // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
+ // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
+ // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
+ // unlike translateMatch, we type our result before returning it
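+ // e.g. `catch { case _: java.io.IOException => 0 }` can remain a list of type-based catch cases,
+ // whereas `catch { case E(x) => ... }` falls back to a single case that binds the Throwable,
+ // runs a regular pattern match on it, and rethrows the exception when nothing matches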
+ def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
+ // if they're already simple enough to be handled by the back-end, we're done
+ if (caseDefs forall treeInfo.isCatchCase) caseDefs
+ else {
+ val swatches = { // switch-catches
+ val bindersAndCases = caseDefs map { caseDef =>
+ // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
+ // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
+ val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
+ }
+
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
+ cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
+ }
+
+ val catches = if (swatches.nonEmpty) swatches else {
+ val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
+
+ val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+
+ List(
+ atPos(pos) {
+ CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
+ EmptyTree,
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ )
+ })
+ }
+
+ typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ }
+
+
+
+ /** The translation of `pat if guard => body` has two aspects:
+ * 1) the substitution due to the variables bound by patterns
+ * 2) the combination of the extractor calls using `flatMap`.
+ *
+ * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
+ * this must be a right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
+ * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
+ * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
+ * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
+ * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
+ *
+ * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
+ * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
+ * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
+ * in the scope of the remainder of the pattern, and they must thus be replaced by:
+ * - (for 1-ary unapply) x_1
+ * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
+ * - (for unapplySeq) x_1.apply(i)
+ *
+ * in the treemakers,
+ *
+ * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
+ *
+ * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
+ * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
+ * a function that will take care of binding and substitution of the next ast (to the right).
+ *
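+ * schematically, `x match { case Some(y) if y > 0 => y }` translates to something like
+ * `runOrElse(x)(x1 => extractSome(x1).flatMap(y => guard(y > 0).flatMap(_ => success(y))).orElse(zero))`
+ * (the names are purely illustrative; the optimized codegen replaces these combinators by labels and jumps)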
+ */
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
+ translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ }
+
+ def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
+ def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
+ def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
+
+ val pos = patTree.pos
+
+ def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
+ if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
+ // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+
+ debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+
+ // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
+ debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
+ b setInfo tp
+ }
+
+ // example check: List[Int] <:< ::[Int]
+ // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
+ // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
+ // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
+ // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
+ // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
+ /* TODO: uncomment when `settings.developer` and `devWarning` become available
+ if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
+ devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
+ */
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+ // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
+ }
+
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
+ }
+
+
+ object MaybeBoundTyped {
+ /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
+ * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
+ * The returned type is the one inferred by inferTypedPattern (`owntype`)
+ *
+ * @param patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that pattern's result)
+ */
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
+ case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
+ case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
+ case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
+ case _ => None
+ }
+ }
+
+ val (treeMakers, subpats) = patTree match {
+ // skip wildcard trees -- no point in checking them
+ case WildcardPattern() => noFurtherSubPats()
+ case UnApply(unfun, args) =>
+ // TODO: check unargs == args
+ // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
+ translateExtractorPattern(ExtractorCall(unfun, args))
+
+ /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
+ A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
+ **/
+ case Apply(fun, args) =>
+ ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
+ ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
+ noFurtherSubPats()
+ }
+
+ /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ **/
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ case MaybeBoundTyped(subPatBinder, pt) =>
+ val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
+ // a typed pattern never has any subtrees
+ noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
+
+ /** A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ **/
+ case Bound(subpatBinder, p) =>
+ // replace subpatBinder by patBinder (as if the Bind was not there)
+ withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
+ // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
+ (patBinder, p)
+ )
+
+ /** 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ **/
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
+ noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
+
+ case Alternative(alts) =>
+ noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
+
+ /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
+ case class Foo(x: Int, y: String)
+ case class Bar(z: Int)
+
+ def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
+ */
+
+ case Bind(n, p) => // this happens in certain ill-formed programs; an error will be reported later
+ debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
+ noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
+
+ // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
+
+ case _ =>
+ typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
+ noFurtherSubPats()
+ }
+
+ treeMakers ++ subpats.flatMap { case (binder, pat) =>
+ translatePattern(binder, pat) // recurse on subpatterns
+ }
+ }
+
+ def translateGuard(guard: Tree): List[TreeMaker] =
+ if (guard == EmptyTree) Nil
+ else List(GuardTreeMaker(guard))
+
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
+ // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
+ // to enable this, probably need to move away from Option to a monad specific to pattern-match,
+ // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
+ // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
+ // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
+ def translateBody(body: Tree, matchPt: Type): TreeMaker =
+ BodyTreeMaker(body, matchPt)
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ object ExtractorCall {
+ def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
+ def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ }
+
+ abstract class ExtractorCall(val args: List[Tree]) {
+ val nbSubPats = args.length
+
+ // everything okay, captain?
+ def isTyped : Boolean
+
+ def isSeq: Boolean
+ lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ // to which type should the previous binder be casted?
+ def paramType : Type
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
+
+ // `subPatBinders` are the variables bound by this pattern in the following patterns
+ // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
+ lazy val subPatBinders = args map {
+ case Bound(b, p) => b
+ case p => freshSym(p.pos, prefix = "p")
+ }
+
+ lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
+ case (b, Bound(_, p)) => (b, p)
+ case bp => bp
+ }
+
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
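+ // e.g. for an unapplySeq whose result wraps a Seq[Int], matching `Seq(a, b, rest @ _*)` yields
+ // subPatTypes Int, Int, Seq[Int]: the repeated parameter is expanded to cover the fixed
+ // subpatterns, and the trailing star is bound to the remaining sequence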
+ def subPatTypes: List[Type] =
+ if(isSeq) {
+ val TypeRef(pre, SeqClass, args) = seqTp
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
+
+ if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
+ else formalTypes(formalsWithRepeated, nbSubPats)
+ } else rawSubPatTypes
+
+ protected def rawSubPatTypes: List[Type]
+
+ protected def seqTp = rawSubPatTypes.last baseType SeqClass
+ protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
+ protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
+ protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+ protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require isSeq
+ protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
+ val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
+ val nbIndexingIndices = indexingIndices.length
+
+ // this error-condition has already been checked by checkStarPatOK:
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ (((1 to firstIndexingBinder) map tupleSel(binder)) ++
+ // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ (indexingIndices map codegen.index(seqTree(binder))) ++
+ // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
+ (if(!lastIsStar) Nil else List(
+ if(nbIndexingIndices == 0) seqTree(binder)
+ else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ }
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (nbSubPats == 0) Nil
+ else if (isSeq) subPatRefsSeq(binder)
+ else ((1 to nbSubPats) map tupleSel(binder)).toList
+
+ protected def lengthGuard(binder: Symbol): Option[Tree] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ checkedLength map { expectedLength => import CODE._
+ // `binder.lengthCompare(expectedLength)`
+ def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+
+ // the comparison to perform
+ // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
+ // (otherwise equality is required)
+ def compareOp: (Tree, Tree) => Tree =
+ if (lastIsStar) _ INT_>= _
+ else _ INT_== _
+
+ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
+ (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
+ }
+
+ def checkedLength: Option[Int] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ if (!isSeq || (expectedLength < minLenToCheck)) None
+ else Some(expectedLength)
+
+ }
+
+ // TODO: to be called when there's a def unapplyProd(x: T): U
+ // U must have N members _1,..., _N -- the _i are type checked; call their types T_1,..., T_N.
+ //
+ // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
+ class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
+ /*override def equals(x$1: Any): Boolean = ...
+ val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
+ */
+ // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
+ // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
+ // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
+ // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
+ // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+ private def constructorTp = fun.tpe
+
+ def isTyped = fun.isTyped
+
+ // to which type should the previous binder be casted?
+ def paramType = constructorTp.finalResultType
+
+ def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
+ protected def rawSubPatTypes = constructorTp.paramTypes
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
+ val paramAccessors = binder.constrParamAccessors
+ // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
+ val mutableBinders =
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
+ subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
+ else Nil
+
+ // checks binder ne null before chaining to the next extractor
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
+ }
+
+ // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ val accessors = binder.caseFieldAccessors
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ }
+
+ override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ }
+
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+
+ def tpe = extractorCall.tpe
+ def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
+ def paramType = tpe.paramTypes.head
+ def resultType = tpe.finalResultType
+ def isSeq = extractorCall.symbol.name == nme.unapplySeq
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
+ // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ }
+
+ override protected def seqTree(binder: Symbol): Tree =
+ if (firstIndexingBinder == 0) CODE.REF(binder)
+ else super.seqTree(binder)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ override protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ else super.subPatRefs(binder)
+
+ protected def spliceApply(binder: Symbol): Tree = {
+ object splice extends Transformer {
+ override def transform(t: Tree) = t match {
+ case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
+ treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
+ case _ => super.transform(t)
+ }
+ }
+ splice.transform(extractorCallIncludingDummy)
+ }
+
+ // what's the extractor's result type in the monad?
+ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
+ protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
+ if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ else matchMonadResult(resultType)
+ }
+
+ protected lazy val rawSubPatTypes =
+ if (resultInMonad.typeSymbol eq UnitClass) Nil
+ else if(!isSeq && nbSubPats == 1) List(resultInMonad)
+ else getProductArgs(resultInMonad) match {
+ case Nil => List(resultInMonad)
+ case x => x
+ }
+
+ override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ }
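+
+    // How extractor result types determine the subpattern types above (illustrative signatures;
+    // `Bar` is hypothetical, and the exact outcome also depends on the number of subpatterns):
+    //
+    //   def unapply(x: Bar): Boolean               =>  resultInMonad = Unit,          rawSubPatTypes = Nil
+    //   def unapply(x: Bar): Option[Int]           =>  resultInMonad = Int,           rawSubPatTypes = List(Int)
+    //   def unapply(x: Bar): Option[(Int, String)] =>  resultInMonad = (Int, String), rawSubPatTypes = List(Int, String)
+    //   def unapplySeq(x: Bar): Option[Seq[Int]]   =>  resultInMonad = Seq[Int],      rawSubPatTypes = List(Seq[Int])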
+
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Star(WildcardPattern()) => true
+ case x: Ident => treeInfo.isVarPattern(x)
+ case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case EmptyTree => true
+ case _ => false
+ }
+ }
+
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
+ object Bound {
+ def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
+ case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
+ Some((t.symbol, p))
+ case _ => None
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
new file mode 100644
index 0000000000..202f3444f8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -0,0 +1,614 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.{SYNTHETIC, ARTIFACT}
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
+ *
+ * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions;
+ * it is largely agnostic to whether we're in optimized or pure (virtualized) mode.
+ */
+trait MatchTreeMaking extends MatchCodeGen with Debugging {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, CaseDef, atPos, settings,
+ Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest,
+ ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser,
+ Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme}
+
+ import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+
+ final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
+ object Suppression {
+ val NoSuppression = Suppression(false, false)
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// the making of the trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakers extends TypedSubstitution with CodegenCore {
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree])
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit
+
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
+ None
+
+ // for catch (no need to customize match failure)
+ def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+ None
+
+ abstract class TreeMaker {
+ def pos: Position
+
+ /** Captures the scope and the value of the bindings in patterns.
+ * It matters *when* the substitution happens: we cannot simply accumulate the substitutions and apply them all at once after the full matcher has been constructed.
+ */
+ def substitution: Substitution =
+ if (currSub eq null) localSubstitution
+ else currSub
+
+ protected def localSubstitution: Substitution
+
+ private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ if (currSub ne null) {
+ debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ Thread.dumpStack()
+ }
+ else currSub = outerSubst >> substitution
+ }
+ private[this] var currSub: Substitution = null
+
+ /** The substitution that specifies the trees that compute the values of the subpattern binders.
+ *
+ * Should not be used to perform actual substitution!
+ * Only used to reason symbolically about the values the subpattern binders are bound to.
+ * See TreeMakerToCond#updateSubstitution.
+ *
+ * Overridden in PreserveSubPatBinders to pretend it replaces the subpattern binders by subpattern refs
+ * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+ *
+ * TODO: clean this up, would be nicer to have some higher-level way to compute
+ * the binders bound by this tree maker and the symbolic values that correspond to them
+ */
+ def subPatternsAsSubstitution: Substitution = substitution
+
+ // build Tree that chains `next` after the current extractor
+ def chainBefore(next: Tree)(casegen: Casegen): Tree
+ }
+
+ trait NoNewBinders extends TreeMaker {
+ protected val localSubstitution: Substitution = EmptySubstitution
+ }
+
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+ def pos = tree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
+ }
+
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+ def pos = body.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
+ override def toString = "B"+(body, matchPt)
+ }
+
+ case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
+ val pos = NoPosition
+
+ val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
+ override def toString = "S"+ localSubstitution
+ }
+
+ abstract class FunTreeMaker extends TreeMaker {
+ val nextBinder: Symbol
+ def pos = nextBinder.pos
+ }
+
+ abstract class CondTreeMaker extends FunTreeMaker {
+ val prevBinder: Symbol
+ val nextBinderTp: Type
+ val cond: Tree
+ val res: Tree
+
+ lazy val nextBinder = freshSym(pos, nextBinderTp)
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree =
+ atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
+ }
+
+ // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
+ protected val debugInfoEmitVars = !settings.optimise.value
+
+ trait PreserveSubPatBinders extends TreeMaker {
+ val subPatBinders: List[Symbol]
+ val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
+
+ // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
+ // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
+
+ def emitVars = storedBinders.nonEmpty
+
+ private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
+
+ protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs)
+ else {
+ val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
+ Substitution(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
+ }
+
+ /** The substitution that specifies the trees that compute the values of the subpattern binders.
+ *
+ * We pretend to replace the subpattern binders by subpattern refs
+ * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+ */
+ override def subPatternsAsSubstitution =
+ Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
+
+ import CODE._
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
+ else {
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
+ }
+ }
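+
+    // Sketch of what `bindSubPats` ends up emitting (illustrative; `Foo`, `a`, `b` are hypothetical):
+    //
+    //   (f: Foo) match { case Foo(a, b) if a > 0 => a }
+    //
+    //   only `a` is referenced by the guard and body, so (with debug-info vars enabled) the chained
+    //   tree is wrapped as Block(List(ValDef(a, <tree selecting the first field>)), <guard and body>);
+    //   `b` is never referenced and gets no val at all. Under -optimise no vals are emitted and the
+    //   binders are substituted by the selection trees via `localSubstitution` instead.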
+
+ /**
+ * Make a TreeMaker that will result in an extractor call specified by `extractor`
+ * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
+ * a function with binder `nextBinder` over our extractor's result
+ * the function's body is determined by the next TreeMaker
+ * (furthermore, the interpretation of `flatMap` depends on the codegen instance we're using).
+ *
+ * The values for the subpatterns, as computed by the extractor call in `extractor`,
+ * are stored in local variables that re-use the symbols in `subPatBinders`.
+ * This makes extractor patterns more debuggable (SI-5739).
+ */
+ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
+ val subPatBinders: List[Symbol],
+ val subPatRefs: List[Tree],
+ extractorReturnsBoolean: Boolean,
+ val checkedLength: Option[Int],
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val condAndNext = extraCond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(substitution(cond), bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
+ atPos(extractor.pos)(
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext)
+ else casegen.flatMap(extractor, nextBinder, condAndNext)
+ )
+ }
+
+ override def toString = "X"+(extractor, nextBinder.name)
+ }
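+
+    // Rough shape of the code this chains together for a single extractor pattern (illustrative;
+    // `Bar` is a hypothetical extractor object with `def unapply(x: Any): Option[Int]`):
+    //
+    //   x match { case Bar(n) => body }
+    //
+    //   pure/virtualized codegen:  Bar.unapply(x1) flatMap (n => one(body)), sequenced with the
+    //                              other cases via orElse and run through runOrElse
+    //   optimized codegen:         val o = Bar.unapply(x1); if (o.isEmpty) <next case> else { val n = o.get; body }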
+
+ /**
+ * An optimized version of ExtractorTreeMaker for Products.
+ * For now, this is hard-coded to case classes, and we simply extract the case class fields.
+ *
+ * The values for the subpatterns, as specified by the case class fields at the time of extraction,
+ * are stored in local variables that re-use the symbols in `subPatBinders`.
+ * This makes extractor patterns more debuggable (SI-5739) as well as
+ * avoiding mutation after the pattern has been matched (SI-5158, SI-6070)
+ *
+ * TODO: make this user-definable as follows
+ * When a companion object defines a method `def unapply_1(x: T): U_1`, but no `def unapply` or `def unapplySeq`,
+ * the extractor is considered to match any non-null value of type T
+ * the pattern is expected to have as many sub-patterns as there are `def unapply_I(x: T): U_I` methods,
+ * and the type of the I'th sub-pattern is `U_I`.
+ * The same exception for Seq patterns applies: if the last extractor is of type `Seq[U_N]`,
+ * the pattern must have at least N arguments (exactly N if the last argument is annotated with `: _*`).
+ * The arguments starting at N (and beyond) are taken from the sequence returned by unapply_N,
+ * and it is checked that that sequence has enough elements to provide values for all expected sub-patterns.
+ *
+ * For a case class C, the implementation is assumed to be `def unapply_I(x: C) = x._I`,
+ * and the extractor call is inlined under that assumption.
+ */
+ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
+ val subPatBinders: List[Symbol],
+ val subPatRefs: List[Tree],
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ import CODE._
+ val nextBinder = prevBinder // just passing through
+
+ // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val nullCheck = REF(prevBinder) OBJ_NE NULL
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
+ }
+
+ override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
+ }
+
+ object IrrefutableExtractorTreeMaker {
+ // will an extractor with an unapply method of method type `tp` always succeed?
+ // note: this assumes the other side-conditions implied by the extractor are met
+ // (argument of the right type, length check succeeds for unapplySeq,...)
+ def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
+ case TypeRef(_, SomeClass, _) => true
+ // probably not useful since this type won't be inferred nor can it be written down (yet)
+ case ConstantType(Constant(true)) => true
+ case _ => false
+ }
+
+ def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
+ case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
+ Some((extractor, nextBinder))
+ case _ =>
+ None
+ }
+ }
+
+ object TypeTestTreeMaker {
+ // factored out so that we can consistently generate other representations of the tree that implements the test
+ // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
+ trait TypeTestCondStrategy {
+ type Result
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ // TODO: can probably always widen
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result
+ def nonNullTest(testedBinder: Symbol): Result
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result
+ def eqTest(pat: Tree, testedBinder: Symbol): Result
+ def and(a: Result, b: Result): Result
+ def tru: Result
+ }
+
+ object treeCondStrategy extends TypeTestCondStrategy { import CODE._
+ type Result = Tree
+
+ def and(a: Result, b: Result): Result = a AND b
+ def tru = mkTRUE
+ def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
+ def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
+ def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
+ def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ val expectedOuter = expectedTp.prefix match {
+ case ThisType(clazz) => THIS(clazz)
+ case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
+ case _ => mkTRUE // fallback for SI-6183
+ }
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedTp.prefix
+
+ (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+ }
+ }
+
+ object pureTypeTestChecker extends TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = false
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false
+ def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
+ }
+ }
+
+ /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ *
+ * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variables are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+ case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
+ import TypeTestTreeMaker._
+ debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+
+ lazy val outerTestNeeded = (
+ !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+
+ // the logic to generate the run-time test that follows from the fact that
+ // a `prevBinder` is expected to have type `expectedTp`
+ // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
+ import cs._
+
+ def default =
+ // do type test first to ensure we won't select outer on null
+ if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
+ else typeTest(testedBinder, expectedTp)
+
+ // propagate expected type
+ def expTp(t: Tree): t.type = t setType expectedTp
+
+ // true when called to type-test the argument to an extractor
+ // don't do any fancy equality checking, just test the type
+ if (extractorArgTypeTest) default
+ else expectedTp match {
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
+ // this implies sym.isStable
+ case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ // must use == to support e.g. List() == Nil
+ case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ => eqTest(expTp(CODE.NULL), testedBinder)
+ case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
+ case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
+
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
+
+ case _ => default
+ }
+ }
+
+ val cond = renderCondition(treeCondStrategy)
+ val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
+
+ // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
+ def isPureTypeTest = renderCondition(pureTypeTestChecker)
+
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
+ override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+ }
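+
+    // Examples of the conditions renderCondition produces (illustrative; `x1` stands for the tested
+    // binder, `p.Inner` for an inner class whose pattern needs an outer test):
+    //
+    //   case _: String  when the scrutinee is statically Any     =>  x1.isInstanceOf[String]
+    //   case _: String  when the scrutinee is statically String  =>  x1 ne null
+    //   case _: Int     when the scrutinee is statically Int     =>  true
+    //   case _: p.Inner                                          =>  x1.isInstanceOf[p.Inner] && (x1.asInstanceOf[p.Inner].<outer> eq p)
+    //
+    //   under pureTypeTestChecker only the first and third of these count as pure type tests.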
+
+ // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
+ case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
+ val nextBinderTp = prevBinder.info.widen
+
+ // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
+ // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
+ val cond = codegen._equals(patTree, prevBinder)
+ val res = CODE.REF(prevBinder)
+ override def toString = "ET"+(prevBinder.name, patTree)
+ }
+
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
+ // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
+
+ override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ super.incorporateOuterSubstitution(outerSubst)
+ altss = altss map (alts => propagateSubstitution(alts, substitution))
+ }
+
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ atPos(pos){
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
+ )
+
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+ codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
+ }
+ }
+ }
+
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ val pos = guardTree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
+ override def toString = "G("+ guardTree +")"
+ }
+
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
+ treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
+
+
+ def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
+
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
+ def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
+ var accumSubst: Substitution = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterSubstitution accumSubst
+ accumSubst = maker.substitution
+ }
+ removeSubstOnly(treeMakers)
+ }
+
+ // calls propagateSubstitution on the treemakers
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
+ // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
+ }
+
+ // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
+ fixerUpper(owner, scrut.pos){
+ def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+ debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+
+ val (suppression, requireSwitch): (Suppression, Boolean) =
+ if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false)
+ else scrut match {
+ case Typed(tree, tpt) =>
+ val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
+ val suppressUnreachable = tree match {
+ case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
+ case _ => false
+ }
+ val suppression = Suppression(suppressExhaustive, suppressUnreachable)
+ // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
+ val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
+ (suppression, requireSwitch)
+ case _ =>
+ (Suppression.NoSuppression, false)
+ }
+
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
+ if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
+
+ if (casesNoSubstOnly nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ val synthCatchAll =
+ if (casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
+
+ analyzeCases(scrutSym, casesNoSubstOnly, pt, suppression)
+
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
+
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
+
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
+ }
+
+ // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
+ // TODO: assign more fine-grained positions
+ // fixes symbol nesting, assigns positions
+ protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ currentOwner = origOwner
+
+ override def traverse(t: Tree) {
+ if (t != EmptyTree && t.pos == NoPosition) {
+ t.setPos(pos)
+ }
+ t match {
+ case Function(_, _) if t.symbol == NoSymbol =>
+ t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
+ debug.patmat("new symbol for "+ (t, t.symbol.ownerChain))
+ case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
+ debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ t.symbol.owner = currentOwner
+ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
+ debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ if(d.symbol.moduleClass ne NoSymbol)
+ d.symbol.moduleClass.owner = currentOwner
+
+ d.symbol.owner = currentOwner
+ // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
+ debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ case _ =>
+ }
+ super.traverse(t)
+ }
+
+ // override def apply
+ // debug.patmat("before fixerupper: "+ xTree)
+ // currentRun.trackerFactory.snapshot()
+ // debug.patmat("after fixerupper")
+ // currentRun.trackerFactory.snapshot()
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
new file mode 100644
index 0000000000..df4e699620
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -0,0 +1,256 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.ast
+import scala.language.postfixOps
+import scala.tools.nsc.transform.TypingTransformers
+import scala.tools.nsc.transform.Transform
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.Types
+import scala.reflect.internal.util.Position
+
+/** Translate pattern matching.
+ *
+ * Either into optimized if/then/else's, or virtualized as method calls (these methods form a zero-plus monad),
+ * similar in spirit to how for-comprehensions are compiled.
+ *
+ * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
+ * (lifting the body of the case into the monad using `one`).
+ *
+ * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
+ *
+ * TODO:
+ * - DCE (on irrefutable patterns)
+ * - update spec and double check it's implemented correctly (see TODO's)
+ *
+ * (longer-term) TODO:
+ * - user-defined unapplyProd
+ * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
+ * - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
+ */
+trait PatternMatching extends Transform with TypingTransformers
+ with Debugging
+ with Interface
+ with MatchTranslation
+ with MatchTreeMaking
+ with MatchCodeGen
+ with ScalaLogic
+ with Solving
+ with MatchAnalysis
+ with MatchOptimization {
+ import global._
+
+ val phaseName: String = "patmat"
+
+ def newTransformer(unit: CompilationUnit): Transformer =
+ if (opt.virtPatmat) new MatchTransformer(unit)
+ else noopTransformer
+
+ class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree): Tree = tree match {
+ case Match(sel, cases) =>
+ val origTp = tree.tpe
+ // setType origTp intended for CPS -- TODO: is it necessary?
+ val translated = translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]]))
+ try {
+ localTyper.typed(translated) setType origTp
+ } catch {
+ case x: (Types#TypeError) =>
+ // TODO: this should never happen; error should've been reported during type checking
+ unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
+ translated
+ }
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer))
+ case _ => super.transform(tree)
+ }
+
+ // TODO: only instantiate new match translator when localTyper has changed
+ // override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A
+ // as this is the only time TypingTransformer changes it
+ def translator: MatchTranslator with CodegenCore = {
+ new OptimizingMatchTranslator(localTyper)
+ }
+ }
+
+ class PureMatchTranslator(val typer: analyzer.Typer, val matchStrategy: Tree) extends MatchTranslator with PureCodegen {
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type) = (cases, Nil)
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {}
+ }
+
+ class OptimizingMatchTranslator(val typer: analyzer.Typer) extends MatchTranslator
+ with MatchOptimizer
+ with MatchAnalyzer
+ with Solver
+}
+
+trait Debugging {
+ val global: Global
+
+ // TODO: the inliner fails to inline the closures passed to debug.patmat unless the method is nested in an object
+ object debug {
+ val printPatmat = global.settings.Ypatmatdebug.value
+ @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ }
+}
+
+trait Interface extends ast.TreeDSL {
+ import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import analyzer.Typer
+
+ // 2.10/2.11 compatibility
+ protected final def dealiasWiden(tp: Type) = tp.dealias // 2.11: dealiasWiden
+ protected final def mkTRUE = CODE.TRUE_typed // 2.11: CODE.TRUE
+ protected final def mkFALSE = CODE.FALSE_typed // 2.11: CODE.FALSE
+ protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
+ protected final def devWarning(str: String) = global.debugwarn(str) // 2.11: omit
+
+ object vpmName {
+ val one = newTermName("one")
+ val drop = newTermName("drop")
+ val flatMap = newTermName("flatMap")
+ val get = newTermName("get")
+ val guard = newTermName("guard")
+ val isEmpty = newTermName("isEmpty")
+ val orElse = newTermName("orElse")
+ val outer = newTermName("<outer>")
+ val runOrElse = newTermName("runOrElse")
+ val zero = newTermName("zero")
+ val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching...
+
+ def counted(str: String, i: Int) = newTermName(str + i)
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// talking to userland
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /** Interface with user-defined match monad?
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+
+ type Matcher[P[_], M[+_], A] = {
+ def flatMap[B](f: P[A] => M[B]): M[B]
+ def orElse[B >: A](alternative: => M[B]): M[B]
+ }
+
+ abstract class MatchStrategy[P[_], M[+_]] {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
+
+ def zero: M[Nothing]
+ def one[T](x: P[T]): M[T]
+ def guard[T](cond: P[Boolean], then: => P[T]): M[T]
+ }
+
+ * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
+
+
+ * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+
+ object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
+ def zero = None
+ def one[T](x: T) = Some(x)
+ // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
+ def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
+ def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
+ }
+
+ */
+ trait MatchMonadInterface {
+ val typer: Typer
+ val matchOwner = typer.context.owner
+
+ def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
+ def reportMissingCases(pos: Position, counterExamples: List[String]) = {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
+ def inMatchMonad(tp: Type): Type
+ def pureType(tp: Type): Type
+ final def matchMonadResult(tp: Type): Type =
+ tp.baseType(matchMonadSym).typeArgs match {
+ case arg :: Nil => arg
+ case _ => ErrorType
+ }
+
+ protected def matchMonadSym: Symbol
+ }
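+
+  // In the default (non-virtualized) interpretation the interface above is instantiated with Option;
+  // informally (the concrete wiring is provided elsewhere in the pattern-matcher traits):
+  //
+  //   inMatchMonad(T)               == Option[T]
+  //   pureType(T)                   == T
+  //   matchMonadSym                 == Option's class symbol
+  //   matchMonadResult(Option[Int]) == Int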
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TypedSubstitution extends MatchMonadInterface {
+ object Substitution {
+ def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
+ }
+
+ class Substitution(val from: List[Symbol], val to: List[Tree]) {
+ import global.{Transformer, Ident, NoType}
+
+ // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed
+ // and will thus not be retyped; otherwise we could end up with untyped subtrees inside bigger, typed trees.
+ def apply(tree: Tree): Tree = {
+ // according to -Ystatistics 10% of translateMatch's time is spent in this method...
+ // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
+ if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
+ else (new Transformer {
+ private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
+ if (origTp == null || origTp == NoType) to
+ // important: only type when actually substing and when original tree was typed
+ // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
+ else typer.typed(to)
+
+ override def transform(tree: Tree): Tree = {
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
+ else subst(from.tail, to.tail)
+
+ tree match {
+ case Ident(_) => subst(from, to)
+ case _ => super.transform(tree)
+ }
+ }
+ }).transform(tree)
+ }
+
+
+ // the substitution that chains `other` before `this` substitution
+ // forall t: Tree. this(other(t)) == (this >> other)(t)
+ def >>(other: Substitution): Substitution = {
+ val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
+ new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
+ }
+ override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
+ }
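+
+      // Worked example of `>>` (p, q, r are hypothetical symbols):
+      //
+      //   val first  = Substitution(p, REF(q))   // p -> q
+      //   val second = Substitution(q, REF(r))   // q -> r
+      //   second >> first                        // p -> r, q -> r
+      //
+      //   so (second >> first)(t) == second(first(t)) for any tree t, as required by the law above.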
+
+ object EmptySubstitution extends Substitution(Nil, Nil) {
+ override def apply(tree: Tree): Tree = tree
+ override def >>(other: Substitution): Substitution = other
+ }
+ }
+}
+
+object PatternMatchingStats {
+ val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
+ val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
+ val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
+ val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
+ val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
+ val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
+ val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
new file mode 100644
index 0000000000..843f831ea1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -0,0 +1,242 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+// naive CNF translation and simple DPLL solver
+trait Solving extends Logic {
+ import PatternMatchingStats._
+ trait CNF extends PropositionalLogic {
+
+ /** Override Array creation for efficiency (to not go through reflection). */
+ private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
+ def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
+ final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
+ }
+
+ import scala.collection.mutable.ArrayBuffer
+ type FormulaBuilder = ArrayBuffer[Clause]
+ def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
+ def toFormula(buff: FormulaBuilder): Formula = buff
+
+ // CNF: a formula is a conjunction of clauses
+ type Formula = FormulaBuilder
+ def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
+
+ type Clause = Set[Lit]
+ // a clause is a disjunction of distinct literals
+ def clause(l: Lit*): Clause = l.toSet
+
+ type Lit
+ def Lit(sym: Sym, pos: Boolean = true): Lit
+
+ def andFormula(a: Formula, b: Formula): Formula = a ++ b
+ def simplifyFormula(a: Formula): Formula = a.distinct
+
+ private def merge(a: Clause, b: Clause) = a ++ b
+
+ // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
+ // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
+ def eqFreePropToSolvable(p: Prop): Formula = {
+ def negationNormalFormNot(p: Prop, budget: Int): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Not(p) => negationNormalForm(p, budget - 1)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Not(negated) => negationNormalFormNot(negated, budget - 1)
+ case True
+ | False
+ | (_ : Sym) => p
+ }
+
+ val TrueF = formula()
+ val FalseF = formula(clause())
+ def lit(s: Sym) = formula(clause(Lit(s)))
+ def negLit(s: Sym) = formula(clause(Lit(s, false)))
+
+ def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
+ def distribute(a: Formula, b: Formula, budget: Int): Formula =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else
+ (a, b) match {
+ // true \/ _ = true
+ // _ \/ true = true
+ case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
+ // lit \/ lit
+ case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
+ // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
+ // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
+ case (cs, ds) =>
+ val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
+ big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
+ }
+
+ if (budget <= 0) throw AnalysisBudget.exceeded
+
+ p match {
+ case True => TrueF
+ case False => FalseF
+ case s: Sym => lit(s)
+ case Not(s: Sym) => negLit(s)
+ case And(a, b) =>
+ val cnfA = conjunctiveNormalForm(a, budget - 1)
+ val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
+ cnfA ++ cnfB
+ case Or(a, b) =>
+ val cnfA = conjunctiveNormalForm(a)
+ val cnfB = conjunctiveNormalForm(b)
+ distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
+ }
+ }
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
+ val res = conjunctiveNormalForm(negationNormalForm(p))
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
+
+ //
+ if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
+
+// debug.patmat("cnf for\n"+ p +"\nis:\n"+cnfString(res))
+ res
+ }
+ }
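+
+  // Worked example of the conversion above (A, B, C are propositional symbols):
+  //
+  //   negation normal form:    Not(And(A, B))    ~~>  Or(Not(A), Not(B))
+  //   conjunctive normal form: Or(And(A, B), C)  ~~>  (A \/ C) /\ (B \/ C),
+  //                            i.e. formula(clause(Lit(A), Lit(C)), clause(Lit(B), Lit(C)))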
+
+ // simple solver using DPLL
+ trait Solver extends CNF {
+ // a literal is a (possibly negated) variable
+ def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
+ class Lit(val sym: Sym, val pos: Boolean) {
+ override def toString = if (!pos) "-"+ sym.toString else sym.toString
+ override def equals(o: Any) = o match {
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
+ case _ => false
+ }
+ override def hashCode = sym.hashCode + pos.hashCode
+
+ def unary_- = Lit(sym, !pos)
+ }
+
+ def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
+
+ // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
+ val EmptyModel = Map.empty[Sym, Boolean]
+ val NoModel: Model = null
+
+ // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
+ def findAllModelsFor(f: Formula): List[Model] = {
+ val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
+ // debug.patmat("vars "+ vars)
+ // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
+ def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+
+ def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
+ if (recursionDepthAllowed == 0) models
+ else {
+ debug.patmat("find all models for\n"+ cnfString(f))
+ val model = findModelFor(f)
+ // if we found a solution, conjunct the formula with the model's negation and recurse
+ if (model ne NoModel) {
+ val unassigned = (vars -- model.keySet).toList
+ debug.patmat("unassigned "+ unassigned +" in "+ model)
+ def force(lit: Lit) = {
+ val model = withLit(findModelFor(dropUnit(f, lit)), lit)
+ if (model ne NoModel) List(model)
+ else Nil
+ }
+ val forced = unassigned flatMap { s =>
+ force(Lit(s, true)) ++ force(Lit(s, false))
+ }
+ debug.patmat("forced "+ forced)
+ val negated = negateModel(model)
+ findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
+ }
+ else models
+ }
+
+ findAllModels(f, Nil)
+ }
+
+ private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
+ val negated = -unitLit
+ // drop entire clauses that are trivially true
+ // (i.e., disjunctions that contain the literal we're making true in the returned model),
+ // and simplify clauses by dropping the negation of the literal we're making true
+ // (since False \/ X == X)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
+ }
+
+ def findModelFor(f: Formula): Model = {
+ @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
+
+ debug.patmat("DPLL\n"+ cnfString(f))
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
+
+ val satisfiableWithModel: Model =
+ if (f isEmpty) EmptyModel
+ else if(f exists (_.isEmpty)) NoModel
+ else f.find(_.size == 1) match {
+ case Some(unitClause) =>
+ val unitLit = unitClause.head
+ // debug.patmat("unit: "+ unitLit)
+ withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
+ case _ =>
+ // partition symbols according to whether they appear in positive and/or negative literals
+ val pos = new mutable.HashSet[Sym]()
+ val neg = new mutable.HashSet[Sym]()
+ f.foreach{_.foreach{ lit =>
+ if (lit.pos) pos += lit.sym else neg += lit.sym
+ }}
+ // appearing in both positive and negative
+ val impures = pos intersect neg
+ // appearing only in either positive/negative positions
+ val pures = (pos ++ neg) -- impures
+
+ if (pures nonEmpty) {
+ val pureSym = pures.head
+ // turn it back into a literal
+ // (since equality on literals is in terms of equality
+ // of the underlying symbol and its positivity, simply construct a new Lit)
+ val pureLit = Lit(pureSym, pos(pureSym))
+ // debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
+ val simplified = f.filterNot(_.contains(pureLit))
+ withLit(findModelFor(simplified), pureLit)
+ } else {
+ val split = f.head.head
+ // debug.patmat("split: "+ split)
+ orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
+ }
+ }
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
+
+ satisfiableWithModel
+ }
+ }
+}
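
As an aside (not part of the patch): findModelFor above is plain DPLL. A self-contained sketch of the same rules (empty-clause failure, unit propagation, pure-literal elimination, then splitting on a literal), again over Int literals rather than the compiler's Formula/Lit types; its solve function could be passed to the allModels sketch shown earlier:

    object MiniDPLL {
      type Clause  = Set[Int]
      type Formula = List[Clause]
      type Model   = Map[Int, Boolean]

      private def dropUnit(f: Formula, unit: Int): Formula =
        f.filterNot(_ contains unit).map(_ - (-unit))

      def solve(f: Formula): Option[Model] =
        if (f.isEmpty) Some(Map.empty)                        // every clause satisfied
        else if (f.exists(_.isEmpty)) None                    // empty clause: unsatisfiable
        else f.find(_.size == 1).map(_.head) match {
          case Some(unit) =>                                  // unit propagation
            solve(dropUnit(f, unit)).map(_ + (math.abs(unit) -> (unit > 0)))
          case None =>
            val lits  = f.flatten.toSet
            val pures = lits.filterNot(l => lits(-l))         // literals whose negation never occurs
            if (pures.nonEmpty) {
              val p = pures.head                              // pure literal: satisfy it everywhere
              solve(f.filterNot(_ contains p)).map(_ + (math.abs(p) -> (p > 0)))
            } else {
              val l = f.head.head                             // split: try l, then -l
              solve(Set(l) :: f) orElse solve(Set(-l) :: f)
            }
        }

      def main(args: Array[String]): Unit =
        // (A \/ B) /\ (-A \/ C) /\ (-C): satisfiable, e.g. with A = false, B = true, C = false
        println(solve(List(Set(1, 2), Set(-1, 3), Set(-3))))
    }
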
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4bf7f78167..49049f110d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -589,6 +589,10 @@ trait ContextErrors {
setError(tree)
}
+ // typedPattern
+ def PatternMustBeValue(pat: Tree, pt: Type) =
+ issueNormalTypeError(pat, s"pattern must be a value: $pat"+ typePatternAdvice(pat.tpe.typeSymbol, pt.typeSymbol))
+
// SelectFromTypeTree
def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = {
val hiBound = qual.tpe.bounds.hi
@@ -920,33 +924,10 @@ trait ContextErrors {
def IncompatibleScrutineeTypeError(tree: Tree, pattp: Type, pt: Type) =
issueNormalTypeError(tree, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
- def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) = {
- def errMsg = {
- val sym = pat.tpe.typeSymbol
- val clazz = sym.companionClass
- val addendum = (
- if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) {
- // TODO: move these somewhere reusable.
- val typeString = clazz.typeParams match {
- case Nil => "" + clazz.name
- case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
- }
- val caseString = (
- clazz.caseFieldAccessors
- map (_ => "_") // could use the actual param names here
- mkString (clazz.name + "(", ",", ")")
- )
- (
- "\nNote: if you intended to match against the class, try `case _: " +
- typeString + "` or `case " + caseString + "`"
- )
- }
- else ""
- )
- "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum
- }
- issueNormalTypeError(pat, errMsg)
- }
+ def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) =
+ issueNormalTypeError(pat,
+ "pattern type is incompatible with expected type"+ foundReqMsg(pat.tpe, pt) +
+ typePatternAdvice(pat.tpe.typeSymbol, pt1.typeSymbol))
def PolyAlternativeError(tree: Tree, argtypes: List[Type], sym: Symbol, err: PolyAlternativeErrorKind.ErrorType) = {
import PolyAlternativeErrorKind._
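
As an aside (not part of the patch): typePatternAdvice is the addendum both errors now share. Roughly the kind of code that triggers it (hypothetical names), where the case class's companion object is used as a pattern although a value of the class is being matched:

    case class Foo(x: Int)

    object Test {
      // does not compile: "pattern type is incompatible with expected type", followed by the advice
      // "if you intended to match against the class, try `case _: Foo` or `case Foo(_)`"
      def f(a: Foo): Int = a match {
        case Foo => 0
        case _   => 1
      }
    }
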
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 620665126e..6c2945cad3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -84,6 +84,8 @@ trait Contexts { self: Analyzer =>
case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
case _ =>
}
+ sc.flushAndReturnBuffer()
+ sc.flushAndReturnWarningsBuffer()
sc = sc.outer
}
}
@@ -644,7 +646,14 @@ trait Contexts { self: Analyzer =>
new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
- val pre = imp.qual.tpe
+ val qual = imp.qual
+
+ val pre =
+ if (qual.tpe.typeSymbol.isPackageClass)
+ // SI-6225 important if the imported symbol is inherited by the package object.
+ singleType(qual.tpe, qual.tpe member nme.PACKAGE)
+ else
+ qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
case List() =>
List()
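
As an aside (not part of the patch): roughly the shape of code SI-6225 is about (hypothetical names). The implicit is inherited by the package object rather than declared directly in it, so the import's prefix has to be the package object's singleton type for the implicit to be found:

    // Helpers.scala
    package p
    import scala.language.implicitConversions
    trait Helpers {
      implicit def asString(i: Int): String = i.toString
    }

    // package.scala
    package object p extends p.Helpers

    // Test.scala
    import p._
    object Test {
      val s: String = 42   // relies on the conversion inherited by the package object via `import p._`
    }
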
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index bbba7e0435..57b9dfe3e4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -101,6 +101,7 @@ trait EtaExpansion { self: Analyzer =>
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
case Select(qual, name) =>
+ val name = tree.symbol.name // account for renamed imports, SI-7233
treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
case Ident(name) =>
tree
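
As an aside (not part of the patch): a hedged guess at the scenario behind SI-7233 (hypothetical names): eta-expanding a method that is only visible under a renamed import, where the rebuilt Select has to use the symbol's original name rather than the name written at the use site:

    object Foo { def bar(x: Int, y: Int): Int = x + y }

    object Test {
      import Foo.{ bar => quux }
      val f: (Int, Int) => Int = quux _   // eta expansion through the renamed import
      val g = quux(1, _: Int)             // partial application takes the same liftoutPrefix path
    }
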
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3bea049c59..04e0b9d653 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -149,7 +149,7 @@ trait Implicits {
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) {
override def toString = "SearchResult(%s, %s)".format(tree,
if (subst.isEmpty) "" else subst)
-
+
def isFailure = false
def isAmbiguousFailure = false
final def isSuccess = !isFailure
@@ -158,7 +158,7 @@ trait Implicits {
lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
}
-
+
lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
override def isAmbiguousFailure = true
@@ -892,11 +892,20 @@ trait Implicits {
*/
if (divergence)
throw DivergentImplicit
-
- if (invalidImplicits.nonEmpty)
+ else invalidImplicits take 1 foreach { sym =>
+ def isSensibleAddendum = pt match {
+ case Function1(_, out) => out <:< sym.tpe.finalResultType
+ case tp => tp <:< sym.tpe.finalResultType
+ case _ => false
+ }
+ // Don't pitch in with this theory unless it looks plausible that the
+ // implicit would have helped
setAddendum(pos, () =>
- "\n Note: implicit "+invalidImplicits.head+" is not applicable here"+
- " because it comes after the application point and it lacks an explicit result type")
+ if (isSensibleAddendum)
+ s"\n Note: implicit $sym is not applicable here because it comes after the application point and it lacks an explicit result type"
+ else ""
+ )
+ }
}
best
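
As an aside (not part of the patch): the note is now only emitted when the skipped implicit's result type could plausibly have satisfied the search. Roughly the classic scenario (hypothetical names):

    object Test {
      def needsInt(implicit i: Int): Int = i

      def demo: Int = {
        val x = needsInt         // does not compile: no implicit Int in scope here; with this change the
                                 // "comes after the application point and it lacks an explicit result type"
                                 // note is appended, because `later` could indeed have provided an Int
        implicit def later = 42  // defined after the use site, without an explicit result type
        x
      }
    }
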
@@ -1132,7 +1141,7 @@ trait Implicits {
)
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
- if (settings.XlogImplicits.value) println("materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
+ if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
if (context.macrosEnabled) success(materializer)
// don't call `failure` here. if macros are disabled, we just fail silently
// otherwise -Xlog-implicits will spam the log with zillions of "macros are disabled"
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 7161043dcf..55e0a954f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -55,6 +55,10 @@ trait Infer extends Checkable {
*
* `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
*
+ * @param nbSubPats The number of arguments to the extractor pattern
+ * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
+ * bind patterns, is a Tuple pattern, in which case it is the number of
+ * elements. Used to issue warnings about binding a `TupleN` to a single value.
* @throws TypeError when the unapply[Seq] definition is ill-typed
* @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
*
@@ -84,7 +88,8 @@ trait Infer extends Checkable {
* `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
*
*/
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
+ def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
+ unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
val isUnapplySeq = unappSym.name == nme.unapplySeq
val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
@@ -114,8 +119,9 @@ trait Infer extends Checkable {
else if (optionArgs.nonEmpty)
if (nbSubPats == 1) {
val productArity = productArgs.size
- if (productArity > 1 && settings.lint.value)
- global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
+ if (settings.lint.value && productArity > 1 && productArity != effectiveNbSubPats)
+ global.currentUnit.warning(pos,
+ s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
optionArgs
}
// TODO: update spec to reflect we allow any ProductN, not just TupleN
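
As an aside (not part of the patch): the effect of the extra effectiveNbSubPats check under -Xlint, with hypothetical names:

    object Dims {
      def unapply(s: String): Option[(Int, Int)] = Some((s.length, s.count(_ == ' ')))
    }

    object Test {
      def f(s: String) = s match {
        case Dims(d)      => d       // -Xlint: extractor pattern binds a single value to a Product2 of type (Int, Int)
        case _            => (0, 0)
      }
      def g(s: String) = s match {
        case Dims((w, h)) => w + h   // one sub-pattern, but it is a tuple pattern of matching arity: no warning
        case _            => 0
      }
    }
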
@@ -286,7 +292,11 @@ trait Infer extends Checkable {
// println("set error: "+tree);
// throw new Error()
// }
- def name = newTermName("<error: " + tree.symbol + ">")
+ def name = {
+ val sym = tree.symbol
+ val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
+ newTermName(s"<error: $nameStr>")
+ }
def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 245656e2d7..0ba30ffa73 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -333,9 +333,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
def param(tree: Tree): Symbol =
paramCache.getOrElseUpdate(tree.symbol, {
val sym = tree.symbol
- val sigParam = makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe))
- if (sym.isSynthetic) sigParam.flags |= SYNTHETIC
- sigParam
+ makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
})
val paramss = List(ctxParam) :: mmap(vparamss)(param)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 7a3ab00578..379f56521b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -516,7 +516,13 @@ trait Namers extends MethodSynthesis {
// Setting the position at the import means that if there is
// more than one hidden name, the second will not be warned.
// So it is the position of the actual hidden name.
- checkNotRedundant(tree.pos withPoint fromPos, from, to)
+ //
+ // Note: java imports have precedence over definitions in the same package,
+ // so don't warn for them. There is a corresponding special treatment
+ // in the shadowing rules in typedIdent (SI-7232). In any case,
+ // we shouldn't be emitting warnings for .java source files.
+ if (!context.unit.isJava)
+ checkNotRedundant(tree.pos withPoint fromPos, from, to)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 2340c78f8c..f3736f1519 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -273,22 +273,25 @@ trait NamesDefaults { self: Analyzer =>
*/
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ val symPs = map2(args, paramTypes)((arg, paramTpe) => arg match {
case Ident(nme.SELECTOR_DUMMY) =>
None // don't create a local ValDef if the argument is <unapply-selector>
case _ =>
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
+ val byName = isByNameParamType(paramTpe)
+ val repeated = isScalaRepeatedParamType(paramTpe)
val argTpe = (
if (repeated) arg match {
case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
case _ => seqType(arg.tpe)
}
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
+ else
+ // Note stabilizing can lead to a non-conformant argument when existentials are involved, e.g. neg/t3507-old.scala, hence the filter.
+ // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
+ gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe).deconst
+ )
val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
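
As an aside (not part of the patch): argValDefs hoists each argument of a named/default application into a synthetic val so that call-site evaluation order is preserved; with this change the val's type is taken from the argument, stabilized only when that still conforms to the parameter type, and deconst-ed so a literal does not pin it to a constant type. Roughly (the x$N names are illustrative):

    object Test {
      def resize(width: Int = 100, height: Int): Int = width * height

      val area = resize(height = 2, width = 3)

      // the call above is typed roughly as this block:
      val areaDesugared = {
        val x$1: Int = 2     // Int, not the constant type Int(2), thanks to deconst
        val x$2: Int = 3
        resize(x$2, x$1)
      }
    }
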
@@ -329,11 +332,10 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
- if (typedApp.isErrorTyped) tree
- else typedApp match {
+ typedApp match {
// Extract the typed arguments, restore the call-site evaluation order (using
// ValDef's in the block), change the arguments to these local values.
- case Apply(expr, typedArgs) =>
+ case Apply(expr, typedArgs) if !(typedApp :: typedArgs).exists(_.isErrorTyped) => // bail out with erroneous args, see SI-7238
// typedArgs: definition-site order
val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
@@ -361,6 +363,7 @@ trait NamesDefaults { self: Analyzer =>
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
+ case _ => tree
}
}
@@ -499,7 +502,7 @@ trait NamesDefaults { self: Analyzer =>
// disable conforms as a view...
val errsBefore = reporter.ERROR.count
try typer.silent { tpr =>
- val res = tpr.typed(arg, subst(paramtpe))
+ val res = tpr.typed(arg.duplicate, subst(paramtpe))
// better warning for SI-5044: if `silent` was not actually silent give a hint to the user
// [H]: the reason why `silent` is not silent is because the cyclic reference exception is
// thrown in a context completely different from `context` here. The exception happens while
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
deleted file mode 100644
index 87fe3fb3f6..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ /dev/null
@@ -1,3876 +0,0 @@
-/* NSC -- new Scala compiler
- *
- * Copyright 2011-2013 LAMP/EPFL
- * @author Adriaan Moors
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab._
-import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC, ARTIFACT}
-import scala.language.postfixOps
-import scala.tools.nsc.transform.TypingTransformers
-import scala.tools.nsc.transform.Transform
-import scala.collection.mutable.HashSet
-import scala.collection.mutable.HashMap
-import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.Types
-
-/** Translate pattern matching.
- *
- * Either into optimized if/then/else's,
- * or virtualized as method calls (these methods form a zero-plus monad), similar in spirit to how for-comprehensions are compiled.
- *
- * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
- * (lifting the body of the case into the monad using `one`).
- *
- * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
- *
- * TODO:
- * - DCE (on irrefutable patterns)
- * - update spec and double check it's implemented correctly (see TODO's)
- *
- * (longer-term) TODO:
- * - user-defined unapplyProd
- * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
- * - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
- */
-trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer =>
- import Statistics._
- import PatternMatchingStats._
-
- val global: Global // need to repeat here because otherwise last mixin defines global as
- // SymbolTable. If we had DOT this would not be an issue
- import global._ // the global environment
- import definitions._ // standard classes and methods
-
- val phaseName: String = "patmat"
-
- // TODO: the inliner fails to inline the closures to patmatDebug
- object debugging {
- val printPatmat = settings.Ypatmatdebug.value
- @inline final def patmatDebug(s: => String) = if (printPatmat) println(s)
- }
- import debugging.patmatDebug
-
- // to govern how much time we spend analyzing matches for unreachability/exhaustivity
- object AnalysisBudget {
- import scala.tools.cmd.FromString.IntFromString
- val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
-
- abstract class Exception extends RuntimeException("CNF budget exceeded") {
- val advice: String
- def warn(pos: Position, kind: String) = currentUnit.uncheckedWarning(pos, s"Cannot check match for $kind.\n$advice")
- }
-
- object exceeded extends Exception {
- val advice = s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)"
- }
- }
-
- def newTransformer(unit: CompilationUnit): Transformer =
- if (opt.virtPatmat) new MatchTransformer(unit)
- else noopTransformer
-
- // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
- private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
- private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
- private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
- private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
- private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
- private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
- private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
-
- class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
- override def transform(tree: Tree): Tree = tree match {
- case Match(sel, cases) =>
- val origTp = tree.tpe
- // setType origTp intended for CPS -- TODO: is it necessary?
- val translated = translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]]))
- try {
- localTyper.typed(translated) setType origTp
- } catch {
- case x: (Types#TypeError) =>
- // TODO: this should never happen; error should've been reported during type checking
- unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
- translated
- }
- case Try(block, catches, finalizer) =>
- treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer))
- case _ => super.transform(tree)
- }
-
- def translator: MatchTranslation with CodegenCore = {
- new OptimizingMatchTranslator(localTyper)
- }
- }
-
- import definitions._
- import analyzer._ //Typer
-
- object vpmName {
- val one = newTermName("one")
- val drop = newTermName("drop")
- val flatMap = newTermName("flatMap")
- val get = newTermName("get")
- val guard = newTermName("guard")
- val isEmpty = newTermName("isEmpty")
- val orElse = newTermName("orElse")
- val outer = newTermName("<outer>")
- val runOrElse = newTermName("runOrElse")
- val zero = newTermName("zero")
- val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching...
-
- def counted(str: String, i: Int) = newTermName(str+i)
- }
-
- class PureMatchTranslator(val typer: Typer, val matchStrategy: Tree) extends MatchTranslation with TreeMakers with PureCodegen
- class OptimizingMatchTranslator(val typer: Typer) extends MatchTranslation with TreeMakers with MatchOptimizations
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// talking to userland
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- /** Interface with user-defined match monad?
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
-
- type Matcher[P[_], M[+_], A] = {
- def flatMap[B](f: P[A] => M[B]): M[B]
- def orElse[B >: A](alternative: => M[B]): M[B]
- }
-
- abstract class MatchStrategy[P[_], M[+_]] {
- // runs the matcher on the given input
- def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
-
- def zero: M[Nothing]
- def one[T](x: P[T]): M[T]
- def guard[T](cond: P[Boolean], then: => P[T]): M[T]
- }
-
- * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
-
-
- * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
-
- object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
- def zero = None
- def one[T](x: T) = Some(x)
- // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
- def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
- def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
- }
-
- */
- trait MatchMonadInterface {
- val typer: Typer
- val matchOwner = typer.context.owner
-
- def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
- def reportMissingCases(pos: Position, counterExamples: List[String]) = {
- val ceString =
- if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
- else "inputs: " + counterExamples.mkString(", ")
-
- typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
- }
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
- }
-
- trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
- import typer.{typed, context, silent, reallyExists}
- // import typer.infer.containsUnchecked
-
- // Why is it so difficult to say "here's a name and a context, give me any
- // matching symbol in scope" ? I am sure this code is wrong, but attempts to
- // use the scopes of the contexts in the enclosing context chain discover
- // nothing. How to associate a name with a symbol would would be a wonderful
- // linkage for which to establish a canonical acquisition mechanism.
- def matchingSymbolInScope(pat: Tree): Symbol = {
- def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
- case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
- case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
- case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
- case _ => NoSymbol
- }
- pat match {
- case Bind(name, _) =>
- context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
- res orElse declarationOfName(ctx.owner.rawInfo, name))
- case _ => NoSymbol
- }
- }
-
- // Issue better warnings than "unreachable code" when people mis-use
- // variable patterns thinking they bind to existing identifiers.
- //
- // Possible TODO: more deeply nested variable patterns, like
- // case (a, b) => 1 ; case (c, d) => 2
- // However this is a pain (at least the way I'm going about it)
- // and I have to think these detailed errors are primarily useful
- // for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(cases: List[CaseDef]) {
- // A string describing the first variable pattern
- var vpat: String = null
- // Using an iterator so we can recognize the last case
- val it = cases.iterator
-
- def addendum(pat: Tree) = {
- matchingSymbolInScope(pat) match {
- case NoSymbol => ""
- case sym =>
- val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
- s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
- }
- }
-
- while (it.hasNext) {
- val cdef = it.next
- // If a default case has been seen, then every succeeding case is unreachable.
- if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
- // If this is a default case and more cases follow, warn about this one so
- // we have a reason to mention its pattern variable name and any corresponding
- // symbol in scope. Errors will follow from the remaining cases, at least
- // once we make the above warning an error.
- else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
- val vpatName = cdef.pat match {
- case Bind(name, _) => s" '$name'"
- case _ => ""
- }
- vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
- }
- }
- }
-
- /** Implement a pattern match by turning its cases (including the implicit failure case)
- * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
- *
- * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
- * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
- *
- * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
- * thus, you must typecheck the result (and that will in turn translate nested matches)
- * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
- */
- def translateMatch(match_ : Match): Tree = {
- val Match(selector, cases) = match_
-
- val (nonSyntheticCases, defaultOverride) = cases match {
- case init :+ last if treeInfo isSyntheticDefaultCase last =>
- (init, Some(((scrut: Tree) => last.body)))
- case _ =>
- (cases, None)
- }
-
- checkMatchVariablePatterns(nonSyntheticCases)
-
- // we don't transform after uncurry
- // (that would require more sophistication when generating trees,
- // and the only place that emits Matches after typers is for exception handling anyway)
- if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases)
- patmatDebug("translating "+ cases.mkString("{", "\n", "}"))
-
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
-
- val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
-
- val origPt = match_.tpe
- // when one of the internal cps-type-state annotations is present, strip all CPS annotations
- // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
- // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
- val ptUnCPS =
- if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
- removeCPSAdaptAnnotations(origPt)
- else origPt
-
- // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
- // pt is the skolemized version
- val pt = repeatedToSeq(ptUnCPS)
-
- // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
-
- val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
-
- // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
- combined
- }
-
- // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
- // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
- // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
- // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
- // unlike translateMatch, we type our result before returning it
- def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
- // if they're already simple enough to be handled by the back-end, we're done
- if (caseDefs forall treeInfo.isCatchCase) caseDefs
- else {
- val swatches = { // switch-catches
- val bindersAndCases = caseDefs map { caseDef =>
- // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
- // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
- }
-
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
- if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
- cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
- }
-
- val catches = if (swatches.nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
-
- val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
-
- List(
- atPos(pos) {
- CaseDef(
- Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
- EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
- )
- })
- }
-
- typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
- }
-
-
-
- /** The translation of `pat if guard => body` has two aspects:
- * 1) the substitution due to the variables bound by patterns
- * 2) the combination of the extractor calls using `flatMap`.
- *
- * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
- * this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
- * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
- * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
- * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
- * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
- *
- * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
- * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
- * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
- * in the scope of the remainder of the pattern, and they must thus be replaced by:
- * - (for 1-ary unapply) x_1
- * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
- * - (for unapplySeq) x_1.apply(i)
- *
- * in the treemakers,
- *
- * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
- *
- * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
- * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
- * a function that will take care of binding and substitution of the next ast (to the right).
- *
- */
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
- }
-
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- patmatDebug("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
- patmatDebug("changing "+ b +" : "+ b.info +" -> "+ tp)
- b setInfo tp
- }
-
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
- val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
- if (patBinder.info.widen <:< extractor.paramType) {
- // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
- // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
- // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
- // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
- /* TODO: uncomment when `settings.developer` and `devWarning` become available
- if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
- devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
- */
- (Nil, patBinder setInfo extractor.paramType, false)
- } else {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
- // check whether typetest implies patBinder is not null,
- // even though the eventual null check will be on patBinderOrCasted
- // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
- (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
- }
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
- case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // patmatDebug("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
- /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
- The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
- and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
- The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
- A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
- noFurtherSubPats()
- }
-
- /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- **/
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(patBinder.info.widen, pt)).normalize
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
- /** A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- **/
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /** 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- **/
- case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- patmatDebug("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
-
- case _ =>
- typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
-
- def translateGuard(guard: Tree): List[TreeMaker] =
- if (guard == EmptyTree) Nil
- else List(GuardTreeMaker(guard))
-
- // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
- // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
- // to enable this, probably need to move away from Option to a monad specific to pattern-match,
- // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
- // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
- // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
- def translateBody(body: Tree, matchPt: Type): TreeMaker =
- BodyTreeMaker(body, matchPt)
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
-
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
-
- // THE PRINCIPLED SLOW PATH -- NOT USED
- // generate a call to the (synthetically generated) extractor of a case class
- // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one)
- // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY),
- // and replace that dummy by a reference to the actual binder in translateExtractorPattern
- def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = {
- // TODO: can we rework the typer so we don't have to do all this twice?
- // undo rewrite performed in (5) of adapt
- val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- val origSym = orig.symbol
- val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe)
-
- if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) {
- None
- } else {
- // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy
- // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe)
- // can't always infer type arguments (pos/t602):
- /* case class Span[K <: Ordered[K]](low: Option[K]) {
- override def equals(x: Any): Boolean = x match {
- case Span((low0 @ _)) if low0 equals low => true
- }
- }*/
- // so... leave undetermined type params floating around if we have to
- // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala)
- // (see also run/virtpatmat_alts.scala)
- val savedUndets = context.undetparams
- val extractorCall = try {
- context.undetparams = Nil
- silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError()
- case _ =>
- // this fails to resolve overloading properly...
- // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
-
- // patmatDebug("funtpe after = "+ fun.tpe.finalResultType)
- // patmatDebug("orig: "+(orig, orig.tpe))
- val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg,
- // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K])
- // patmatDebug("tgt = "+ (tgt, tgt.tpe))
- val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
- // patmatDebug("oper: "+ (oper, oper.tpe))
- Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
- }
- } finally context.undetparams = savedUndets
-
- Some(this(extractorCall, args)) // TODO: simplify spliceApply?
- }
- }
- }
-
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
-
- // everything okay, captain?
- def isTyped : Boolean
-
- def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
-
- // to which type should the previous binder be casted?
- def paramType : Type
-
- /** Create the TreeMaker that embodies this extractor call
- *
- * `binder` has been casted to `paramType` if necessary
- * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
- * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
- */
- def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
-
- // `subPatBinders` are the variables bound by this pattern in the following patterns
- // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
-
- // never store these in local variables (for PreserveSubPatBinders)
- lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
- case (b, PatternBoundToUnderscore()) => b
- }.toSet
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
- if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
- else formalTypes(formalsWithRepeated, nbSubPats)
- } else rawSubPatTypes
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
- protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
- protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
- // this error-condition has already been checked by checkStarPatOK:
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
- }
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
-
- protected def lengthGuard(binder: Symbol): Option[Tree] =
- // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- checkedLength map { expectedLength => import CODE._
- // `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
-
- // the comparison to perform
- // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
- // (otherwise equality is required)
- def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
-
- // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
- (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
- }
-
- def checkedLength: Option[Int] =
- // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
- else Some(expectedLength)
-
- }
-
- // TODO: to be called when there's a def unapplyProd(x: T): U
- // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
- // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
- // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
- /*override def equals(x$1: Any): Boolean = ...
- val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
- */
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // patmatDebug("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // patmatDebug("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
- private def constructorTp = fun.tpe
-
- def isTyped = fun.isTyped
-
- // to which type should the previous binder be casted?
- def paramType = constructorTp.finalResultType
-
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
- protected def rawSubPatTypes = constructorTp.paramTypes
-
- /** Create the TreeMaker that embodies this extractor call
- *
- * `binder` has been casted to `paramType` if necessary
- * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
- * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
- */
- def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- val paramAccessors = binder.constrParamAccessors
- // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
- // make an exception for classes under the scala package as they should be well-behaved,
- // to optimize matching on List
- val mutableBinders =
- if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
- (paramAccessors exists (_.isMutable)))
- subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
- else Nil
-
- // checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
- }
-
- // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- val accessors = binder.caseFieldAccessors
- if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
- else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
- }
-
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
- }
-
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
-
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
- def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
-
- /** Create the TreeMaker that embodies this extractor call
- *
- * `binder` has been casted to `paramType` if necessary
- * `binderKnownNonNull` is not used in this subclass
- *
- * TODO: implement review feedback by @retronym:
- * Passing the pair of values around suggests:
- * case class Binder(sym: Symbol, knownNotNull: Boolean).
- * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
- */
- def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
- val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
- }
-
- override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
- else super.seqTree(binder)
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
- else super.subPatRefs(binder)
-
- protected def spliceApply(binder: Symbol): Tree = {
- object splice extends Transformer {
- override def transform(t: Tree) = t match {
- case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
- case _ => super.transform(t)
- }
- }
- splice.transform(extractorCallIncludingDummy)
- }
-
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
- else matchMonadResult(resultType)
- }
-
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(!isSeq && nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
-
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
- }
-
- /** A conservative approximation of which patterns do not discern anything.
- * They are discarded during the translation.
- */
- object WildcardPattern {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Star(WildcardPattern()) => true
- case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
- case EmptyTree => true
- case _ => false
- }
- }
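- // Illustrative examples (assumption, not from the original source): `case _ =>` (an Ident of nme.WILDCARD)
- // and a sequence wildcard `_*` are treated as non-discerning and dropped by the extractor above, whereas
- // `case _: Int =>` or `case Some(_) =>` are not, since they still test something about the scrutinee.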
-
- object PatternBoundToUnderscore {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
- case Typed(PatternBoundToUnderscore(), _) => true
- case _ => false
- }
- }
-
- object Bound {
- def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
- }
- }
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// substitution
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TypedSubstitution extends MatchMonadInterface {
- object Substitution {
- def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
- // requires sameLength(from, to)
- def apply(from: List[Symbol], to: List[Tree]) =
- if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
- }
-
- class Substitution(val from: List[Symbol], val to: List[Tree]) {
- // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
- // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
- def apply(tree: Tree): Tree = {
- // according to -Ystatistics 10% of translateMatch's time is spent in this method...
- // since about half of the typedSubst calls end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
- if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
- else (new Transformer {
- private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
- if (origTp == null || origTp == NoType) to
- // important: only type when actually substing and when original tree was typed
- // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
- else typer.typed(to, EXPRmode, WildcardType)
-
- override def transform(tree: Tree): Tree = {
- def subst(from: List[Symbol], to: List[Tree]): Tree =
- if (from.isEmpty) tree
- else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
- else subst(from.tail, to.tail)
-
- tree match {
- case Ident(_) => subst(from, to)
- case _ => super.transform(tree)
- }
- }
- }).transform(tree)
- }
-
-
- // the substitution that chains `other` before `this` substitution
- // forall t: Tree. this(other(t)) == (this >> other)(t)
- def >>(other: Substitution): Substitution = {
- val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
- new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
- }
- override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
- }
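- // Illustrative sketch (assumption, not part of the original source): if s1 replaces x by r and
- // s2 replaces y by x.head, then (s1 >> s2) replaces y by r.head and x by r, so that
- // (s1 >> s2)(t) == s1(s2(t)) for any tree t, as stated by the law documented on `>>` above.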
-
- object EmptySubstitution extends Substitution(Nil, Nil) {
- override def apply(tree: Tree): Tree = tree
- override def >>(other: Substitution): Substitution = other
- }
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// the making of the trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TreeMakers extends TypedSubstitution { self: CodegenCore =>
- def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): (List[List[TreeMaker]], List[Tree]) =
- (cases, Nil)
-
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
- None
-
- // for catch (no need to customize match failure)
- def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
- None
-
- abstract class TreeMaker {
- def pos: Position
-
- /** captures the scope and the value of the bindings in patterns;
- * it is important *when* the substitution happens (we can't accumulate the substitutions and apply them all at once after the full matcher has been constructed)
- */
- def substitution: Substitution =
- if (currSub eq null) localSubstitution
- else currSub
-
- protected def localSubstitution: Substitution
-
- private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
- if (currSub ne null) {
- patmatDebug("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
- Thread.dumpStack()
- }
- else currSub = outerSubst >> substitution
- }
- private[this] var currSub: Substitution = null
-
- /** The substitution that specifies the trees that compute the values of the subpattern binders.
- *
- * Should not be used to perform actual substitution!
- * Only used to reason symbolically about the values the subpattern binders are bound to.
- * See TreeMakerToCond#updateSubstitution.
- *
- * Overridden in PreserveSubPatBinders to pretend it replaces the subpattern binders by subpattern refs
- * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
- *
- * TODO: clean this up, would be nicer to have some higher-level way to compute
- * the binders bound by this tree maker and the symbolic values that correspond to them
- */
- def subPatternsAsSubstitution: Substitution = substitution
-
- // build Tree that chains `next` after the current extractor
- def chainBefore(next: Tree)(casegen: Casegen): Tree
- }
-
- trait NoNewBinders extends TreeMaker {
- protected val localSubstitution: Substitution = EmptySubstitution
- }
-
- case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
- def pos = tree.pos
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
- }
-
- case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
- def pos = body.pos
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
- atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
- override def toString = "B"+(body, matchPt)
- }
-
- case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
- val pos = NoPosition
-
- val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
- def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
- override def toString = "S"+ localSubstitution
- }
-
- abstract class FunTreeMaker extends TreeMaker {
- val nextBinder: Symbol
- def pos = nextBinder.pos
- }
-
- abstract class CondTreeMaker extends FunTreeMaker {
- val prevBinder: Symbol
- val nextBinderTp: Type
- val cond: Tree
- val res: Tree
-
- lazy val nextBinder = freshSym(pos, nextBinderTp)
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree =
- atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
- }
-
- // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
- protected val debugInfoEmitVars = !settings.optimise.value
-
- trait PreserveSubPatBinders extends TreeMaker {
- val subPatBinders: List[Symbol]
- val subPatRefs: List[Tree]
- val ignoredSubPatBinders: Set[Symbol]
-
- // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
- // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- // sub patterns bound to wildcard (_) are never stored as they can't be referenced
- // dirty debuggers will have to get dirty to see the wildcards
- lazy val storedBinders: Set[Symbol] =
- (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
-
- // e.g., mutable fields of a case class in ProductExtractorTreeMaker
- def extraStoredBinders: Set[Symbol]
-
- def emitVars = storedBinders.nonEmpty
-
- private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
-
- protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs)
- else {
- val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
- Substitution(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
- }
-
- /** The substitution that specifies the trees that compute the values of the subpattern binders.
- *
- * We pretend to replace the subpattern binders by subpattern refs
- * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
- */
- override def subPatternsAsSubstitution =
- Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
-
- import CODE._
- def bindSubPats(in: Tree): Tree =
- if (!emitVars) in
- else {
- // binders in `subPatBindersStored` that are referenced by tree `in`
- val usedBinders = new collection.mutable.HashSet[Symbol]()
- // all potentially stored subpat binders
- val potentiallyStoredBinders = stored.unzip._1.toSet
- // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
- in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
-
- if (usedBinders.isEmpty) in
- else {
- // only store binders actually used
- val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
- }
- }
- }
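- // Illustrative sketch (assumption, not from the original source): when variables must be emitted,
- // bindSubPats wraps the continuation in a block that stores only the binders the continuation actually
- // references, e.g. roughly `{ val a = x1._1; <in> }` for a sub-pattern binder `a` used in `<in>`.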
-
- /**
- * Make a TreeMaker that will result in an extractor call specified by `extractor`
- * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
- * a function with binder `nextBinder` over our extractor's result
- * the function's body is determined by the next TreeMaker
- * (furthermore, the interpretation of `flatMap` depends on the codegen instance we're using).
- *
- * The values for the subpatterns, as computed by the extractor call in `extractor`,
- * are stored in local variables that re-use the symbols in `subPatBinders`.
- * This makes extractor patterns more debuggable (SI-5739).
- */
- case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
- val subPatBinders: List[Symbol],
- val subPatRefs: List[Tree],
- extractorReturnsBoolean: Boolean,
- val checkedLength: Option[Int],
- val prevBinder: Symbol,
- val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
-
- def extraStoredBinders: Set[Symbol] = Set()
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val condAndNext = extraCond match {
- case Some(cond) =>
- casegen.ifThenElseZero(substitution(cond), bindSubPats(substitution(next)))
- case _ =>
- bindSubPats(substitution(next))
- }
- atPos(extractor.pos)(
- if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext)
- else casegen.flatMap(extractor, nextBinder, condAndNext)
- )
- }
-
- override def toString = "X"+(extractor, nextBinder.name)
- }
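- // Illustrative sketch (assumption, not from the original source): for a pattern like `case Foo(a) =>`,
- // `extractor` is roughly `Foo.unapply(x1)` and, under the pure codegen defined later in this file,
- // chainBefore yields something like `Foo.unapply(x1).flatMap(nextBinder => <next>)`; when the extractor
- // returns Boolean (e.g. `def unapply(x: T): Boolean`), it is wrapped via flatMapCond so the guard-like
- // result feeds a unit value to the continuation.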
-
- /**
- * An optimized version of ExtractorTreeMaker for Products.
- * For now, this is hard-coded to case classes, and we simply extract the case class fields.
- *
- * The values for the subpatterns, as specified by the case class fields at the time of extraction,
- * are stored in local variables that re-use the symbols in `subPatBinders`.
- * This makes extractor patterns more debuggable (SI-5739) as well as
- * avoiding mutation after the pattern has been matched (SI-5158, SI-6070)
- *
- * TODO: make this user-definable as follows
- * When a companion object defines a method `def unapply_1(x: T): U_1`, but no `def unapply` or `def unapplySeq`,
- * the extractor is considered to match any non-null value of type T
- * the pattern is expected to have as many sub-patterns as there are `def unapply_I(x: T): U_I` methods,
- * and the type of the I'th sub-pattern is `U_I`.
- * The same exception for Seq patterns applies: if the last extractor is of type `Seq[U_N]`,
- * the pattern must have at least N arguments (exactly N if the last argument is annotated with `: _*`).
- * The arguments starting at N (and beyond) are taken from the sequence returned by unapply_N,
- * and it is checked that the sequence has enough elements to provide values for all expected sub-patterns.
- *
- * For a case class C, the implementation is assumed to be `def unapply_I(x: C) = x._I`,
- * and the extractor call is inlined under that assumption.
- */
- case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
- val subPatBinders: List[Symbol],
- val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol],
- binderKnownNonNull: Boolean,
- val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
-
- import CODE._
- val nextBinder = prevBinder // just passing through
-
- // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond =
- if (binderKnownNonNull) extraCond
- else (extraCond map (nullCheck AND _)
- orElse Some(nullCheck))
-
- cond match {
- case Some(cond) =>
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
- case _ =>
- bindSubPats(substitution(next))
- }
- }
-
- override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
- }
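- // Illustrative sketch (assumption, not from the original source): for `case Bar(a, b) =>` where Bar is a
- // case class, the sub-pattern values are read straight from the case-class fields (via the case field
- // accessors / tupleSel), so chainBefore emits roughly
- //   if (x1 ne null) { <bind a, b to x1's fields if needed>; <next> } else zero
- // and the null check is skipped entirely when `binderKnownNonNull` is true.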
-
- object TypeTestTreeMaker {
- // factored out so that we can consistently generate other representations of the tree that implements the test
- // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
- trait TypeTestCondStrategy {
- type Result
-
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result
- // TODO: can probably always widen
- def typeTest(testedBinder: Symbol, expectedTp: Type): Result
- def nonNullTest(testedBinder: Symbol): Result
- def equalsTest(pat: Tree, testedBinder: Symbol): Result
- def eqTest(pat: Tree, testedBinder: Symbol): Result
- def and(a: Result, b: Result): Result
- def tru: Result
- }
-
- object treeCondStrategy extends TypeTestCondStrategy { import CODE._
- type Result = Tree
-
- def and(a: Result, b: Result): Result = a AND b
- def tru = TRUE_typed
- def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
- def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
- def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
- def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
-
- def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
- val expectedOuter = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
- case _ => TRUE_typed // fallback for SI-6183
- }
-
- // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
- // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedTp.prefix
-
- (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
- }
- }
-
- object pureTypeTestChecker extends TypeTestCondStrategy {
- type Result = Boolean
-
- def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
-
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
- def nonNullTest(testedBinder: Symbol): Result = false
- def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
- def eqTest(pat: Tree, testedBinder: Symbol): Result = false
- def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
- def tru = true
- }
-
- def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
- type Result = Boolean
-
- def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
- def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
- def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
- def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
- def and(a: Result, b: Result): Result = a || b
- def tru = false
- }
- }
-
- /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
- *
- * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
- - A reference to a class C, p.C, or T#C.
- This type pattern matches any non-null instance of the given class.
- Note that the prefix of the class, if it is given, is relevant for determining class instances.
- For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
- The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
-
- - A singleton type p.type.
- This type pattern matches only the value denoted by the path p
- (that is, a pattern match involves a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
- // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
-
- - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
- This type pattern matches all values that are matched by each of the type patterns Ti.
-
- - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
- This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
- The bounds or alias type of these type variables are determined as described in (§8.3).
-
- - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
- This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
- **/
- case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
- import TypeTestTreeMaker._
- patmatDebug("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
-
- lazy val outerTestNeeded = (
- !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
-
- // the logic to generate the run-time test that follows from the fact that
- // a `prevBinder` is expected to have type `expectedTp`
- // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
- // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
- // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
- def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
- import cs._
-
- def default =
- // do type test first to ensure we won't select outer on null
- if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
- else typeTest(testedBinder, expectedTp)
-
- // propagate expected type
- def expTp(t: Tree): t.type = t setType expectedTp
-
- // true when called to type-test the argument to an extractor
- // don't do any fancy equality checking, just test the type
- if (extractorArgTypeTest) default
- else expectedTp match {
- // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
- // this implies sym.isStable
- case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
- // must use == to support e.g. List() == Nil
- case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
- case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
- => eqTest(expTp(CODE.NULL), testedBinder)
- case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
- case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
-
- // TODO: verify that we don't need to special-case Array
- // I think it's okay:
- // - the isInstanceOf test includes a test for the element type
- // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if testedBinder.info.widen <:< expectedTp =>
- // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
- // since the types conform, no further checking is required
- if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
- // have to test outer and non-null only when it's a reference type
- else if (expectedTp <:< AnyRefClass.tpe) {
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
- } else default
-
- case _ => default
- }
- }
-
- val cond = renderCondition(treeCondStrategy)
- val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
-
- // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
- def isPureTypeTest = renderCondition(pureTypeTestChecker)
-
- def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
-
- override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
- }
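- // Illustrative sketch (assumption, not from the original source) of what renderCondition produces under
- // treeCondStrategy for a scrutinee binder x1:
- //   case _: String  =>  x1.isInstanceOf[String] (plus an outer test when outerTestNeeded)
- //   case _: p.type  =>  p == x1 && x1.isInstanceOf[<widened p.type>]   (equalsTest + typeTest)
- //   case null       =>  x1 eq null   (when x1's type is a reference type)
- //   case 42         =>  42 == x1     (equalsTest on the constant)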
-
- // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
- case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
- val nextBinderTp = prevBinder.info.widen
-
- // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
- // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
- val cond = codegen._equals(patTree, prevBinder)
- val res = CODE.REF(prevBinder)
- override def toString = "ET"+(prevBinder.name, patTree)
- }
-
- case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
- // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
-
- override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
- super.incorporateOuterSubstitution(outerSubst)
- altss = altss map (alts => propagateSubstitution(alts, substitution))
- }
-
- def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
- atPos(pos){
- // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
- // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
- val combinedAlts = altss map (altTreeMakers =>
- ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE_typed)))(casegen))
- )
-
- val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE_typed))
- codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
- }
- }
- }
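- // Illustrative sketch (assumption, not from the original source): `case A() | B() =>` is compiled by
- // building a nested Boolean matcher that tries each alternative in turn (each alternative ending in
- // `one(true)`, with `false` as the fallback), and the surrounding case only proceeds to `next` when
- // that matcher yields true.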
-
- case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
- val pos = guardTree.pos
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
- override def toString = "G("+ guardTree +")"
- }
-
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- // requires propagateSubstitution(treeMakers) has been called
- def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
- treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
-
-
- def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
-
- // a foldLeft to accumulate the localSubstitution left-to-right
- // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
- def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
- var accumSubst: Substitution = initial
- treeMakers foreach { maker =>
- maker incorporateOuterSubstitution accumSubst
- accumSubst = maker.substitution
- }
- removeSubstOnly(treeMakers)
- }
-
- // calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
- // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
- val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
- combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
- }
-
- // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
- def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
- fixerUpper(owner, scrut.pos){
- def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
- patmatDebug("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
-
- val (suppression, requireSwitch): (Suppression, Boolean) =
- if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppresion, false)
- else scrut match {
- case Typed(tree, tpt) =>
- val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
- val suppressUnreachable = tree match {
- case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
- case _ => false
- }
- val suppression = Suppression(suppressExhaustive, suppressUnreachable)
- // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
- val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
- (suppression, requireSwitch)
- case _ =>
- (Suppression.NoSuppresion, false)
- }
-
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
- if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
-
- if (casesNoSubstOnly nonEmpty) {
- // before optimizing, check casesNoSubstOnly for presence of a default case,
- // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
- // exhaustivity and reachability must be checked before optimization as well
- // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
- // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
- // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
- val synthCatchAll =
- if (casesNoSubstOnly.nonEmpty && {
- val nonTrivLast = casesNoSubstOnly.last
- nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
- }) None
- else matchFailGen
-
- val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, suppression)
-
- val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
-
- if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
- } else {
- codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
- }
- }
- }
-
- // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
- // TODO: assign more fine-grained positions
- // fixes symbol nesting, assigns positions
- protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
- currentOwner = origOwner
-
- override def traverse(t: Tree) {
- if (t != EmptyTree && t.pos == NoPosition) {
- t.setPos(pos)
- }
- t match {
- case Function(_, _) if t.symbol == NoSymbol =>
- t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- patmatDebug("new symbol for "+ (t, t.symbol.ownerChain))
- case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- patmatDebug("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
- t.symbol.owner = currentOwner
- case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- patmatDebug("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
-
- d.symbol.owner = currentOwner
- // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- //   patmatDebug("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
- case _ =>
- }
- super.traverse(t)
- }
-
- // override def apply
- // patmatDebug("before fixerupper: "+ xTree)
- // currentRun.trackerFactory.snapshot()
- // patmatDebug("after fixerupper")
- // currentRun.trackerFactory.snapshot()
- }
- }
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// generate actual trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait CodegenCore extends MatchMonadInterface {
- private var ctr = 0
- def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
-
- // assert(owner ne null); assert(owner ne NoSymbol)
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
- NoSymbol.newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
-
- def newSynthCaseLabel(name: String) =
- NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
-
- // codegen relevant to the structure of the translation (how extractors are combined)
- trait AbsCodegen {
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
-
- // local / context-free
- def _asInstanceOf(b: Symbol, tp: Type): Tree
- def _asInstanceOf(t: Tree, tp: Type): Tree
- def _equals(checker: Tree, binder: Symbol): Tree
- def _isInstanceOf(b: Symbol, tp: Type): Tree
- def and(a: Tree, b: Tree): Tree
- def drop(tgt: Tree)(n: Int): Tree
- def index(tgt: Tree)(i: Int): Tree
- def mkZero(tp: Type): Tree
- def tupleSel(binder: Symbol)(i: Int): Tree
- }
-
- // structure
- trait Casegen extends AbsCodegen { import CODE._
- def one(res: Tree): Tree
-
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
- def flatMapGuard(cond: Tree, next: Tree): Tree
- def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
- protected def zero: Tree
- }
-
- def codegen: AbsCodegen
-
- def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
-
- // we use subtyping as a model for implication between instanceof tests
- // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
- // unfortunately this is not true in general:
- // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
- def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
- val tpValue = tp.typeSymbol.isPrimitiveValueClass
-
- // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
- // (and the subtype is respectively a value type or not a value type)
- // this allows us to reuse subtyping as a model for implication between instanceOf tests
- // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
- val tpImpliedNormalizedToAny =
- if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
- else tpImplied
-
- tp <:< tpImpliedNormalizedToAny
- }
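- // Illustrative sketch (assumption, not from the original source): instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
- // normalizes AnyRef to Any (Product is not a primitive value class), and Product <:< Any holds, so the
- // implication is accepted even though Product <:< AnyRef does not hold directly (SI-6022).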
-
- abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
- def and(a: Tree, b: Tree): Tree = a AND b
-
- // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise, as neither tp nor pt need contain an abstract type; we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else gen.mkCastPreservingAnnotations(t, tp)
- def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
- // if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
-
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
- }
- }
- }
-
- trait PureMatchMonadInterface extends MatchMonadInterface {
- val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
- import CODE._
- def _match(n: Name): SelectStart = matchStrategy DOT n
-
- private lazy val oneSig: Type =
- typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
- }
-
- trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
- def codegen: AbsCodegen = pureCodegen
-
- object pureCodegen extends CommonCodegen with Casegen { import CODE._
- //// methods in MatchingStrategy (the monad companion) -- used directly in translation
- // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
- // TODO: consider catchAll, or virtualized matching will break in exception handlers
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
- _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
-
- // __match.one(`res`)
- def one(res: Tree): Tree = (_match(vpmName.one)) (res)
- // __match.zero
- protected def zero: Tree = _match(vpmName.zero)
- // __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then)
-
- //// methods in the monad instance -- used directly in translation
- // `prev`.flatMap(`b` => `next`)
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
- // `thisCase`.orElse(`elseCase`)
- def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
- // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
- // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
- }
- }
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// OPTIMIZATIONS
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// decisions, decisions
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- trait TreeMakerApproximation extends TreeMakers with Prettification{ self: CodegenCore =>
- object Test {
- var currId = 0
- }
- case class Test(cond: Cond, treeMaker: TreeMaker) {
- // private val reusedBy = new scala.collection.mutable.HashSet[Test]
- var reuses: Option[Test] = None
- def registerReuseBy(later: Test): Unit = {
- assert(later.reuses.isEmpty, later.reuses)
- // reusedBy += later
- later.reuses = Some(this)
- }
-
- val id = { Test.currId += 1; Test.currId}
- override def toString =
- "T"+ id + "C("+ cond +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
- }
-
- // TODO: remove Cond, replace by Prop from Logic
- object Cond {
- var currId = 0
- }
-
- abstract class Cond {
- val id = { Cond.currId += 1; Cond.currId}
- }
-
- case object TrueCond extends Cond {override def toString = "T"}
- case object FalseCond extends Cond {override def toString = "F"}
-
- case class AndCond(a: Cond, b: Cond) extends Cond {override def toString = a +"/\\"+ b}
- case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
-
- object EqualityCond {
- private val uniques = new scala.collection.mutable.HashMap[(Tree, Tree), EqualityCond]
- def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
- def unapply(c: EqualityCond) = Some((c.testedPath, c.rhs))
- }
- class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
- override def toString = testedPath +" == "+ rhs +"#"+ id
- }
-
- object NonNullCond {
- private val uniques = new scala.collection.mutable.HashMap[Tree, NonNullCond]
- def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
- def unapply(c: NonNullCond) = Some(c.testedPath)
- }
- class NonNullCond(val testedPath: Tree) extends Cond {
- override def toString = testedPath +" ne null " +"#"+ id
- }
-
- object TypeCond {
- private val uniques = new scala.collection.mutable.HashMap[(Tree, Type), TypeCond]
- def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
- def unapply(c: TypeCond) = Some((c.testedPath, c.pt))
- }
- class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
- override def toString = testedPath +" : "+ pt +"#"+ id
- }
-
-// class OuterEqCond(val testedPath: Tree, val expectedType: Type) extends Cond {
-// val expectedOuter = expectedTp.prefix match {
-// case ThisType(clazz) => THIS(clazz)
-// case pre => REF(pre.prefix, pre.termSymbol)
-// }
-//
-// // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
-// // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
-// val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
-//
-// (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
-// }
-
- // TODO: improve, e.g., for constants
- def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
- case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
- case _ => false
- })
-
- object IrrefutableExtractorTreeMaker {
- // will an extractor with unapply method of methodtype `tp` always succeed?
- // note: this assumes the other side-conditions implied by the extractor are met
- // (argument of the right type, length check succeeds for unapplySeq,...)
- def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
- case TypeRef(_, SomeClass, _) => true
- // probably not useful since this type won't be inferred nor can it be written down (yet)
- case ConstantType(Constant(true)) => true
- case _ => false
- }
-
- def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
- case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
- Some((extractor, nextBinder))
- case _ =>
- None
- }
- }
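- // Illustrative sketch (assumption, not from the original source): an extractor whose unapply is declared
- // to return Some[...] rather than Option[...], e.g. `def unapply(x: Foo): Some[(Int, String)]`, has an
- // irrefutable result type, so the corresponding ExtractorTreeMaker (with no extra condition) is
- // approximated as TrueCond by `irrefutableExtractor` below.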
-
- // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
- class TreeMakersToConds(val root: Symbol) {
- // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- private val pointsToBound = scala.collection.mutable.HashSet(root)
- private val trees = scala.collection.mutable.HashSet.empty[Tree]
-
- // the substitution that renames variables to variables in pointsToBound
- private var normalize: Substitution = EmptySubstitution
- private var substitutionComputed = false
-
- // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
- // in the end, instead of having x1, x1.hd, x2, x2.hd, ... flying around,
- // we want something like x1, x1.hd, x1.hd.tl, x1.hd.tl.hd, so that we can easily recognize when
- // we're testing the same variable
- // TODO check:
- // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
- private var accumSubst: Substitution = EmptySubstitution
-
- // hashconsing trees (modulo value-equality)
- def unique(t: Tree, tpOverride: Type = NoType): Tree =
- trees find (a => a.correspondsStructure(t)(sameValue)) match {
- case Some(orig) =>
- // patmatDebug("unique: "+ (t eq orig, orig))
- orig
- case _ =>
- trees += t
- if (tpOverride != NoType) t setType tpOverride
- else t
- }
-
- def uniqueTp(tp: Type): Type = tp match {
- // typerefs etc are already hashconsed
- case _ : UniqueType => tp
- case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
- case _ => tp
- }
-
- // produce the unique tree used to refer to this binder
- // the type of the binder passed to the first invocation
- // determines the type of the tree that'll be returned for that binder as of then
- final def binderToUniqueTree(b: Symbol) =
- unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
-
- def /\(conds: Iterable[Cond]) = if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
- def \/(conds: Iterable[Cond]) = if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
-
- // note that the sequencing of operations is important: must visit in same order as match execution
- // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
- abstract class TreeMakerToCond extends (TreeMaker => Cond) {
- // requires(if (!substitutionComputed))
- def updateSubstitution(subst: Substitution): Unit = {
- // find part of substitution that replaces bound symbols by new symbols, and reverse that part
- // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
- val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
- case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
- }
- val (boundFrom, boundTo) = boundSubst.unzip
- val (unboundFrom, unboundTo) = unboundSubst.unzip
-
- // reverse substitution that would otherwise replace a variable we already encountered by a new variable
- // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
- normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
- // patmatDebug ("normalize subst: "+ normalize)
-
- val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
- pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
- // patmatDebug("pointsToBound: "+ pointsToBound)
-
- accumSubst >>= okSubst
- // patmatDebug("accumSubst: "+ accumSubst)
- }
-
- def handleUnknown(tm: TreeMaker): Cond
-
- /** apply itself must render a faithful representation of the TreeMaker
- *
- * Concretely, TrueCond must only be used to represent a TreeMaker that is sure to match and that does not do any computation at all
- * e.g., doCSE relies on apply itself being sound in this sense (since it drops TreeMakers that are approximated to TrueCond -- SI-6077)
- *
- * handleUnknown may be customized by the caller to approximate further
- *
- * TODO: don't ignore outer-checks
- */
- def apply(tm: TreeMaker): Cond = {
- if (!substitutionComputed) updateSubstitution(tm.subPatternsAsSubstitution)
-
- tm match {
- case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
- object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
- type Result = Cond
- def and(a: Result, b: Result) = AndCond(a, b)
- def outerTest(testedBinder: Symbol, expectedTp: Type) = TrueCond // TODO OuterEqCond(testedBinder, expectedType)
- def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
- val p = binderToUniqueTree(b); AndCond(NonNullCond(p), TypeCond(p, uniqueTp(pt)))
- }
- def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
- def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
- def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
- def tru = TrueCond
- }
- ttm.renderCondition(condStrategy)
- case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
- case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map this)))
- case ProductExtractorTreeMaker(testedBinder, None) => NonNullCond(binderToUniqueTree(testedBinder))
- case SubstOnlyTreeMaker(_, _) => TrueCond
- case GuardTreeMaker(guard) =>
- guard.tpe match {
- case ConstantType(Constant(true)) => TrueCond
- case ConstantType(Constant(false)) => FalseCond
- case _ => handleUnknown(tm)
- }
- case ExtractorTreeMaker(_, _, _) |
- ProductExtractorTreeMaker(_, _) |
- BodyTreeMaker(_, _) => handleUnknown(tm)
- }
- }
- }
-
-
- private val irrefutableExtractor: PartialFunction[TreeMaker, Cond] = {
- // the extra condition is None, the extractor's result indicates it always succeeds,
- // (the potential type-test for the argument is represented by a separate TypeTestTreeMaker)
- case IrrefutableExtractorTreeMaker(_, _) => TrueCond
- }
-
- // special-case: interpret pattern `List()` as `Nil`
- // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
- private val rewriteListPattern: PartialFunction[TreeMaker, Cond] = {
- case p @ ExtractorTreeMaker(_, _, testedBinder)
- if testedBinder.tpe.typeSymbol == ListClass && p.checkedLength == Some(0) =>
- EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
- }
- val fullRewrite = (irrefutableExtractor orElse rewriteListPattern)
- val refutableRewrite = irrefutableExtractor
-
- @inline def onUnknown(handler: TreeMaker => Cond) = new TreeMakerToCond {
- def handleUnknown(tm: TreeMaker) = handler(tm)
- }
-
- // used for CSE -- rewrite all unknowns to False (the most conservative option)
- object conservative extends TreeMakerToCond {
- def handleUnknown(tm: TreeMaker) = FalseCond
- }
-
- final def approximateMatch(cases: List[List[TreeMaker]], treeMakerToCond: TreeMakerToCond = conservative) ={
- val testss = cases.map { _ map (tm => Test(treeMakerToCond(tm), tm)) }
- substitutionComputed = true // a second call to approximateMatch should not re-compute the substitution (would be wrong)
- testss
- }
- }
-
- def approximateMatchConservative(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] =
- (new TreeMakersToConds(root)).approximateMatch(cases)
-
- def showTreeMakers(cases: List[List[TreeMaker]]) = {
- patmatDebug("treeMakers:")
- patmatDebug(alignAcrossRows(cases, ">>"))
- }
-
- def showTests(testss: List[List[Test]]) = {
- patmatDebug("tests: ")
- patmatDebug(alignAcrossRows(testss, "&"))
- }
- }
-
- trait Prettification {
- private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
-
- def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
- def toString(x: AnyRef) = if (x eq null) "" else x.toString
- if (cols.isEmpty || cols.tails.isEmpty) cols map toString
- else {
- val (colStrs, colLens) = cols map {c => val s = toString(c); (s, s.length)} unzip
- val maxLen = max(colLens)
- val avgLen = colLens.sum/colLens.length
- val goalLen = maxLen min avgLen*2
- def pad(s: String) = {
- val toAdd = ((goalLen - s.length) max 0) + 2
- (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
- }
- cols map (x => pad(toString(x)))
- }
- }
- def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
- val maxLen = max(xss map (_.length))
- val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
- padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
- }
- }
-
- // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
- // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
- trait Logic extends Prettification {
- class Prop
- case class Eq(p: Var, q: Const) extends Prop
-
- type Const
-
- type TypeConst <: Const
- def TypeConst: TypeConstExtractor
- trait TypeConstExtractor {
- def apply(tp: Type): Const
- }
-
- def NullConst: Const
-
- type Var <: AbsVar
-
- trait AbsVar {
- // indicate we may later require a prop for V = C
- def registerEquality(c: Const): Unit
-
- // call this to indicate null is part of the domain
- def registerNull(): Unit
-
- // can this variable be null?
- def mayBeNull: Boolean
-
- // compute the domain and return it (call registerNull first!)
- def domainSyms: Option[Set[Sym]]
-
- // the symbol for this variable being equal to its statically known type
- // (only available if registerEquality has been called for that type before)
- def symForStaticTp: Option[Sym]
-
- // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
- // registerEquality(c) must have been called prior to this call
- // in fact, all equalities relevant to this variable must have been registered
- def propForEqualsTo(c: Const): Prop
-
- // populated by registerEquality
- // once implications has been called, must not call registerEquality anymore
- def implications: List[(Sym, List[Sym], List[Sym])]
- }
-
- // would be nice to statically check whether a prop is equational or pure,
- // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
- case class And(a: Prop, b: Prop) extends Prop
- case class Or(a: Prop, b: Prop) extends Prop
- case class Not(a: Prop) extends Prop
-
- case object True extends Prop
- case object False extends Prop
-
- // symbols are propositions
- abstract case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = Sym.nextSymId
-
- override def toString = variable +"="+ const +"#"+ id
- }
- class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
- object Sym {
- private val uniques: util.HashSet[Sym] = new util.HashSet("uniques", 512)
- def apply(variable: Var, const: Const): Sym = {
- val newSym = new UniqueSym(variable, const)
- (uniques findEntryOrUpdate newSym)
- }
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
- }
-
- def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
- def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
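- // Illustrative (assumption, not from the original source): /\(List(a, b, c)) folds to And(And(a, b), c),
- // and \/(Nil) is False -- the identity elements True/False make empty conjunctions and disjunctions behave as expected.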
-
- trait PropTraverser {
- def apply(x: Prop): Unit = x match {
- case And(a, b) => apply(a); apply(b)
- case Or(a, b) => apply(a); apply(b)
- case Not(a) => apply(a)
- case Eq(a, b) => applyVar(a); applyConst(b)
- case _ =>
- }
- def applyVar(x: Var): Unit = {}
- def applyConst(x: Const): Unit = {}
- }
-
- def gatherVariables(p: Prop): Set[Var] = {
- val vars = new HashSet[Var]()
- (new PropTraverser {
- override def applyVar(v: Var) = vars += v
- })(p)
- vars.toSet
- }
-
- trait PropMap {
- def apply(x: Prop): Prop = x match { // TODO: mapConserve
- case And(a, b) => And(apply(a), apply(b))
- case Or(a, b) => Or(apply(a), apply(b))
- case Not(a) => Not(apply(a))
- case p => p
- }
- }
-
- // convert finite domain propositional logic with subtyping to pure boolean propositional logic
-    // a type test or a value equality test is modelled as a variable being equal to some constant
- // a variable V may be assigned multiple constants, as long as they do not contradict each other
- // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments
- // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain
- // in a prelude (the equality axioms)
- // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain
- // 2. for each variable V in props, and each constant C it is compared to,
- // compute which assignments imply each other (as in the example above: V = 1 implies V = Int)
- // and which assignments are mutually exclusive (V = String implies -(V = Int))
- //
- // note that this is a conservative approximation: V = Constant(A) and V = Constant(B)
- // are considered mutually exclusive (and thus both cases are considered reachable in {case A => case B =>}),
- // even though A may be equal to B (and thus the second case is not "dynamically reachable")
- //
- // TODO: for V1 representing x1 and V2 standing for x1.head, encode that
- // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
- // may throw an AnalysisBudget.Exception
- def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
-
- val vars = new scala.collection.mutable.HashSet[Var]
-
- object gatherEqualities extends PropTraverser {
- override def apply(p: Prop) = p match {
- case Eq(v, c) =>
- vars += v
- v.registerEquality(c)
- case _ => super.apply(p)
- }
- }
-
- object rewriteEqualsToProp extends PropMap {
- override def apply(p: Prop) = p match {
- case Eq(v, c) => v.propForEqualsTo(c)
- case _ => super.apply(p)
- }
- }
-
- props foreach gatherEqualities.apply
- if (modelNull) vars foreach (_.registerNull)
-
- val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
-
- val eqAxioms = formulaBuilder
- @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
-
- patmatDebug("removeVarEq vars: "+ vars)
- vars.foreach { v =>
- // if v.domainSyms.isEmpty, we must consider the domain to be infinite
- // otherwise, since the domain fully partitions the type of the value,
- // exactly one of the types (and whatever it implies, imposed separately) must be chosen
- // consider X ::= A | B | C, and A => B
-        // coverage is formulated as: A \/ B \/ C; the implications (here, A => B) are added separately below, from v.implications
- v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
-
- // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
- // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
- v.symForStaticTp foreach { symForStaticTp =>
- if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
- else addAxiom(symForStaticTp)
- }
-
- v.implications foreach { case (sym, implied, excluded) =>
- // when sym is true, what must hold...
- implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
- // ... and what must not?
- excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
- }
- }
-
- patmatDebug("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
- patmatDebug("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
-
- (toFormula(eqAxioms), pure)
- }
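// [Editor's sketch, not part of the original sources] A hedged illustration of the equality
// axioms described above, for a match on a small sealed type; the names (T, A, B, f) are
// invented for the example.
sealed trait T
case object A extends T
case object B extends T

object RemoveVarEqExample extends App {
  def f(x: T): Int = x match {
    case A => 1   // contributes the proposition  V = A   (V models the scrutinee x)
    case B => 2   // contributes the proposition  V = B
  }
  // prelude (equality axioms) produced by the analysis, roughly:
  //   V=A \/ V=B              -- closed domain: x's sealed type admits only A and B
  //   -(V=A) \/ -(V=B)        -- the two value assignments exclude each other
  println(f(A) + f(B))        // prints 3
}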
-
-
- // an interface that should be suitable for feeding a SAT solver when the time comes
- type Formula
- type FormulaBuilder
-
- // creates an empty formula builder to which more formulae can be added
- def formulaBuilder: FormulaBuilder
-
- // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
- // toFormula(f) == andFormula(f1, andFormula(..., fN))
- def addFormula(buff: FormulaBuilder, f: Formula): Unit
- def toFormula(buff: FormulaBuilder): Formula
-
- // the conjunction of formulae `a` and `b`
- def andFormula(a: Formula, b: Formula): Formula
-
- // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
- def simplifyFormula(a: Formula): Formula
-
- // may throw an AnalysisBudget.Exception
- def propToSolvable(p: Prop): Formula = {
- val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
- andFormula(eqAxioms, pure)
- }
-
- // may throw an AnalysisBudget.Exception
- def eqFreePropToSolvable(p: Prop): Formula
- def cnfString(f: Formula): String
-
- type Model = Map[Sym, Boolean]
- val EmptyModel: Model
- val NoModel: Model
-
- def findModelFor(f: Formula): Model
- def findAllModelsFor(f: Formula): List[Model]
- }
-
- trait CNF extends Logic {
- /** Override Array creation for efficiency (to not go through reflection). */
- private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
- def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
- final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
- }
-
- import scala.collection.mutable.ArrayBuffer
- type FormulaBuilder = ArrayBuffer[Clause]
- def formulaBuilder = ArrayBuffer[Clause]()
- def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
- def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
- def toFormula(buff: FormulaBuilder): Formula = buff
-
- // CNF: a formula is a conjunction of clauses
- type Formula = FormulaBuilder
- def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
-
- type Clause = Set[Lit]
- // a clause is a disjunction of distinct literals
- def clause(l: Lit*): Clause = l.toSet
-
- type Lit
- def Lit(sym: Sym, pos: Boolean = true): Lit
-
- def andFormula(a: Formula, b: Formula): Formula = a ++ b
- def simplifyFormula(a: Formula): Formula = a.distinct
-
- private def merge(a: Clause, b: Clause) = a ++ b
-
- // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
- // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
- def eqFreePropToSolvable(p: Prop): Formula = {
- def negationNormalFormNot(p: Prop, budget: Int = AnalysisBudget.max): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Not(p) => negationNormalForm(p, budget - 1)
- case True => False
- case False => True
- case s: Sym => Not(s)
- }
-
- def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Not(negated) => negationNormalFormNot(negated, budget - 1)
- case True
- | False
- | (_ : Sym) => p
- }
-
- val TrueF = formula()
- val FalseF = formula(clause())
- def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, false)))
-
- def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
- def distribute(a: Formula, b: Formula, budget: Int): Formula =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else
- (a, b) match {
- // true \/ _ = true
- // _ \/ true = true
- case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
- // lit \/ lit
- case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
- // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
- // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
- case (cs, ds) =>
- val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
- big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
- }
-
- if (budget <= 0) throw AnalysisBudget.exceeded
-
- p match {
- case True => TrueF
- case False => FalseF
- case s: Sym => lit(s)
- case Not(s: Sym) => negLit(s)
- case And(a, b) =>
- val cnfA = conjunctiveNormalForm(a, budget - 1)
- val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
- cnfA ++ cnfB
- case Or(a, b) =>
- val cnfA = conjunctiveNormalForm(a)
- val cnfB = conjunctiveNormalForm(b)
- distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
- }
- }
-
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
- val res = conjunctiveNormalForm(negationNormalForm(p))
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
-
- //
- if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
-
-// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
- res
- }
-
- }
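// [Editor's sketch] A minimal standalone version of the same NNF -> CNF pipeline used above
// (push negations inward, then distribute \/ over /\); the types and names below are
// assumptions made for the example, not the compiler's own.
object CnfSketch extends App {
  sealed trait P
  case class SymP(name: String) extends P
  case class AndP(a: P, b: P)   extends P
  case class OrP(a: P, b: P)    extends P
  case class NotP(a: P)         extends P

  type Clause  = Set[String]    // positive literal "s", negative literal "-s"
  type Formula = List[Clause]   // a formula is a conjunction of clauses

  // negation normal form: negations occur only directly on symbols
  def nnf(p: P): P = p match {
    case NotP(AndP(a, b)) => OrP(nnf(NotP(a)), nnf(NotP(b)))
    case NotP(OrP(a, b))  => AndP(nnf(NotP(a)), nnf(NotP(b)))
    case NotP(NotP(a))    => nnf(a)
    case AndP(a, b)       => AndP(nnf(a), nnf(b))
    case OrP(a, b)        => OrP(nnf(a), nnf(b))
    case _                => p
  }

  // conjunctive normal form of a proposition already in NNF
  def cnf(p: P): Formula = p match {
    case SymP(n)       => List(Set(n))
    case NotP(SymP(n)) => List(Set("-" + n))
    case AndP(a, b)    => cnf(a) ++ cnf(b)
    case OrP(a, b)     => for (ca <- cnf(a); cb <- cnf(b)) yield ca ++ cb  // distribute
    case _             => sys.error("not in NNF: " + p)
  }

  val f = NotP(AndP(SymP("a"), OrP(SymP("b"), SymP("c"))))
  println(cnf(nnf(f)))  // List(Set(-a, -b), Set(-a, -c)), up to ordering
}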
-
- trait DPLLSolver extends CNF {
- // a literal is a (possibly negated) variable
- def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
- class Lit(val sym: Sym, val pos: Boolean) {
- override def toString = if (!pos) "-"+ sym.toString else sym.toString
- override def equals(o: Any) = o match {
- case o: Lit => (o.sym eq sym) && (o.pos == pos)
- case _ => false
- }
- override def hashCode = sym.hashCode + pos.hashCode
-
- def unary_- = Lit(sym, !pos)
- }
-
- def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
-
- // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
- val EmptyModel = Map.empty[Sym, Boolean]
- val NoModel: Model = null
-
- // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
- def findAllModelsFor(f: Formula): List[Model] = {
- val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
- // patmatDebug("vars "+ vars)
- // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
- def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
-
- def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
- if (recursionDepthAllowed == 0) models
- else {
- patmatDebug("find all models for\n"+ cnfString(f))
- val model = findModelFor(f)
- // if we found a solution, conjunct the formula with the model's negation and recurse
- if (model ne NoModel) {
- val unassigned = (vars -- model.keySet).toList
- patmatDebug("unassigned "+ unassigned +" in "+ model)
- def force(lit: Lit) = {
- val model = withLit(findModelFor(dropUnit(f, lit)), lit)
- if (model ne NoModel) List(model)
- else Nil
- }
- val forced = unassigned flatMap { s =>
- force(Lit(s, true)) ++ force(Lit(s, false))
- }
- patmatDebug("forced "+ forced)
- val negated = negateModel(model)
- findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
- }
- else models
- }
-
- findAllModels(f, Nil)
- }
-
- private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit): Formula = {
- val negated = -unitLit
- // drop entire clauses that are trivially true
- // (i.e., disjunctions that contain the literal we're making true in the returned model),
- // and simplify clauses by dropping the negation of the literal we're making true
- // (since False \/ X == X)
- val dropped = formulaBuilderSized(f.size)
- for {
- clause <- f
- if !(clause contains unitLit)
- } dropped += (clause - negated)
- dropped
- }
-
- def findModelFor(f: Formula): Model = {
- @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
-
- patmatDebug("DPLL\n"+ cnfString(f))
-
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
-
- val satisfiableWithModel: Model =
- if (f isEmpty) EmptyModel
- else if(f exists (_.isEmpty)) NoModel
- else f.find(_.size == 1) match {
- case Some(unitClause) =>
- val unitLit = unitClause.head
- // patmatDebug("unit: "+ unitLit)
- withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
- case _ =>
- // partition symbols according to whether they appear in positive and/or negative literals
- val pos = new HashSet[Sym]()
- val neg = new HashSet[Sym]()
- f.foreach{_.foreach{ lit =>
- if (lit.pos) pos += lit.sym else neg += lit.sym
- }}
- // appearing in both positive and negative
- val impures = pos intersect neg
- // appearing only in either positive/negative positions
- val pures = (pos ++ neg) -- impures
-
- if (pures nonEmpty) {
- val pureSym = pures.head
- // turn it back into a literal
- // (since equality on literals is in terms of equality
- // of the underlying symbol and its positivity, simply construct a new Lit)
- val pureLit = Lit(pureSym, pos(pureSym))
- // patmatDebug("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
- val simplified = f.filterNot(_.contains(pureLit))
- withLit(findModelFor(simplified), pureLit)
- } else {
- val split = f.head.head
- // patmatDebug("split: "+ split)
- orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
- }
- }
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
-
- satisfiableWithModel
- }
- }
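// [Editor's sketch] A standalone, simplified version of the DPLL loop above (unit
// propagation, pure-literal elimination, then splitting). Literals are plain strings,
// "-x" being the negation of "x"; all names here are assumptions made for the example.
object DpllSketch extends App {
  type Clause  = Set[String]
  type Formula = List[Clause]
  type Model   = Map[String, Boolean]

  def neg(l: String)   = if (l.startsWith("-")) l.drop(1) else "-" + l
  def sym(l: String)   = if (l.startsWith("-")) l.drop(1) else l
  def isPos(l: String) = !l.startsWith("-")

  // assume literal l is true: drop satisfied clauses, remove the falsified literal -l
  def assume(f: Formula, l: String): Formula =
    f.filterNot(_ contains l).map(_ - neg(l))

  def solve(f: Formula, m: Model = Map.empty): Option[Model] =
    if (f.isEmpty) Some(m)                       // all clauses satisfied
    else if (f.exists(_.isEmpty)) None           // an empty clause cannot be satisfied
    else f.find(_.size == 1).map(_.head) match {
      case Some(unit) =>                         // unit propagation
        solve(assume(f, unit), m + (sym(unit) -> isPos(unit)))
      case None =>
        val lits  = f.flatten.toSet
        val pures = lits.filter(l => !lits(neg(l)))
        pures.headOption match {
          case Some(p) =>                        // pure-literal elimination
            solve(assume(f, p), m + (sym(p) -> isPos(p)))
          case None =>                           // split on an arbitrary literal
            val l = f.head.head
            solve(assume(f, l), m + (sym(l) -> isPos(l)))
              .orElse(solve(assume(f, neg(l)), m + (sym(l) -> !isPos(l))))
        }
    }

  // (a \/ b) /\ (-a \/ b) /\ (-b \/ c)  is satisfiable, e.g. with b = true, c = true
  println(solve(List(Set("a", "b"), Set("-a", "b"), Set("-b", "c"))))
}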
-
-
- /**
- * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
- *
- */
- trait SymbolicMatchAnalysis extends TreeMakerApproximation with Logic { self: CodegenCore =>
- def prepareNewAnalysis() = { Var.resetUniques(); Const.resetUniques() }
-
- object Var {
- private var _nextId = 0
- def nextId = {_nextId += 1; _nextId}
-
- def resetUniques() = {_nextId = 0; uniques.clear()}
- private val uniques = new scala.collection.mutable.HashMap[Tree, Var]
- def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
- }
- class Var(val path: Tree, staticTp: Type) extends AbsVar {
- private[this] val id: Int = Var.nextId
-
- // private[this] var canModify: Option[Array[StackTraceElement]] = None
- private[this] def ensureCanModify = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
-
- private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
-
- // don't access until all potential equalities have been registered using registerEquality
- private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym]
-
- // when looking at the domain, we only care about types we can check at run time
- val staticTpCheckable: Type = checkableType(staticTp)
-
- private[this] var _mayBeNull = false
- def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
- def mayBeNull: Boolean = _mayBeNull
-
- // case None => domain is unknown,
- // case Some(List(tps: _*)) => domain is exactly tps
- // we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
- // once we go to run-time checks (on Const's), convert them to checkable types
-      // TODO: there seems to be a bug for singleton domains (variable does not show up in model)
- lazy val domain: Option[Set[Const]] = {
- val subConsts = enumerateSubtypes(staticTp).map{ tps =>
- tps.toSet[Type].map{ tp =>
- val domainC = TypeConst(tp)
- registerEquality(domainC)
- domainC
- }
- }
-
- val allConsts =
- if (mayBeNull) {
- registerEquality(NullConst)
- subConsts map (_ + NullConst)
- } else
- subConsts
-
- observed; allConsts
- }
-
- // populate equalitySyms
- // don't care about the result, but want only one fresh symbol per distinct constant c
- def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
-
- // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
- // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
- def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
-
-      // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
-      /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
- *
- * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
- * and the constant pattern yields a ValueConst C
- *
- * for exhaustivity, we really only need implication (e.g., V = 1 implies that V = 1 /\ V = Int, if both tests occur in the match,
- * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
- */
- lazy val implications = {
- /** when we know V = C, which other equalities must hold
- *
- * in general, equality to some type implies equality to its supertypes
- * (this multi-valued kind of equality is necessary for unreachability)
- * note that we use subtyping as a model for implication between instanceof tests
- * i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
- * unfortunately this is not true in general (see e.g. SI-6022)
- */
- def implies(lower: Const, upper: Const): Boolean =
- // values and null
- lower == upper ||
- // type implication
- (lower != NullConst && !upper.isValue &&
- instanceOfTpImplies(if (lower.isValue) lower.wideTp else lower.tp, upper.tp))
-
- // if(r) patmatDebug("implies : "+(lower, lower.tp, upper, upper.tp))
- // else patmatDebug("NOT implies: "+(lower, upper))
-
-
- /** does V = C preclude V having value `other`?
- (1) V = null is an exclusive assignment,
- (2) V = A and V = B, for A and B value constants, are mutually exclusive unless A == B
- we err on the safe side, for example:
- - assume `val X = 1; val Y = 1`, then
- (2: Int) match { case X => case Y => <falsely considered reachable> }
- - V = 1 does not preclude V = Int, or V = Any, it could be said to preclude V = String, but we don't model that
-
- (3) for types we could try to do something fancy, but be conservative and just say no
- */
- def excludes(a: Const, b: Const): Boolean =
- a != b && ((a == NullConst || b == NullConst) || (a.isValue && b.isValue))
-
- // if(r) patmatDebug("excludes : "+(a, a.tp, b, b.tp))
- // else patmatDebug("NOT excludes: "+(a, b))
-
-/*
-[ HALF BAKED FANCINESS: //!equalitySyms.exists(common => implies(common.const, a) && implies(common.const, b)))
- when type tests are involved, we reason (conservatively) under a closed world assumption,
- since we are really only trying to counter the effects of the symbols that we introduce to model type tests
- we don't aim to model the whole subtyping hierarchy, simply to encode enough about subtyping to do unreachability properly
-
- consider the following hierarchy:
-
- trait A
- trait B
- trait C
- trait AB extends B with A
-
- // two types are mutually exclusive if there is no equality symbol whose constant implies both
- object Test extends App {
- def foo(x: Any) = x match {
- case _ : C => println("C")
- case _ : AB => println("AB")
- case _ : (A with B) => println("AB'")
- case _ : B => println("B")
- case _ : A => println("A")
- }
-
- of course this kind of reasoning is not true in general,
- but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
-*/
-
- val excludedPair = new scala.collection.mutable.HashSet[ExcludedPair]
-
- case class ExcludedPair(a: Const, b: Const) {
- override def equals(o: Any) = o match {
- case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
- case _ => false
- }
- // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
- override def hashCode = a.hashCode ^ b.hashCode
- }
-
- equalitySyms map { sym =>
- // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
- // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
- val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
- val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const))
- val implied = notExcluded filter (b => implies(sym.const, b.const))
-
- patmatDebug("eq axioms for: "+ sym.const)
- patmatDebug("excluded: "+ excluded)
- patmatDebug("implied: "+ implied)
-
- excluded foreach { excludedSym => excludedPair += ExcludedPair(sym.const, excludedSym.const)}
-
- (sym, implied, excluded)
- }
- }
-
-      // calling registerNull after the domain has been accessed would result in inconsistencies
- lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
-
- lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
-
- // don't access until all potential equalities have been registered using registerEquality
- private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
-
- // don't call until all equalities have been registered and registerNull has been called (if needed)
- def describe = toString + ": " + staticTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
- override def toString = "V"+ id
- }
-
-
- // all our variables range over types
- // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
- // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
- object Const {
- def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
-
- private var _nextTypeId = 0
- def nextTypeId = {_nextTypeId += 1; _nextTypeId}
-
- private var _nextValueId = 0
- def nextValueId = {_nextValueId += 1; _nextValueId}
-
- private val uniques = new scala.collection.mutable.HashMap[Type, Const]
- private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
- uniques.get(tp).getOrElse(
- uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
- case Some((_, c)) =>
- patmatDebug("unique const: "+ (tp, c))
- c
- case _ =>
- val fresh = mkFresh
- patmatDebug("uniqued const: "+ (tp, fresh))
- uniques(tp) = fresh
- fresh
- })
-
- private val trees = scala.collection.mutable.HashSet.empty[Tree]
-
- // hashconsing trees (modulo value-equality)
- private[SymbolicMatchAnalysis] def uniqueTpForTree(t: Tree): Type =
-        // a new type for every unstable symbol -- only stable values are uniqued
- // technically, an unreachable value may change between cases
- // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding
-        // (and thus, the latter case must be considered reachable)
- if (!t.symbol.isStable) t.tpe.narrow
- else trees find (a => a.correspondsStructure(t)(sameValue)) match {
- case Some(orig) =>
- patmatDebug("unique tp for tree: "+ (orig, orig.tpe))
- orig.tpe
- case _ =>
- // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
- val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
- patmatDebug("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
- trees += treeWithNarrowedType
- treeWithNarrowedType.tpe
- }
- }
-
- sealed abstract class Const {
- def tp: Type
- def wideTp: Type
-
- def isAny = wideTp.typeSymbol == AnyClass
- def isValue: Boolean //= tp.isStable
-
- // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
- // the equals inherited from AnyRef does just this
- }
-
- // find most precise super-type of tp that is a class
- // we skip non-class types (singleton types, abstract types) so that we can
- // correctly compute how types relate in terms of the values they rule out
- // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
- // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
- // (At least conceptually: `true` is an instance of class `Boolean`)
- private def widenToClass(tp: Type): Type =
- if (tp.typeSymbol.isClass) tp
- else tp.baseType(tp.baseClasses.head)
-
- object TypeConst extends TypeConstExtractor {
- def apply(tp: Type) = {
- if (tp =:= NullTp) NullConst
- else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
- else Const.unique(tp, new TypeConst(tp))
- }
- def unapply(c: TypeConst): Some[Type] = Some(c.tp)
- }
-
- // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
- sealed class TypeConst(val tp: Type) extends Const {
- assert(!(tp =:= NullTp))
- private[this] val id: Int = Const.nextTypeId
-
- val wideTp = widenToClass(tp)
- def isValue = false
- override def toString = tp.toString //+"#"+ id
- }
-
- // p is a unique type or a constant value
- object ValueConst {
- def fromType(tp: Type) = {
- assert(tp.isInstanceOf[SingletonType])
- val toString = tp match {
- case ConstantType(c) => c.escapedStringValue
- case _ => tp.toString
- }
- Const.unique(tp, new ValueConst(tp, tp.widen, toString))
- }
- def apply(p: Tree) = {
- val tp = p.tpe.normalize
- if (tp =:= NullTp) NullConst
- else {
- val wideTp = widenToClass(tp)
-
- val narrowTp =
- if (tp.isInstanceOf[SingletonType]) tp
- else p match {
- case Literal(c) =>
- if (c.tpe.typeSymbol == UnitClass) c.tpe
- else ConstantType(c)
- case Ident(_) if p.symbol.isStable =>
- // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
- // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
- singleType(tp.prefix, p.symbol)
- case _ =>
- Const.uniqueTpForTree(p)
- }
-
- val toString =
- if (p.hasSymbol && p.symbol.isStable) p.symbol.name.toString // tp.toString
- else p.toString //+"#"+ id
-
- Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst
- }
- }
- }
- sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
- // patmatDebug("VC"+(tp, wideTp, toString))
- assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
- private[this] val id: Int = Const.nextValueId
- def isValue = true
- }
-
- lazy val NullTp = ConstantType(Constant(null))
- case object NullConst extends Const {
- def tp = NullTp
- def wideTp = NullTp
-
- def isValue = true
- override def toString = "null"
- }
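// [Editor's sketch] A hedged illustration of how source patterns map onto the Const
// hierarchy above: a type pattern yields a TypeConst, a literal or stable-identifier
// pattern a ValueConst, and the `null` pattern NullConst. Names are invented.
object ConstKindsExample extends App {
  def kind(x: Any): String = x match {
    case null   => "null pattern -> NullConst"
    case "a"    => "constant     -> ValueConst(\"a\")"
    case _: Int => "type test    -> TypeConst(Int)"
    case _      => "catch-all"
  }
  println(kind("a"))   // prints the ValueConst line
}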
-
-
- // turns a case (represented as a list of abstract tests)
- // into a proposition that is satisfiable if the case may match
- def symbolicCase(tests: List[Test], modelNull: Boolean = false): Prop = {
- def symbolic(t: Cond): Prop = t match {
- case AndCond(a, b) => And(symbolic(a), symbolic(b))
- case OrCond(a, b) => Or(symbolic(a), symbolic(b))
- case TrueCond => True
- case FalseCond => False
- case TypeCond(p, pt) => Eq(Var(p), TypeConst(checkableType(pt)))
- case EqualityCond(p, q) => Eq(Var(p), ValueConst(q))
- case NonNullCond(p) => if (!modelNull) True else Not(Eq(Var(p), NullConst))
- }
-
- val testsBeforeBody = tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker])
- /\(testsBeforeBody.map(t => symbolic(t.cond)))
- }
-
- // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
- // right now hackily implement this by pruning counter-examples
- // unreachability would also benefit from a more faithful representation
-
-
- // reachability (dead code)
-
- // computes the first 0-based case index that is unreachable (if any)
- // a case is unreachable if it implies its preceding cases
- // call C the formula that is satisfiable if the considered case matches
- // call P the formula that is satisfiable if the cases preceding it match
- // the case is reachable if there is a model for -P /\ C,
- // thus, the case is unreachable if there is no model for -(-P /\ C),
- // or, equivalently, P \/ -C, or C => P
- def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null
-
- // use the same approximator so we share variables,
- // but need different conditions depending on whether we're conservatively looking for failure or success
-      // don't rewrite List-like patterns, as List() and Nil need to be distinguished for unreachability
- val approx = new TreeMakersToConds(prevBinder)
- def approximate(default: Cond) = approx.approximateMatch(cases, approx.onUnknown { tm =>
- approx.refutableRewrite.applyOrElse(tm, (_: TreeMaker) => default )
- })
-
- val testCasesOk = approximate(TrueCond)
- val testCasesFail = approximate(FalseCond)
-
- prepareNewAnalysis()
-
- val propsCasesOk = testCasesOk map (t => symbolicCase(t, modelNull = true))
- val propsCasesFail = testCasesFail map (t => Not(symbolicCase(t, modelNull = true)))
-
- try {
- val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
- val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
-
- val prefix = formulaBuilder
- addFormula(prefix, eqAxioms)
-
- var prefixRest = symbolicCasesFail
- var current = symbolicCasesOk
- var reachable = true
- var caseIndex = 0
-
- patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
- patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsOk))
-
- // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
- // termination: prefixRest.length decreases by 1
- while (prefixRest.nonEmpty && reachable) {
- val prefHead = prefixRest.head
- caseIndex += 1
- prefixRest = prefixRest.tail
- if (prefixRest.isEmpty) reachable = true
- else {
- addFormula(prefix, prefHead)
- current = current.tail
- val model = findModelFor(andFormula(current.head, toFormula(prefix)))
-
- // patmatDebug("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
- // if (NoModel ne model) patmatDebug("reached: "+ modelString(model))
-
- reachable = NoModel ne model
- }
- }
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
-
- if (reachable) None else Some(caseIndex)
- } catch {
- case ex: AnalysisBudget.Exception =>
- ex.warn(prevBinder.pos, "unreachability")
- None // CNF budget exceeded
- }
- }
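// [Editor's sketch] A hedged usage illustration of the reachability check above: the second
// case below can only match if the first one already matched (C => P), so the compiler
// would typically report it as unreachable. Names are invented for the example.
object UnreachableExample extends App {
  def g(x: Int): String = x match {
    case 1 => "one"
    case 1 => "duplicate"   // C => P: flagged by the analysis as unreachable
    case _ => "other"
  }
  println(g(1))
}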
-
- // exhaustivity
-
- // TODO: domain of other feasibly enumerable built-in types (char?)
- def enumerateSubtypes(tp: Type): Option[List[Type]] =
- tp.typeSymbol match {
- // TODO case _ if tp.isTupleType => // recurse into component types?
- case UnitClass =>
- Some(List(UnitClass.tpe))
- case BooleanClass =>
- Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
- // TODO case _ if tp.isTupleType => // recurse into component types
- case modSym: ModuleClassSymbol =>
- Some(List(tp))
- // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
- None
- case sym =>
- val subclasses = (
- sym.sealedDescendants.toList sortBy (_.sealedSortName)
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- patmatDebug("enum sealed -- subclasses: "+ (sym, subclasses))
-
- val tpApprox = typer.infer.approximateAbstracts(tp)
- val pre = tpApprox.prefix
- // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- val validSubTypes = (subclasses flatMap {sym =>
- // have to filter out children which cannot match: see ticket #3683 for an example
- // compare to the fully known type `tp` (modulo abstract types),
- // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
-        // however, we must also approximate abstract types in the subtype before comparing (hence subTpApprox below)
-
- val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
- val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
- val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
- // patmatDebug("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
- if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
- else None
- })
- patmatDebug("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
- Some(validSubTypes)
- }
-
- // approximate a type to the static type that is fully checkable at run time,
- // hiding statically known but dynamically uncheckable information using existential quantification
- // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
- def checkableType(tp: Type): Type = {
- // TODO: this is extremely rough...
- // replace type args by wildcards, since they can't be checked (don't use existentials: overkill)
- // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
- // similar to typer.infer.approximateAbstracts
- object typeArgsToWildcardsExceptArray extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
- TypeRef(pre, sym, args map (_ => WildcardType))
- case _ =>
- mapOver(tp)
- }
- }
-
- val res = typeArgsToWildcardsExceptArray(tp)
- patmatDebug("checkable "+(tp, res))
- res
- }
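// [Editor's sketch] A hedged illustration of the approximation performed by checkableType:
// type arguments (except under Array) are replaced by wildcards because they cannot be
// tested at run time, so the pattern below is effectively checked as `List[_]`.
object CheckableTypeExample extends App {
  def h(x: Any): String = x match {
    case _: List[String] => "list"   // scalac warns that String is unchecked (erased)
    case _               => "other"
  }
  println(h(List(1, 2, 3)))          // prints "list": only the List part is checkable
}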
-
- // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
- // we consider tuple types with at least one component of a checkable type as a checkable type
- def uncheckableType(tp: Type): Boolean = {
- def tupleComponents(tp: Type) = tp.normalize.typeArgs
- val checkable = (
- (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
- || enumerateSubtypes(tp).nonEmpty)
- // if (!checkable) patmatDebug("deemed uncheckable: "+ tp)
- !checkable
- }
-
- def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
- // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
- // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
-      //   otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed non-exhaustive
-      // - back off (to avoid false non-exhaustiveness warnings) when:
-      //    - there are guards (which we cannot reason about), or
-      //    - there are extractor calls (that we can't secretly/soundly rewrite)
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null
- var backoff = false
-
- val approx = new TreeMakersToConds(prevBinder)
- val tests = approx.approximateMatch(cases, approx.onUnknown { tm =>
- approx.fullRewrite.applyOrElse[TreeMaker, Cond](tm, {
-          case BodyTreeMaker(_, _) => TrueCond // irrelevant -- will be discarded by symbolicCase later
- case _ => // patmatDebug("backing off due to "+ tm)
- backoff = true
- FalseCond
- })
- })
-
- if (backoff) Nil else {
- val prevBinderTree = approx.binderToUniqueTree(prevBinder)
-
- prepareNewAnalysis()
-
- val symbolicCases = tests map (symbolicCase(_, modelNull = false))
-
-
- // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
- // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
- // val nonNullScrutineeCond =
- // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
- // if (isTupleType(prevBinder.tpe))
- // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullCond(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(AndCond)
- // else
- // NonNullCond(prevBinderTree)
- // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
-
- // when does the match fail?
- val matchFails = Not(\/(symbolicCases))
- val vars = gatherVariables(matchFails)
-
- // debug output:
- patmatDebug("analysing:")
- showTreeMakers(cases)
- showTests(tests)
-
- // patmatDebug("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
- // patmatDebug("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
-
- try {
- // find the models (under which the match fails)
- val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
-
- val scrutVar = Var(prevBinderTree)
- val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
-
- val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
- pruned
- } catch {
- case ex : AnalysisBudget.Exception =>
- ex.warn(prevBinder.pos, "exhaustivity")
- Nil // CNF budget exceeded
- }
- }
- }
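// [Editor's sketch] A hedged illustration of the exhaustivity analysis above: the match
// below misses Dot, so the compiler would typically warn that the match may not be
// exhaustive and that it "would fail on the following input: Dot" -- produced by the
// counter-example machinery defined below. Names (Shape, Circle, ...) are invented.
sealed trait Shape
case object Circle extends Shape
case object Square extends Shape
case object Dot    extends Shape

object ExhaustivityExample extends App {
  def area(s: Shape): Int = s match {
    case Circle => 1
    case Square => 2
    // Dot is not covered -> reported as a counter-example
  }
  println(area(Circle))
}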
-
- object CounterExample {
- def prune(examples: List[CounterExample]): List[CounterExample] = {
- val distinct = examples.filterNot(_ == NoExample).toSet
- distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
- }
- }
-
-    // a way to construct a value that will make the match fail (a constructor invocation, a constant, an object of some type)
- class CounterExample {
- protected[SymbolicMatchAnalysis] def flattenConsArgs: List[CounterExample] = Nil
- def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
- }
- case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
- case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
- case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
- // require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
- override def toString = {
- val negation =
- if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
- else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
- "(x: "+ eqTo +" forSome x not in "+ negation +")"
- }
- }
- case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
- protected[SymbolicMatchAnalysis] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
- case hd :: tl :: Nil => hd :: tl.flattenConsArgs
- case _ => Nil
- }
- protected[SymbolicMatchAnalysis] lazy val elems = flattenConsArgs
-
- override def coveredBy(other: CounterExample): Boolean =
- other match {
- case other@ListExample(_) =>
- this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
- case _ => super.coveredBy(other)
- }
-
- override def toString = elems.mkString("List(", ", ", ")")
- }
- case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
- override def toString = ctorArgs.mkString("(", ", ", ")")
-
- override def coveredBy(other: CounterExample): Boolean =
- other match {
- case TupleExample(otherArgs) =>
- this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
- case _ => super.coveredBy(other)
- }
- }
- case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
- override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
- }
-
- case object WildcardExample extends CounterExample { override def toString = "_" }
- case object NoExample extends CounterExample { override def toString = "??" }
-
- def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
- model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
- val (trues, falses) = xs.partition(_._2)
- (trues map (_._1.const), falses map (_._1.const))
- // should never be more than one value in trues...
- }
-
- def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
- varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
- val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
- v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
- }.mkString("\n")
-
- def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model))
-
- // return constructor call when the model is a true counter example
- // (the variables don't take into account type information derived from other variables,
- // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
- // since we didn't realize the tail of the outer cons was a Nil)
- def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
- // x1 = ...
- // x1.hd = ...
- // x1.tl = ...
- // x1.hd.hd = ...
- // ...
- val varAssignment = modelToVarAssignment(model)
-
- patmatDebug("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
-
- // chop a path into a list of symbols
- def chop(path: Tree): List[Symbol] = path match {
- case Ident(_) => List(path.symbol)
- case Select(pre, name) => chop(pre) :+ path.symbol
- case _ =>
- // patmatDebug("don't know how to chop "+ path)
- Nil
- }
-
- // turn the variable assignments into a tree
- // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
- // a node is a variable example (which is later turned into a counter example)
- object VariableAssignment {
- private def findVar(path: List[Symbol]) = path match {
- case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
- case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
- }
-
- private val uniques = new scala.collection.mutable.HashMap[Var, VariableAssignment]
- private def unique(variable: Var): VariableAssignment =
- uniques.getOrElseUpdate(variable, {
- val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
- VariableAssignment(variable, eqTo.toList, neqTo.toList, HashMap.empty)
- })
-
- def apply(variable: Var): VariableAssignment = {
- val path = chop(variable.path)
- val pre = path.init
- val field = path.last
-
- val newCtor = unique(variable)
-
- if (pre.isEmpty) newCtor
- else {
- findVar(pre) foreach { preVar =>
- val outerCtor = this(preVar)
- outerCtor.fields(field) = newCtor
- }
- newCtor
- }
- }
- }
-
- // node in the tree that describes how to construct a counter-example
- case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
- // need to prune since the model now incorporates all super types of a constant (needed for reachability)
- private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
- private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
- private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
- private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
- private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
- private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
-
-
- def allFieldAssignmentsLegal: Boolean =
- (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
-
- private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => c.isAny }
-
- // NoExample if the constructor call is ill-typed
- // (thus statically impossible -- can we incorporate this into the formula?)
- // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
- def toCounterExample(beBrief: Boolean = false): CounterExample =
- if (!allFieldAssignmentsLegal) NoExample
- else {
- patmatDebug("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
- val res = prunedEqualTo match {
- // a definite assignment to a value
- case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
-
- // constructor call
- // or we did not gather any information about equality but we have information about the fields
- // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
- case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
- ( uniqueEqualTo.nonEmpty
- || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
-
- def args(brevity: Boolean = beBrief) = {
- // figure out the constructor arguments from the field assignment
- val argLen = (caseFieldAccs.length min ctorParams.length)
-
- (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
- }
-
- cls match {
- case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(true))
- case _ => ConstructorExample(cls, args())
- }
-
- // a definite assignment to a type
- case List(eq) if fields.isEmpty => TypeExample(eq)
-
- // negative information
- case Nil if nonTrivialNonEqualTo.nonEmpty =>
- // negation tends to get pretty verbose
- if (beBrief) WildcardExample
- else {
- val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
- NegativeExample(eqTo, nonTrivialNonEqualTo)
- }
-
- // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
- // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
- case _ => NoExample
- }
- patmatDebug("described as: "+ res)
- res
- }
-
- override def toString = toCounterExample().toString
- }
-
- // slurp in information from other variables
- varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
-
- // this is the variable we want a counter example for
- VariableAssignment(scrutVar).toCounterExample()
- }
- }
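// [Editor's sketch] A hedged illustration of modelToCounterExample above: for the match
// below, a failing model assigns the scrutinee a cons whose tail is again a cons, which
// would typically be rendered as a counter-example like `List(_, _)` (a ListExample).
// Names are invented for the example.
object CounterExampleExample extends App {
  def first(xs: List[Int]): Int = xs match {
    case Nil      => 0
    case x :: Nil => x
    // lists with two or more elements are not covered -> counter-example List(_, _)
  }
  println(first(List(1)))
}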
-
-////
- trait CommonSubconditionElimination extends TreeMakerApproximation { self: OptimizedCodegen =>
- /** a flow-sensitive, generalised, common sub-expression elimination
- * reuse knowledge from performed tests
- * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
- * when a sub-expression is shared, it is stored in a mutable variable
- * the variable is floated up so that its scope includes all of the program that shares it
- * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
- */
- def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
- patmatDebug("before CSE:")
- showTreeMakers(cases)
-
- val testss = approximateMatchConservative(prevBinder, cases)
-
- // interpret:
- val dependencies = new scala.collection.mutable.LinkedHashMap[Test, Set[Cond]]
- val tested = new scala.collection.mutable.HashSet[Cond]
-
- def storeDependencies(test: Test) = {
- val cond = test.cond
-
- def simplify(c: Cond): Set[Cond] = c match {
- case AndCond(a, b) => simplify(a) ++ simplify(b)
- case OrCond(_, _) => Set(FalseCond) // TODO: make more precise
- case NonNullCond(_) => Set(TrueCond) // not worth remembering
- case _ => Set(c)
- }
- val conds = simplify(cond)
-
- if (conds(FalseCond)) false // stop when we encounter a definite "no" or a "not sure"
- else {
- val nonTrivial = conds filterNot (_ == TrueCond)
- if (nonTrivial nonEmpty) {
- tested ++= nonTrivial
-
- // is there an earlier test that checks our condition and whose dependencies are implied by ours?
- dependencies find {
- case (priorTest, deps) =>
- ((simplify(priorTest.cond) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
- (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
- ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
- } foreach {
- case (priorTest, _) =>
- // if so, note the dependency in both tests
- priorTest registerReuseBy test
- }
-
- dependencies(test) = tested.toSet // copies
- }
- true
- }
- }
-
-
- testss foreach { tests =>
- tested.clear()
- tests dropWhile storeDependencies
- }
- patmatDebug("dependencies: "+ dependencies)
-
- // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
- // then, collapse these contiguous sequences of reusing tests
- // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
- // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- val reused = new scala.collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
- var okToCall = false
- val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
-
- // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
- // once this has been computed, we'll know which tree makers are reused,
- // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
- val collapsed = testss map { tests =>
- // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
- // if there's no sharing, simply map to the tree makers corresponding to the tests
- var currDeps = Set[Cond]()
- val (sharedPrefix, suffix) = tests span { test =>
- (test.cond == TrueCond) || (for(
- reusedTest <- test.reuses;
- nextDeps <- dependencies.get(reusedTest);
- diff <- (nextDeps -- currDeps).headOption;
- _ <- Some(currDeps = nextDeps))
- yield diff).nonEmpty
- }
-
- val collapsedTreeMakers =
- if (sharedPrefix.isEmpty) None
- else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
- for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
- case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
- case _ =>
- }
-
- patmatDebug("sharedPrefix: "+ sharedPrefix)
-          patmatDebug("suffix: "+ suffix)
- // if the shared prefix contains interesting conditions (!= TrueCond)
- // and the last of such interesting shared conditions reuses another treemaker's test
- // replace the whole sharedPrefix by a ReusingCondTreeMaker
- for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond == TrueCond).headOption;
- lastReused <- lastShared.reuses)
- yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
- }
-
- collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains TrueCond-tests, which are dropped above)
- }
- okToCall = true // TODO: remove (debugging)
-
- // replace original treemakers that are reused (as determined when computing collapsed),
- // by ReusedCondTreeMakers
- val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
- patmatDebug("after CSE:")
- showTreeMakers(reusedMakers)
- reusedMakers
- }
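// [Editor's sketch] A hedged illustration of the sharing doCSE exploits: both String cases
// below start with the same `isInstanceOf[String]` test, so after CSE the generated code
// can perform that test once, store the result in a hoisted mutable local, and reuse it.
// Names are invented for the example.
object CseExample extends App {
  def describe(x: Any): String = x match {
    case s: String if s.isEmpty => "empty string"
    case s: String              => "string: " + s
    case _                      => "other"
  }
  println(describe("hi"))
}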
-
- object ReusedCondTreeMaker {
- def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
- }
- class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
- lazy val treesToHoist: List[Tree] = {
- nextBinder setFlag MUTABLE
- List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
- }
-
- // TODO: finer-grained duplication
- def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
- atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
-
- override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
- }
-
- case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
- val pos = sharedPrefix.last.treeMaker.pos
-
- lazy val localSubstitution = {
- // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
- var mostRecentReusedMaker: ReusedCondTreeMaker = null
- def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
- val (from, to) = sharedPrefix.flatMap { dropped =>
- dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
- case reusedMaker: ReusedCondTreeMaker =>
- mostRecentReusedMaker = reusedMaker
- case _ =>
- }
-
- // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
- // and thus assumed in scope, either because it binds it or because it refers to it
- dropped.treeMaker match {
- case dropped: FunTreeMaker =>
- mapToStored(dropped.nextBinder)
- case _ => Nil
- }
- }.unzip
- val rerouteToReusedBinders = Substitution(from, to)
-
- val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
-
- collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
- }
-
- lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
- // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
- }
- override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
- }
- }
-
-
- //// DCE
- trait DeadCodeElimination extends TreeMakers { self: CodegenCore =>
- // TODO: non-trivial dead-code elimination
- // e.g., the following match should compile to a simple instanceof:
- // case class Ident(name: String)
- // for (Ident(name) <- ts) println(name)
- def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
- // do minimal DCE
- cases
- }
- }
-
- //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
- import treeInfo.isGuardedCase
-
- abstract class SwitchMaker {
- abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
- val SwitchableTreeMaker: SwitchableTreeMakerExtractor
-
- def alternativesSupported: Boolean
-
- // when collapsing guarded switch cases we may sometimes need to jump to the default case
- // however, that's not supported in exception handlers, so when we can't jump when we need it, don't emit a switch
- // TODO: make more fine-grained, as we don't always need to jump
- def canJump: Boolean
-
- /** Should exhaustivity analysis be skipped? */
- def unchecked: Boolean
-
-
- def isDefault(x: CaseDef): Boolean
- def defaultSym: Symbol
- def defaultBody: Tree
- def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
-
- private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
- if (xs exists (_.isEmpty)) None else Some(xs.flatten)
-
- object GuardAndBodyTreeMakers {
- def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
- tms match {
- case (btm@BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
- case (gtm@GuardTreeMaker(guard)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
- case _ => None
- }
- }
- }
-
- private val defaultLabel: Symbol = newSynthCaseLabel("default")
-
- /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
- *
- * Cases with patterns A and B switch on the same constant iff for all values x that match A also match B and vice versa.
- * (This roughly corresponds to equality on trees modulo alpha renaming and reordering of alternatives.)
- *
- * The rewrite only applies if some of the cases are guarded (this must be checked before invoking this method).
- *
- * The rewrite goes through the switch top-down and merges each case with the subsequent cases it is implied by
- * (i.e. it matches if they match, not taking guards into account)
- *
- * If there are no unreachable cases, all cases can be uniquely assigned to a partition of such 'overlapping' cases,
- * save for the default case (thus we jump to it rather than copying it several times).
- * (The cases in a partition are implied by the principal element of the partition.)
- *
- * The overlapping cases are merged into one case with their guards pushed into the body as follows
- * (with P the principal element of the overlapping patterns Pi):
- *
- * `{case Pi if(G_i) => B_i }*` is rewritten to `case P => {if(G_i) B_i}*`
- *
- * The rewrite fails (and returns Nil) when:
- * (1) there is a subsequence of overlapping cases that has an unguarded case in the middle;
- * only the last case of each subsequence of overlapping cases may be unguarded (this is implied by unreachability)
- *
- * (2) there are overlapping cases that differ (tested by `caseImpliedBy`)
- * cases with patterns A and B are overlapping if for SOME value x, A matches x implies B matches y OR vice versa <-- note the difference with case equality defined above
- * for example `case 'a' | 'b' =>` and `case 'b' =>` are different and overlapping (overlapping and equality disregard guards)
- *
- * The second component of the returned tuple indicates whether we'll need to emit a labeldef to jump to the default case.
- */
- private def collapseGuardedCases(cases: List[CaseDef]): (List[CaseDef], Boolean) = {
- // requires(same.forall(caseEquals(same.head)))
- // requires(same.nonEmpty, same)
- def collapse(same: List[CaseDef], isDefault: Boolean): CaseDef = {
- val commonPattern = same.head.pat
- // jump to default case (either the user-supplied one or the synthetic one)
- // unless we're collapsing the default case: then we re-use the same body as the synthetic catchall (throwing a matcherror, rethrowing the exception)
- val jumpToDefault: Tree =
- if (isDefault || !canJump) defaultBody
- else Apply(Ident(defaultLabel), Nil)
-
- val guardedBody = same.foldRight(jumpToDefault){
- // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
- // --> replace jumpToDefault by the un-guarded case's body
- case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
- case (cd@CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els)
- }
-
- // if the cases that we're going to collapse bind variables,
- // must replace them by the single binder introduced by the collapsed case
- val binders = same.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
- val (pat, guardedBodySubst) =
- if (binders.isEmpty) (commonPattern, guardedBody)
- else {
- // create a single fresh binder to subsume the old binders (and their types)
- // TODO: I don't think the binder's types can actually be different (due to checks in caseEquals)
- // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
- // TODO: reuse name exactly if there's only one binder in binders
- val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)), binders.head.name.toString)
-
- // the patterns in same are equal (according to caseEquals)
- // we can thus safely pick the first one arbitrarily, provided we correct binding
- val origPatWithoutBind = commonPattern match {
- case Bind(b, orig) => orig
- case o => o
- }
- // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
- val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
- (Bind(binder, origPatWithoutBind), unifiedBody)
- }
-
- atPos(commonPattern.pos)(CaseDef(pat, EmptyTree, guardedBodySubst))
- }
-
- // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
- var remainingCases = cases
- val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
-
- // when some of collapsed cases (except for the default case itself) did not include an un-guarded case
- // we'll need to emit a labeldef for the default case
- var needDefault = false
-
- while (remainingCases.nonEmpty) {
- val currCase = remainingCases.head
- val currIsDefault = isDefault(CaseDef(currCase.pat, EmptyTree, EmptyTree))
- val (impliesCurr, others) =
- // the default case is implied by all cases, no need to partition (and remainingCases better all be default cases as well)
- if (currIsDefault) (remainingCases.tail, Nil)
- else remainingCases.tail partition (caseImplies(currCase))
-
- val unguardedComesLastOrAbsent =
- (!isGuardedCase(currCase) && impliesCurr.isEmpty) || { val LastImpliesCurr = impliesCurr.length - 1
- impliesCurr.indexWhere(oc => !isGuardedCase(oc)) match {
- // if all cases are guarded we will have to jump to the default case in the final else
- // (except if we're collapsing the default case itself)
- case -1 =>
- if (!currIsDefault) needDefault = true
- true
-
- // last case is not guarded, no need to jump to the default here
- // note: must come after case -1 => (since LastImpliesCurr may be -1)
- case LastImpliesCurr => true
-
- case _ => false
- }}
-
- if (unguardedComesLastOrAbsent /*(1)*/ && impliesCurr.forall(caseEquals(currCase)) /*(2)*/) {
- collapsed += (
- if (impliesCurr.isEmpty && !isGuardedCase(currCase)) currCase
- else collapse(currCase :: impliesCurr, currIsDefault)
- )
-
- remainingCases = others
- } else { // fail
- collapsed.clear()
- remainingCases = Nil
- }
- }
-
- (collapsed.toList, needDefault)
- }
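Aside: a concrete before/after for the rewrite documented above may help; the code is hypothetical user input, not literal compiler output.

    // Hypothetical guards, purely for illustration.
    def p(n: Int) = n > 0
    def q(n: Int) = n % 2 == 0

    // Before collapsing: three cases switch on the same constant 1, two guarded.
    def before(x: Int): String = x match {
      case 1 if p(x) => "a"
      case 1 if q(x) => "b"
      case 1         => "c"
      case _         => "d"
    }

    // After collapsing: one case per partition, guards pushed into the body;
    // the trailing unguarded case becomes the final else, so no jump to the
    // default label is needed for this partition.
    def after(x: Int): String = x match {
      case 1 => if (p(x)) "a" else if (q(x)) "b" else "c"
      case _ => "d"
    }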
-
- private def caseEquals(x: CaseDef)(y: CaseDef) = patternEquals(x.pat)(y.pat)
- private def patternEquals(x: Tree)(y: Tree): Boolean = (x, y) match {
- case (Alternative(xs), Alternative(ys)) =>
- xs.forall(x => ys.exists(patternEquals(x))) &&
- ys.forall(y => xs.exists(patternEquals(y)))
- case (Alternative(pats), _) => pats.forall(p => patternEquals(p)(y))
- case (_, Alternative(pats)) => pats.forall(q => patternEquals(x)(q))
- // regular switch
- case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
- case (Ident(nme.WILDCARD), Ident(nme.WILDCARD)) => true
- // type-switch for catch
- case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)), Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => tpX.tpe =:= tpY.tpe
- case _ => false
- }
-
- // if y matches then x matches for sure (thus, if x comes before y, y is unreachable)
- private def caseImplies(x: CaseDef)(y: CaseDef) = patternImplies(x.pat)(y.pat)
- private def patternImplies(x: Tree)(y: Tree): Boolean = (x, y) match {
- // since alternatives are flattened, must treat them as separate cases
- case (Alternative(pats), _) => pats.exists(p => patternImplies(p)(y))
- case (_, Alternative(pats)) => pats.exists(q => patternImplies(x)(q))
- // regular switch
- case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
- case (Ident(nme.WILDCARD), _) => true
- // type-switch for catch
- case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)),
- Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => instanceOfTpImplies(tpY.tpe, tpX.tpe)
- case _ => false
- }
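Aside: equality and implication differ exactly where alternatives are involved, and implication is what drives the unreachability report. For instance (hypothetical user code):

    // `case 0 | 1` implies `case 1` (whatever matches the latter also matches
    // the former), so listed in this order the second case is unreachable and
    // unreachableCase flags it; the two patterns are overlapping but not equal,
    // so collapseGuardedCases would refuse to merge them.
    def f(x: Int): String = x match {
      case 0 | 1 => "zero or one"
      case 1     => "one"        // unreachable
      case _     => "other"
    }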
-
- private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
-
- // must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
- private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
- var cases = cs
- var unreachable: Option[CaseDef] = None
-
- while (cases.nonEmpty && unreachable.isEmpty) {
- val currCase = cases.head
- if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
- unreachable = Some(cases.tail.head)
- else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
- unreachable = cases.tail.find(caseImplies(currCase))
- else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
- unreachable = Some(currCase)
-
- cases = cases.tail
- }
-
- unreachable
- }
-
- // empty list ==> failure
- def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] =
- // generate if-then-else for 1 case switch (avoids verify error... can't imagine a one-case switch being faster than if-then-else anyway)
- if (cases.isEmpty || cases.tail.isEmpty) Nil
- else {
- val caseDefs = cases map { case (scrutSym, makers) =>
- makers match {
- // default case
- case GuardAndBodyTreeMakers(guard, body) =>
- Some(defaultCase(scrutSym, guard, body))
- // constant (or typetest for typeSwitch)
- case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
- Some(CaseDef(pattern, guard, body))
- // alternatives
- case AlternativesTreeMaker(_, altss, _) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
- val switchableAlts = altss map {
- case SwitchableTreeMaker(pattern) :: Nil =>
- Some(pattern)
- case _ =>
- None
- }
-
- // succeed if they were all switchable
- sequence(switchableAlts) map { switchableAlts =>
- CaseDef(Alternative(switchableAlts), guard, body)
- }
- case _ =>
- // patmatDebug("can't emit switch for "+ makers)
- None //failure (can't translate pattern to a switch)
- }
- }
-
- val caseDefsWithGuards = sequence(caseDefs) match {
- case None => return Nil
- case Some(cds) => cds
- }
-
- // a switch with duplicate cases yields a verify error,
- // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
- // (even though the verify error would disappear, the behaviour would change)
- val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
-
- if (!allReachable) Nil
- else if (noGuards(caseDefsWithGuards)) {
- if (isDefault(caseDefsWithGuards.last)) caseDefsWithGuards
- else caseDefsWithGuards :+ defaultCase()
- } else {
- // collapse identical cases with different guards, push guards into body for all guarded cases
- // this translation is only sound if there are no unreachable (duplicate) cases
- // it should only be run if there are guarded cases, and on failure it returns Nil
- val (collapsed, needDefaultLabel) = collapseGuardedCases(caseDefsWithGuards)
-
- if (collapsed.isEmpty || (needDefaultLabel && !canJump)) Nil
- else {
- def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
- if (needDefaultLabel) deriveCaseDef(cd){ b =>
- // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
- defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
- LabelDef(defaultLabel, Nil, b)
- } else cd
-
- val last = collapsed.last
- if (isDefault(last)) {
- if (!needDefaultLabel) collapsed
- else collapsed.init :+ wrapInDefaultLabelDef(last)
- } else collapsed :+ wrapInDefaultLabelDef(defaultCase())
- }
- }
- }
- }
-
- class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
- val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
- val alternativesSupported = true
- val canJump = true
-
- // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))`
- // The tree itself can be a literal, an ident, a selection, ...
- object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match {
- case ConstantType(const) if const.isIntRange =>
- Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
- case _ => None
- }}
-
- object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
- def unapply(x: TreeMaker): Option[Tree] = x match {
- case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
- case _ => None
- }
- }
-
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
-
- def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
- def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (DEFAULT IF guard) ==> body
- }}
- }
-
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
- // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
- if (regularSwitchMaker.switchableTpe(scrutSym.tpe.dealias)) { // TODO: switch to dealiasWiden in 2.11
- val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
- if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
- else {
- // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
- val scrutToInt: Tree =
- if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
- else (REF(scrutSym) DOT (nme.toInt))
- Some(BLOCK(
- VAL(scrutSym) === scrut,
- Match(scrutToInt, caseDefsWithDefault) // a switch
- ))
- }
- } else None
- }
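Aside: a rough picture of what emitSwitch produces when the scrutinee has a switchable type and all patterns fold to Int-range constants (a sketch, not literal output):

    // Hypothetical input: constant patterns over a Char scrutinee.
    def tag(c: Char): Int = c match {
      case 'a' => 1
      case 'b' => 2
      case _   => 0
    }

    // Conceptual output: bind the scrutinee once, convert it to Int, and hand
    // the back end a Match it can turn into a tableswitch/lookupswitch:
    //   { val x1: Char = c
    //     x1.toInt match { case 97 => 1; case 98 => 2; case _ => 0 } }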
-
- // for the catch-cases in a try/catch
- private object typeSwitchMaker extends SwitchMaker {
- val unchecked = false
- def switchableTpe(tp: Type) = true
- val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
- val canJump = false
-
- // TODO: there are more treemaker-sequences that can be handled by type tests
- // analyze the result of approximateTreeMaker rather than the TreeMaker itself
- object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
- def unapply(x: TreeMaker): Option[Tree] = x match {
- case tm@TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
- Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
- case _ =>
- None
- }
- }
-
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
-
- lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
- def defaultBody: Tree = Throw(CODE.REF(defaultSym))
- def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
- }}
- }
-
- // TODO: drop null checks
- override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
- val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
- if (caseDefsWithDefault isEmpty) None
- else Some(caseDefsWithDefault)
- }
- }
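Aside: typeSwitchMaker serves try/catch translation. Unguarded handlers whose patterns are pure type tests can stay as plain catch cases (sketch below); since canJump is false here, a guard on a handler may force the general match translation instead.

    import java.io.IOException

    // Both handlers are unguarded, pure type tests, so emitTypeSwitch can keep
    // them as ordinary catch cases for the back end.
    def read(body: => String): String =
      try body
      catch {
        case e: IOException      => "io: " + e.getMessage
        case e: RuntimeException => "runtime: " + e.getMessage
      }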
-
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
- override def codegen: AbsCodegen = optimizedCodegen
-
- // trait AbsOptimizedCodegen extends AbsCodegen {
- // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree
- // }
- // def optimizedCodegen: AbsOptimizedCodegen
-
- // when we know we're targetting Option, do some inlining the optimizer won't do
- // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
- // this is a special instance of the advanced inlining optimization that takes a method call on
- // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- object optimizedCodegen extends CommonCodegen { import CODE._
-
- /** Inline runOrElse and get rid of Option allocations
- *
- * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
- * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
- * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
- */
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = newSynthCaseLabel("matchEnd")
- val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- matchEnd setInfo MethodType(List(matchRes), restpe)
-
- def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
- var _currCase = newCaseSym
-
- val caseDefs = cases map { (mkCase: Casegen => Tree) =>
- val currCase = _currCase
- val nextCase = newCaseSym
- _currCase = nextCase
-
- LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
- }
-
- // must compute catchAll after caseLabels (side-effects nextCase)
- // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
- // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
- val catchAllDef = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
-
- LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
- } toList // at most 1 element
-
- // scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
-
- // the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- Block(
- scrutDef ++ caseDefs ++ catchAllDef,
- LabelDef(matchEnd, List(matchRes), REF(matchRes))
- )
- }
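Aside: to make the keepGoing/matchRes comment concrete, the Block assembled by matcher has roughly the shape sketched below, with local defs standing in for the emitted LabelDefs (hypothetical names).

    // Sketch only: each case either computes a result and "jumps" to matchEnd,
    // or falls through to the next case; the synthetic catch-all raises the
    // MatchError. Real output uses LabelDefs and Apply-to-label jumps.
    def sketch(scrut: Any): Int = {
      val x1 = scrut                              // scrutDef
      def matchEnd(x: Int): Int = x               // LabelDef(matchEnd, List(matchRes), ...)
      def case3(): Int = throw new MatchError(x1) // catchAllDef (from matchFailGen)
      def case2(): Int = x1 match { case s: String => matchEnd(s.length); case _ => case3() }
      def case1(): Int = x1 match { case i: Int => matchEnd(i); case _ => case2() }
      case1()
    }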
-
- class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
- optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
-
- // only used to wrap the RHS of a body
- // res: T
- // returns MatchMonad[T]
- def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply
- protected def zero: Tree = nextCase APPLY ()
-
- // prev: MatchMonad[T]
- // b: T
- // next: MatchMonad[U]
- // returns MatchMonad[U]
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
- BLOCK(
- VAL(prevSym) === prev,
- // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
- )
- }
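Aside: flatMap is the one spot where the Option protocol survives, because prev is a user-written extractor call; what it emits is essentially the hand-inlined flatMap described at the top of optimizedCodegen. A sketch with hypothetical names:

    // Shape of flatMap(prev, b, next) for prev: Option[T]:
    //   { val o = <prev>; if (!o.isEmpty) { val b = o.get; <next> } else <zero> }
    def firstCharOr(s: String, default: Char): Char = {
      val o = Some(s).filter(_.nonEmpty)   // stand-in for the extractor call
      if (!o.isEmpty) { val b = o.get; b.charAt(0) } else default
    }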
-
- // cond: Boolean
- // res: T
- // nextBinder: T
- // next == MatchMonad[U]
- // returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
- val rest =
- // only emit a local val for `nextBinder` if it's actually referenced in `next`
- if (next.exists(_.symbol eq nextBinder))
- BLOCK(
- VAL(nextBinder) === res,
- next
- )
- else next
- ifThenElseZero(cond, rest)
- }
-
- // guardTree: Boolean
- // next: MatchMonad[T]
- // returns MatchMonad[T]
- def flatMapGuard(guardTree: Tree, next: Tree): Tree =
- ifThenElseZero(guardTree, next)
-
- def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- condSym === TRUE_typed,
- nextBinder === res,
- next
- ))
- }
-
- }
- }
-
-
- trait MatchOptimizations extends CommonSubconditionElimination
- with DeadCodeElimination
- with SwitchEmission
- with OptimizedCodegen
- with SymbolicMatchAnalysis
- with DPLLSolver { self: TreeMakers =>
- override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): (List[List[TreeMaker]], List[Tree]) = {
- if (!suppression.unreachable) {
- unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
- reportUnreachable(cases(caseIndex).last.pos)
- }
- }
- if (!suppression.exhaustive) {
- val counterExamples = exhaustive(prevBinder, cases, pt)
- if (counterExamples.nonEmpty)
- reportMissingCases(prevBinder.pos, counterExamples)
- }
-
- val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
- val toHoist = (
- for (treeMakers <- optCases)
- yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
- ).flatten.flatten.toList
- (optCases, toHoist)
- }
- }
-}
-
-object PatternMatchingStats {
- val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
- val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
- val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
- val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
- val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
- val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
- val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
-}
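Aside: these timers are presumably driven the same way as the other compiler statistics, guarded by Statistics.canEnable with paired startTimer/stopTimer calls; the sketch below assumes that standard API and a hypothetical wrapper name.

    import scala.reflect.internal.util.Statistics

    // Hypothetical helper showing the intended start/stop pairing around a
    // translation step; the real call sites live in the matcher entry points.
    def timedTranslate[T](body: => T): T = {
      val start = if (Statistics.canEnable) Statistics.startTimer(PatternMatchingStats.patmatNanos) else null
      try body
      finally if (Statistics.canEnable) Statistics.stopTimer(PatternMatchingStats.patmatNanos, start)
    }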
-
-final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
-
-object Suppression {
- val NoSuppresion = Suppression(false, false)
-}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index b9fdd7280e..03ce710700 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -135,7 +135,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
if (settings.lint.value) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
- val alts = clazz.info.decl(sym.name).alternatives
+ // implicit classes leave both a module symbol and a method symbol as residue
+ val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
if (alts.size > 1)
alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
}
@@ -382,7 +383,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
overrideError("cannot be used here - classes can only override abstract types");
} else if (other.isEffectivelyFinal) { // (1.2)
overrideError("cannot override final member");
- } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ } else if (!other.isDeferred && !other.hasFlag(DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
if (isNeitherInClass && !(other.owner isSubClass member.owner))
@@ -1062,9 +1063,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ =>
}
+ private def isObjectOrAnyComparisonMethod(sym: Symbol) = sym match {
+ case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
+ case _ => false
+ }
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 =>
- def isReferenceOp = name == nme.eq || name == nme.ne
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
def isNew(tree: Tree) = tree match {
case Function(_, _)
| Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
@@ -1101,12 +1106,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val s = fn.symbol
(s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
}
+ def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
// Whether the operands+operator represent a warnable combo (assuming anyrefs)
// Looking for comparisons performed with ==/!= in combination with either an
// equals method inherited from Object or a case class synthetic equals (for
// which we know the logic.)
def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
+ def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
@@ -1121,6 +1129,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// unused
def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
+ def isNonsenseValueClassCompare = (
+ !haveSubclassRelationship
+ && isUsingDefaultScalaOp
+ && isEitherValueClass
+ && !isCaseEquals
+ )
def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
@@ -1132,10 +1146,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def nonSensiblyNeq() = nonSensible("", false)
def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
+ def unrelatedMsg = name match {
+ case nme.EQ | nme.eq => "never compare equal"
+ case _ => "always compare unequal"
+ }
def unrelatedTypes() = {
- val msg = if (name == nme.EQ || name == nme.eq)
- "never compare equal" else "always compare unequal"
- unit.warning(pos, typesString + " are unrelated: they will most likely " + msg)
+ val weaselWord = if (isEitherValueClass) "" else " most likely"
+ unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
}
if (nullCount == 2) // null == null
@@ -1174,15 +1191,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ // warn if one but not the other is a derived value class
+ // this is especially important to enable transitioning from
+ // regular to value classes without silent failures.
+ if (isNonsenseValueClassCompare)
+ unrelatedTypes()
// possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
- if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
if (ObjectClass.tpe <:< common)
unrelatedTypes()
}
- def eitherSubclasses = (actual isSubClass receiver) || (receiver isSubClass actual)
// warn if actual has a case parent that is not same as receiver's;
// if actual is not a case, then warn if no common supertype, as below
if (isCaseEquals) {
@@ -1195,14 +1216,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
//else
// if a class, it must be super to thisCase (and receiver) since not <: thisCase
if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
- else if (!eitherSubclasses) warnIfLubless()
+ else if (!haveSubclassRelationship) warnIfLubless()
case _ =>
}
}
- else if (actual isSubClass receiver) ()
- else if (receiver isSubClass actual) ()
// warn only if they have no common supertype below Object
- else {
+ else if (!haveSubclassRelationship) {
warnIfLubless()
}
}
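Aside: the point of the new isNonsenseValueClassCompare branch above is that comparisons against a freshly introduced value class would otherwise fail silently; hypothetical user code that now draws the warning:

    // After wrapping a raw Int in a value class, a stale comparison against Int
    // is always false; it now warns along the lines of
    // "Meters and Int are unrelated: they will never compare equal".
    class Meters(val value: Int) extends AnyVal

    def check(m: Meters): Boolean = m == 1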
@@ -1373,12 +1392,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
*/
private def checkMigration(sym: Symbol, pos: Position) = {
if (sym.hasMigrationAnnotation) {
- val changed = try
+ val changed = try
settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
catch {
- case e : NumberFormatException =>
+ case e : NumberFormatException =>
unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
- // if we can't parse the format on the migration annotation just conservatively assume it changed
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
true
}
if (changed)
@@ -1396,6 +1415,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ private def checkDelayedInitSelect(qual: Tree, sym: Symbol, pos: Position) = {
+ def isLikelyUninitialized = (
+ (sym.owner isSubClass DelayedInitClass)
+ && !qual.tpe.isInstanceOf[ThisType]
+ && sym.accessedOrSelf.isVal
+ )
+ if (settings.lint.value && isLikelyUninitialized)
+ unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
+ }
+
private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
(otherSym != NoSymbol)
&& !otherSym.isProtected
@@ -1595,6 +1624,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if(settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
+ checkDelayedInitSelect(qual, sym, tree.pos)
if (sym eq NoSymbol) {
unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index fb8a111da1..fa72ad64bf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -67,7 +67,10 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case t => t
}
acc setInfoAndEnter (tpe cloneInfo acc)
- storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
+ // Diagnostic for SI-7091
+ if (!accDefs.contains(clazz))
+ reporter.error(sel.pos, s"Internal error: unable to store accessor definition in ${clazz}. clazz.isPackage=${clazz.isPackage}. Accessor required for ${sel} (${showRaw(sel)})")
+ else storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
acc
}
@@ -264,9 +267,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- // direct calls to aliases of param accessors to the superclass in order to avoid
+
+ def isAccessibleFromSuper(sym: Symbol) = {
+ val pre = SuperType(sym.owner.tpe, qual.tpe)
+ localTyper.context.isAccessible(sym, pre, superAccess = true)
+ }
+
+ // Direct calls to aliases of param accessors to the superclass in order to avoid
// duplicating fields.
- if (sym.isParamAccessor && sym.alias != NoSymbol) {
+ // ... but, only if accessible (SI-6793)
+ if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) {
val result = (localTyper.typedPos(tree.pos) {
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
@@ -288,6 +298,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
currentClass.isTrait
&& sym.isProtected
&& sym.enclClass != currentClass
+ && !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
&& !sym.owner.isTrait
&& (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
&& (qual.symbol.info.member(sym.name) ne NoSymbol)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index c5c3c560ea..88d10f1d72 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -313,6 +313,9 @@ abstract class TreeCheckers extends Analyzer {
}
for {
sym <- referencedSymbols
+ // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
+ // This happens in Namer#accessorTypeCompleter. We just look the other way here.
+ if !tree.symbol.isAccessor
if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
} errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4233bde770..4950a7efef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -111,14 +111,9 @@ trait TypeDiagnostics {
"\n(Note that variables need to be initialized to be defined)"
else ""
- /** Only prints the parameter names if they're not synthetic,
- * since "x$1: Int" does not offer any more information than "Int".
- */
private def methodTypeErrorString(tp: Type) = tp match {
case mt @ MethodType(params, resultType) =>
- def forString =
- if (params exists (_.isSynthetic)) params map (_.tpe)
- else params map (_.defString)
+ def forString = params map (_.defString)
forString.mkString("(", ",", ")") + resultType
case x => x.toString
@@ -291,6 +286,24 @@ trait TypeDiagnostics {
)
}
+ def typePatternAdvice(sym: Symbol, ptSym: Symbol) = {
+ val clazz = if (sym.isModuleClass) sym.companionClass else sym
+ val caseString =
+ if (clazz.isCaseClass && (clazz isSubClass ptSym))
+ ( clazz.caseFieldAccessors
+ map (_ => "_") // could use the actual param names here
+ mkString (s"`case ${clazz.name}(", ",", ")`")
+ )
+ else
+ "`case _: " + (clazz.typeParams match {
+ case Nil => "" + clazz.name
+ case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
+ })+ "`"
+
+ "\nNote: if you intended to match against the class, try "+ caseString
+
+ }
+
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
// save the name because it will be mutated until it has been
// distinguished from the other types in the same error message
@@ -427,17 +440,24 @@ trait TypeDiagnostics {
contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
object checkDead {
- private var expr: Symbol = NoSymbol
+ private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
+ // The method being applied to `tree` when `apply` is called.
+ private def expr = exprStack.top
private def exprOK =
(expr != Object_synchronized) &&
!(expr.isLabel && treeInfo.isSynthCaseSymbol(expr)) // it's okay to jump to matchEnd (or another case) with an argument of type nothing
- private def treeOK(tree: Tree) = tree.tpe != null && tree.tpe.typeSymbol == NothingClass
+ private def treeOK(tree: Tree) = {
+ val isLabelDef = tree match { case _: LabelDef => true; case _ => false}
+ tree.tpe != null && tree.tpe.typeSymbol == NothingClass && !isLabelDef
+ }
- def updateExpr(fn: Tree) = {
- if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor)
- checkDead.expr = fn.symbol
+ @inline def updateExpr[A](fn: Tree)(f: => A) = {
+ if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor) {
+ exprStack push fn.symbol
+ try f finally exprStack.pop()
+ } else f
}
def apply(tree: Tree): Tree = {
// Error suppression will squash some of these warnings unless we circumvent it.
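Aside: the move from a single var to a stack matters because applications nest while being typed; with a plain var, typing an inner application overwrote the method recorded for the outer one. Illustrative user code whose argument check depends on that bookkeeping:

    // While the argument of `outer` is typed, the nested `wrap(...)` becomes the
    // current applied method; the stack restores `outer` afterwards, so the
    // Nothing-typed argument is still checked against the right symbol.
    def wrap(x: Int): Int = x
    def outer(x: Int): Int = x

    def example(): Int = outer(wrap(sys.error("boom")))   // dead-code candidate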
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 6e9b3b3fcd..bb3ebb4e0f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -507,10 +507,7 @@ trait Typers extends Modes with Adaptations with Tags {
@inline
final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
- val res = f(newTyper(c))
- if (c.hasErrors)
- context.issue(c.errBuffer.head)
- res
+ f(newTyper(c))
}
@inline
@@ -780,16 +777,19 @@ trait Typers extends Modes with Adaptations with Tags {
if (!OK) {
val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
featureTrait getAnnotation LanguageFeatureAnnot
- val req = if (required) "needs to" else "should"
- var raw = featureDesc + " " + req + " be enabled\n" +
- "by making the implicit value language." + featureName + " visible."
- if (!(currentRun.reportedFeature contains featureTrait))
- raw += "\nThis can be achieved by adding the import clause 'import scala.language." + featureName + "'\n" +
- "or by setting the compiler option -language:" + featureName + ".\n" +
- "See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
- "why the feature " + req + " be explicitly enabled."
+ val req = if (required) "needs to" else "should"
+ val fqname = "scala.language." + featureName
+ val explain = (
+ if (currentRun.reportedFeature contains featureTrait) "" else
+ s"""|
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$featureName.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled.""".stripMargin
+ )
currentRun.reportedFeature += featureTrait
- val msg = raw replace ("#", construct)
+
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
if (required) unit.error(pos, msg)
else currentRun.featureWarnings.warn(pos, msg)
}
@@ -1375,9 +1375,8 @@ trait Typers extends Modes with Adaptations with Tags {
def onError(reportError: => Tree): Tree = {
context.tree match {
case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
- silent(_.typedArgs(args, mode)) match {
- case SilentResultValue(xs) =>
- val args = xs.asInstanceOf[List[Tree]]
+ silent(_.typedArgs(args.map(_.duplicate), mode)) match {
+ case SilentResultValue(args) =>
if (args exists (_.isErrorTyped))
reportError
else
@@ -1576,7 +1575,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
ConstrArgsInTraitParentTpeError(sarg, firstParent)
if (!supertparams.isEmpty)
- supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
case _ =>
if (!supertparams.isEmpty)
MissingTypeArgumentsParentTpeError(supertpt)
@@ -1748,9 +1747,7 @@ trait Typers extends Modes with Adaptations with Tags {
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) {
- _.typedTemplate(cdef.impl, parentTypes(cdef.impl))
- }
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, parentTypes(cdef.impl))
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
@@ -1791,17 +1788,16 @@ trait Typers extends Modes with Adaptations with Tags {
|| !linkedClass.isSerializable
|| clazz.isSerializable
)
- val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
- _.typedTemplate(mdef.impl, {
- parentTypes(mdef.impl) ++ (
- if (noSerializable) Nil
- else {
- clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
- }
- )
- })
- }
+ val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
+ parentTypes(mdef.impl) ++ (
+ if (noSerializable) Nil
+ else {
+ clazz.makeSerializable()
+ List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ }
+ )
+ })
+
val impl2 = finishMethodSynthesis(impl1, clazz, context)
// SI-5954. On second compile of a companion class contained in a package object we end up
@@ -3312,8 +3308,6 @@ trait Typers extends Modes with Adaptations with Tags {
// but behaves as if it were (=> T) => T) we need to know what is the actual
// target of a call. Since this information is no longer available from
// typedArg, it is recorded here.
- checkDead.updateExpr(fun)
-
val args1 =
// no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
// ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
@@ -3368,7 +3362,9 @@ trait Typers extends Modes with Adaptations with Tags {
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
}
- handleMonomorphicCall
+ checkDead.updateExpr(fun) {
+ handleMonomorphicCall
+ }
} else if (needsInstantiation(tparams, formals, args)) {
//println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info)))
inferExprInstance(fun, tparams)
@@ -3481,8 +3477,8 @@ trait Typers extends Modes with Adaptations with Tags {
else {
val resTp = fun1.tpe.finalResultType.normalize
val nbSubPats = args.length
-
- val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol)
+ val (formals, formalsExpanded) =
+ extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
@@ -3768,77 +3764,9 @@ trait Typers extends Modes with Adaptations with Tags {
} else res
}
- def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
- sym.isTypeParameter && sym.owner.isJavaDefined
-
- /** If we map a set of hidden symbols to their existential bounds, we
- * have a problem: the bounds may themselves contain references to the
- * hidden symbols. So this recursively calls existentialBound until
- * the typeSymbol is not amongst the symbols being hidden.
- */
- def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
- def safeBound(t: Type): Type =
- if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
-
- def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
- case tp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve safeBound
- if (parents eq parents1) tp
- else copyRefinedType(tp, parents1, decls)
- case tp => tp
- }
-
- // Hanging onto lower bound in case anything interesting
- // happens with it.
- mapFrom(hidden)(s => s.existentialBound match {
- case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
- case _ => hiBound(s)
- })
- }
-
- /** Given a set `rawSyms` of term- and type-symbols, and a type
- * `tp`, produce a set of fresh type parameters and a type so that
- * it can be abstracted to an existential type. Every type symbol
- * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
- * type `T` in `rawSyms` is given an associated type symbol of the
- * following form:
- *
- * type x.type <: T with Singleton
- *
- * The name of the type parameter is `x.type`, to produce nice
- * diagnostics. The Singleton parent ensures that the type
- * parameter is still seen as a stable type. Type symbols in
- * rawSyms are fully replaced by the new symbols. Term symbols are
- * also replaced, except for term symbols of an Ident tree, where
- * only the type of the Ident is changed.
- */
- protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = {
- val allBounds = existentialBoundsExcludingHidden(rawSyms)
- val typeParams: List[Symbol] = rawSyms map { sym =>
- val name = sym.name match {
- case x: TypeName => x
- case x => tpnme.singletonName(x)
- }
- val bound = allBounds(sym)
- val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified = sowner.newExistential(name, sym.pos)
-
- quantified setInfo bound.cloneInfo(quantified)
- }
- // Higher-kinded existentials are not yet supported, but this is
- // tpeHK for when they are: "if a type constructor is expected/allowed,
- // tpeHK must be called instead of tpe."
- val typeParamTypes = typeParams map (_.tpeHK)
- def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
-
- creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
- }
-
/** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
- def packSymbols(hidden: List[Symbol], tp: Type): Type =
- if (hidden.isEmpty) tp
- else existentialTransform(hidden, tp)(existentialAbstraction)
+ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
ctx.owner.isTerm &&
@@ -3956,8 +3884,16 @@ trait Typers extends Modes with Adaptations with Tags {
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) =>
- TypeTree(newExistentialType(tparams, tp)) setOriginal tree
+ existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => {
+ val original = tpt1 match {
+ case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
+ case _ => {
+ debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree")
+ tree
+ }
+ }
+ TypeTree(newExistentialType(tparams, tp)) setOriginal original
+ }
)
}
@@ -4462,6 +4398,12 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.New(tree, tpt1).setType(tp)
}
+ def functionTypeWildcard(tree: Tree, arity: Int): Type = {
+ val tp = functionType(List.fill(arity)(WildcardType), WildcardType)
+ if (tp == NoType) MaxFunctionArityError(tree)
+ tp
+ }
+
def typedEta(expr1: Tree): Tree = expr1.tpe match {
case TypeRef(_, ByNameParamClass, _) =>
val expr2 = Function(List(), expr1) setPos expr1.pos
@@ -4473,7 +4415,7 @@ trait Typers extends Modes with Adaptations with Tags {
typed1(expr2, mode, pt)
case PolyType(_, MethodType(formals, _)) =>
if (isFunctionType(pt)) expr1
- else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
+ else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case MethodType(formals, _) =>
if (isFunctionType(pt)) expr1
else expr1 match {
@@ -4492,7 +4434,7 @@ trait Typers extends Modes with Adaptations with Tags {
val rhs = Apply(f, args)
typed(rhs)
case _ =>
- adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
+ adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
}
case ErrorType =>
expr1
@@ -4587,8 +4529,13 @@ trait Typers extends Modes with Adaptations with Tags {
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (stableApplication && isPatternMode) {
+ if (args.isEmpty && stableApplication && isPatternMode) {
// treat stable function applications f() as expressions.
+ //
+ // [JZ] According to Martin, this is related to the old pattern matcher, which
+ // needs to typecheck after a the translation of `x.f` to `x.f()` in a prior
+ // compilation phase. As part of SI-7377, this has been tightened with `args.isEmpty`,
+ // but we should remove it altogether in Scala 2.11.
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
@@ -4813,7 +4760,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (!reallyExists(sym)) {
def handleMissing: Tree = {
- if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
+ if (context.unit.isJava && name.isTypeName) {
+ // SI-3120 Java uses the same syntax, A.B, to express selection from the
+ // value A and from the type A. We have to try both.
val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
}
@@ -5046,7 +4995,25 @@ trait Typers extends Modes with Adaptations with Tags {
else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
var impSym: Symbol = NoSymbol // the imported symbol
var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
- while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) {
+
+ // Java: A single-type-import declaration d in a compilation unit c of package p
+ // that imports a type named n shadows, throughout c, the declarations of:
+ //
+ // 1) any top level type named n declared in another compilation unit of p
+ //
+ // A type-import-on-demand declaration never causes any other declaration to be shadowed.
+ //
+ // Scala: Bindings of different kinds have a precedence defined on them:
+ //
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
+ // highest precedence.
+ // 2) Explicit imports have next highest precedence.
+ def depthOk(imp: ImportInfo) = (
+ imp.depth > symDepth
+ || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symDepth)
+ )
+ while (!reallyExists(impSym) && !imports.isEmpty && depthOk(imports.head)) {
impSym = imports.head.importedSymbol(name)
if (!impSym.exists) imports = imports.tail
}
@@ -5292,8 +5259,7 @@ trait Typers extends Modes with Adaptations with Tags {
def typedDocDef(docdef: DocDef) = {
if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
val comment = docdef.comment
- docComments(sym) = comment
- comment.defineVariables(sym)
+ fillDocComment(sym, comment)
val typer1 = newTyper(context.makeNewScope(tree, context.owner))
for (useCase <- comment.useCases) {
typer1.silent(_.typedUseCase(useCase)) match {
@@ -5333,7 +5299,7 @@ trait Typers extends Modes with Adaptations with Tags {
def typedUnApply(tree: UnApply) = {
val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args.length), tree.args.length)
+ val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
val args1 = map2(tree.args, tpes)(typedPattern)
treeCopy.UnApply(tree, fun1, args1) setType pt
}
@@ -5741,7 +5707,10 @@ trait Typers extends Modes with Adaptations with Tags {
// as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
// but arbitrary conversions (in adapt) are disabled
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
- typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
+ case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
+ case pat => pat
+ }
}
/** Types a (fully parameterized) type tree */
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index b51dc0ccd5..31c5a61a8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -34,12 +34,13 @@ trait Unapplies extends ast.TreeDSL
/** returns type list for return type of the extraction
* @see extractorFormalTypes
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, nbSubPats: Int) = {
+ def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
assert(ufn.isMethod, ufn)
+ val nbSubPats = args.length
//Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
ufn.name match {
case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn)
+ val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
else formals
case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
index b103ae9cb0..2601798b96 100644
--- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -10,7 +10,7 @@ import java.io.PrintStream
* @param enabled: A condition that must be true for trace info to be produced.
*/
class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
- def apply[T](msg: String)(value: T): T = {
+ def apply[T](msg: => String)(value: T): T = {
if (enabled) out.println(msg+value)
value
}
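Aside: with msg now by-name, a disabled tracer no longer pays for building the message. Minimal usage sketch (assumes the class above, i.e. scala.tools.nsc.util.SimpleTracer):

    import scala.tools.nsc.util.SimpleTracer

    // The interpolated message is only evaluated when `enabled` is true;
    // the traced value is returned unchanged either way.
    val trace = new SimpleTracer(Console.err, enabled = false)

    def lookup(key: String, table: Map[String, Int]): Option[Int] =
      trace(s"lookup($key) = ")(table.get(key))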
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index 3cdbcc5110..d2e9238e8f 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -40,12 +40,22 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
tree = add(tree)
}
- def iterator = {
- def elems(t: Tree): Iterator[T] = {
- if (t eq null) Iterator.empty
- else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
+ def iterator = toList.iterator
+
+ override def foreach[U](f: T => U) {
+ def loop(t: Tree) {
+ if (t ne null) {
+ loop(t.l)
+ f(t.elem)
+ loop(t.r)
+ }
}
- elems(tree)
+ loop(tree)
+ }
+ override def toList = {
+ val xs = scala.collection.mutable.ListBuffer[T]()
+ foreach(xs += _)
+ xs.toList
}
override def toString(): String = {
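Aside: the new foreach/toList pair does an in-order walk instead of building an Iterator per node, so elements still come back in the order induced by `less`. Usage sketch (this is the compiler's TreeSet, assuming the usual addEntry from its Set trait):

    import scala.tools.nsc.util.TreeSet

    val words = new TreeSet[String]((a, b) => a < b)
    List("pear", "apple", "quince").foreach(words.addEntry)

    // In-order traversal yields the elements sorted by `less`.
    assert(words.toList == List("apple", "pear", "quince"))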
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 86cd845c54..f4f385f8b3 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -4,6 +4,7 @@ import scala.reflect.macros.{ReificationException, UnexpectedReificationExceptio
import scala.reflect.macros.runtime.Context
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
+import scala.reflect.internal.util.OffsetPosition
abstract class MacroImplementations {
val c: Context
@@ -26,16 +27,12 @@ abstract class MacroImplementations {
c.abort(args(parts.length-1).pos,
"too many arguments for interpolated string")
}
- val stringParts = parts map {
- case Literal(Constant(s: String)) => s;
- case _ => throw new IllegalArgumentException("argument parts must be a list of string literals")
- }
- val pi = stringParts.iterator
+ val pi = parts.iterator
val bldr = new java.lang.StringBuilder
val evals = ListBuffer[ValDef]()
val ids = ListBuffer[Ident]()
- val argsStack = Stack(args : _*)
+ val argStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
val freshName = newTermName(c.fresh("arg$"))
@@ -82,49 +79,73 @@ abstract class MacroImplementations {
}
def copyString(first: Boolean): Unit = {
- val str = StringContext.treatEscapes(pi.next())
+ val strTree = pi.next()
+ val rawStr = strTree match {
+ case Literal(Constant(str: String)) => str
+ case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+ }
+ val str = StringContext.treatEscapes(rawStr)
val strLen = str.length
val strIsEmpty = strLen == 0
- var start = 0
+ def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
+ def isPercent(idx: Int) = charAtIndexIs(idx, '%')
+ def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
var idx = 0
+ def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
+ def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
+ def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+
+ // STEP 1: handle argument conversion
+ // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
+ // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+ // 3) "...${smth}%" => error
+ // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+ // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+ // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
+ // 7) "...${smth}[%illegalJavaConversion]" => error
if (!first) {
- val arg = argsStack.pop
- if (strIsEmpty || (str charAt 0) != '%') {
- bldr append "%s"
- defval(arg, AnyTpe)
- } else {
+ val arg = argStack.pop
+ if (isConversion(0)) {
// PRE str is not empty and str(0) == '%'
// argument index parameter is not allowed, thus parse
// [flags][width][.precision]conversion
var pos = 1
- while(pos < strLen && isFlag(str charAt pos)) pos += 1
- while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- if(pos < strLen && str.charAt(pos) == '.') { pos += 1
- while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ while (pos < strLen && isFlag(str charAt pos)) pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ if (pos < strLen && str.charAt(pos) == '.') {
+ pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
}
- if(pos < strLen) {
+ if (pos < strLen) {
conversionType(str charAt pos, arg) match {
case Some(tpe) => defval(arg, tpe)
- case None => c.error(arg.pos, "illegal conversion character")
+ case None => illegalConversionCharacter(pos)
}
} else {
- // TODO: place error message on conversion string
- c.error(arg.pos, "wrong conversion string")
+ wrongConversionString(pos - 1)
}
+ idx = 1
+ } else {
+ bldr append "%s"
+ defval(arg, AnyTpe)
}
- idx = 1
}
+
+ // STEP 2: handle the rest of the text
+ // 1) %n tokens are left as is
+ // 2) %% tokens are left as is
+ // 3) other usages of percents are reported as errors
if (!strIsEmpty) {
- val len = str.length
- while (idx < len) {
- if (str(idx) == '%') {
- bldr append (str substring (start, idx)) append "%%"
- start = idx + 1
+ while (idx < strLen) {
+ if (isPercent(idx)) {
+ if (isConversion(idx)) nonEscapedPercent(idx)
+ else idx += 1 // skip n and % in %n and %%
}
idx += 1
}
- bldr append (str substring (start, idx))
+ bldr append (str take idx)
}
}
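The STEP 1/STEP 2 comments above describe how the formatting interpolator macro treats spliced arguments and stray percent signs. A short usage sketch of the rules they encode (an argument without an explicit conversion defaults to %s; %% and %n are the only bare percents allowed):

    object FInterpDemo extends App {
      val name  = "partest"
      val ratio = 0.25
      println(f"$name%s passes ${ratio * 100}%.1f%% of tests")   // %% renders a literal percent
      println(f"plain $name, then a platform newline%n")         // no conversion given: defaults to %s
      // println(f"$name%q")  // would be rejected: illegal conversion character
    }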
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 670540187e..75817350e5 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -156,8 +156,12 @@ extends AbstractMap[A, B]
* @return the value associated with the given key.
*/
override def apply(k: A): B1 = apply0(this, k)
-
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.tail, k)
+
+
+ @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
+ if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
+ else if (k == cur.key) cur.value
+ else apply0(cur.tail, k)
/** Checks if this map maps `key` to a value and returns the
* value if it exists.
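With the guard on `cur.isEmpty`, a lookup that falls off the end of the list now reports the missing key. A minimal sketch mirroring the shape of the patched `apply0` (a plain key/value list, not ListMap's internals):

    object ListLookupDemo extends App {
      import scala.annotation.tailrec

      @tailrec def lookup[A, B](pairs: List[(A, B)], k: A): B = pairs match {
        case Nil                       => throw new NoSuchElementException("key not found: " + k)
        case (key, v) :: _ if key == k => v
        case _ :: rest                 => lookup(rest, k)
      }

      println(lookup(List("a" -> 1, "b" -> 2), "b"))   // 2
      // lookup(List("a" -> 1), "z")                   // throws NoSuchElementException
    }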
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 0877bfbb69..f1cfd2d69a 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -107,7 +107,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* the identity of the buffer. It takes time linear in
* the buffer size.
*
- * @param elem the element to append.
+ * @param elem the element to prepend.
* @return the updated buffer.
*/
def +=:(elem: A): this.type = {
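For reference, `+=:` is the prepend operator the corrected doc line refers to, while `+=` appends:

    val buf = scala.collection.mutable.ArrayBuffer(2, 3)
    1 +=: buf          // prepend: ArrayBuffer(1, 2, 3)
    buf += 4           // append:  ArrayBuffer(1, 2, 3, 4)
    println(buf)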
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index bb938a7aeb..25ba7e4ce6 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -76,18 +76,21 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
* @return An array obtained by replacing the elements of this array with the rows they represent.
*/
def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = {
- def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
- val bs = asArray(head) map (_ => mkRowBuilder())
- for (xs <- this) {
- var i = 0
- for (x <- asArray(xs)) {
- bs(i) += x
- i += 1
+ val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
+ if (isEmpty) bb.result()
+ else {
+ def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
+ val bs = asArray(head) map (_ => mkRowBuilder())
+ for (xs <- this) {
+ var i = 0
+ for (x <- asArray(xs)) {
+ bs(i) += x
+ i += 1
+ }
}
+ for (b <- bs) bb += b.result()
+ bb.result()
}
- val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
- for (b <- bs) bb += b.result
- bb.result
}
def seq = thisCollection
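The new guard means transposing an empty array returns an empty result instead of failing on `head`. A quick sketch (illustrative, not from the patch):

    val m: Array[Array[Int]] = Array(Array(1, 2, 3), Array(4, 5, 6))
    m.transpose.foreach(row => println(row.mkString(" ")))   // 1 4 / 2 5 / 3 6

    val empty: Array[Array[Int]] = Array()
    println(empty.transpose.length)                          // 0, no more head on an empty array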
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 83aa99ad11..988886b4ea 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -42,11 +42,8 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (scala.util.Properties.isJavaAtLeast("1.6")) {
- val vendor = scala.util.Properties.javaVmVendor
- if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
- else new ThreadPoolTaskSupport
- } else new ThreadPoolTaskSupport
+ if (scala.util.Properties.isJavaAtLeast("1.6")) new ForkJoinTaskSupport
+ else new ThreadPoolTaskSupport
val defaultTaskSupport: TaskSupport = getTaskSupport
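After this change any Java 6+ VM gets ForkJoinTaskSupport by default, regardless of vendor. Code that needs a specific pool can still set one per collection; a small sketch, assuming the 2.10-era scala.concurrent.forkjoin.ForkJoinPool:

    import scala.collection.parallel.ForkJoinTaskSupport
    import scala.concurrent.forkjoin.ForkJoinPool

    val xs = (1 to 1000).toVector.par
    xs.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(2))  // cap parallelism at 2
    println(xs.map(_ * 2).sum)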
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 5a51e97072..6b6ad29074 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -299,8 +299,8 @@ trait Future[+T] extends Awaitable[T] {
* val f = future { 5 }
* val g = f filter { _ % 2 == 1 }
* val h = f filter { _ % 2 == 0 }
- * await(g, 0) // evaluates to 5
- * await(h, 0) // throw a NoSuchElementException
+ * Await.result(g, Duration.Zero) // evaluates to 5
+ * Await.result(h, Duration.Zero) // throws a NoSuchElementException
* }}}
*/
def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
@@ -348,8 +348,8 @@ trait Future[+T] extends Awaitable[T] {
* val h = f collect {
* case x if x > 0 => x * 2
* }
- * await(g, 0) // evaluates to 5
- * await(h, 0) // throw a NoSuchElementException
+ * Await.result(g, Duration.Zero) // evaluates to 5
+ * Await.result(h, Duration.Zero) // throws a NoSuchElementException
* }}}
*/
def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = {
@@ -454,7 +454,7 @@ trait Future[+T] extends Awaitable[T] {
* val f = future { sys.error("failed") }
* val g = future { 5 }
* val h = f fallbackTo g
- * await(h, 0) // evaluates to 5
+ * Await.result(h, Duration.Zero) // evaluates to 5
* }}}
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
@@ -634,7 +634,7 @@ object Future {
*
* Example:
* {{{
- * val result = Await.result(Futures.reduce(futures)(_ + _), 5 seconds)
+ * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds)
* }}}
*/
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
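The corrected scaladoc examples use Await.result with an explicit Duration. Assembled into a runnable form (with a generous timeout rather than Duration.Zero, so the future has time to complete):

    import scala.concurrent.{Await, Future}
    import scala.concurrent.duration._
    import scala.concurrent.ExecutionContext.Implicits.global

    object AwaitDemo extends App {
      val f = Future { 5 }
      val g = f filter { _ % 2 == 1 }
      println(Await.result(g, 2.seconds))   // 5
      // Await.result(f filter { _ % 2 == 0 }, 2.seconds) would throw a NoSuchElementException
    }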
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 441bf5aa4d..fdba0ec716 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -253,7 +253,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
*/
def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger))
- /** Returns a BigInt whose value is (this mod m).
+ /** Returns a BigInt whose value is (this mod that).
* This method differs from `%` in that it always returns a non-negative BigInt.
*/
def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger))
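A quick illustration of the documented difference between `mod` and `%`:

    val a = BigInt(-7)
    val m = BigInt(3)
    println(a % m)      // -1, keeps the sign of the dividend
    println(a mod m)    // 2, always non-negative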
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index e9b92541c2..11b12050c9 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -26,7 +26,7 @@ import scala.language.{implicitConversions, higherKinds}
* val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
*
* // sort by 2nd element
- * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)
+ * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
*
* // sort by the 3rd element, then 1st
* Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1)))
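The fixed example, assembled into a runnable snippet:

    import scala.util.Sorting

    val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
    Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
    println(pairs.mkString(", "))   // (b,1,3), (c,3,1), (a,5,2)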
diff --git a/src/library/scala/runtime/BoxedUnit.java b/src/library/scala/runtime/BoxedUnit.java
index 035c05d12a..f436b7c209 100644
--- a/src/library/scala/runtime/BoxedUnit.java
+++ b/src/library/scala/runtime/BoxedUnit.java
@@ -17,6 +17,8 @@ public final class BoxedUnit implements java.io.Serializable {
public final static BoxedUnit UNIT = new BoxedUnit();
public final static Class<Void> TYPE = java.lang.Void.TYPE;
+
+ private Object readResolve() { return UNIT; }
private BoxedUnit() { }
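readResolve makes deserialization hand back the canonical UNIT instead of allocating a fresh BoxedUnit. The same pattern for any serializable singleton, sketched in Scala (purely illustrative; Scala objects already generate an equivalent readResolve):

    import java.io._

    class Singleton private () extends Serializable {
      // deserialization funnels back to the canonical instance
      private def readResolve(): AnyRef = Singleton.Instance
    }
    object Singleton { val Instance = new Singleton }

    object ReadResolveDemo extends App {
      val bytes = new ByteArrayOutputStream
      new ObjectOutputStream(bytes).writeObject(Singleton.Instance)
      val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
      println(in.readObject() eq Singleton.Instance)   // true, thanks to readResolve
    }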
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 9429e9caa7..030a89773e 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -64,7 +64,7 @@ object Utility extends AnyRef with parsing.TokenTests {
val key = md.key
val smaller = sort(md.filter { m => m.key < key })
val greater = sort(md.filter { m => m.key > key })
- smaller.copy(md.copy ( greater ))
+ smaller.foldRight (md copy greater) ((x, xs) => x copy xs)
}
/** Return the node with its attribute list sorted alphabetically
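The foldRight above rebuilds the attribute chain one node at a time, re-attaching every element of `smaller` in front of `md copy greater`. The same re-linking shape on plain lists:

    val smaller      = List("a", "b")
    val mdAndGreater = List("m", "z")
    println(smaller.foldRight(mdAndGreater)(_ :: _))   // List(a, b, m, z): every element re-attached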
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index d73d99bc89..848deef8c5 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -19,7 +19,7 @@ abstract class CompilerTest extends DirectTest {
def check(source: String, unit: global.CompilationUnit): Unit
lazy val global: Global = newCompiler()
- lazy val units = compilationUnits(global)(sources: _ *)
+ lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
import global._
import definitions._
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index 483cb491a1..e2dac2fd55 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -7,7 +7,7 @@ package scala.tools.partest
import scala.tools.nsc._
import io.Directory
-import util.{BatchSourceFile, CommandLineParser}
+import util.{ SourceFile, BatchSourceFile, CommandLineParser }
import reporters.{Reporter, ConsoleReporter}
/** A class for testing code which is embedded as a string.
@@ -49,18 +49,32 @@ abstract class DirectTest extends App {
def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
- def newSources(sourceCodes: String*) = sourceCodes.toList.zipWithIndex map {
- case (src, idx) => new BatchSourceFile("newSource" + (idx + 1), src)
- }
+ private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
+ codes.toList.zipWithIndex map {
+ case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
+ }
+
+ def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
+ def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
+
def compileString(global: Global)(sourceCode: String): Boolean = {
withRun(global)(_ compileSources newSources(sourceCode))
!global.reporter.hasErrors
}
- def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
- val units = withRun(global) { run =>
- run compileSources newSources(sourceCodes: _*)
+
+ def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
+ sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
+ }
+
+ def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
+ withRun(global) { run =>
+ run compileSources files
run.units.toList
}
+ }
+
+ def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
+ val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
if (global.reporter.hasErrors) {
global.reporter.flush()
sys.error("Compilation failure.")
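The new helpers give each synthetic source a proper extension, which is presumably what lets the compiler treat the `.java` ones as Java sources. The naming scheme they produce, sketched on its own (hypothetical helper, not the partest API):

    def syntheticNames(ext: String)(codes: String*): List[String] =
      codes.toList.zipWithIndex.map { case (_, idx) => s"newSource${idx + 1}.$ext" }

    println(syntheticNames("java")("class A {}", "class B {}"))   // List(newSource1.java, newSource2.java)
    println(syntheticNames("scala")("object C"))                  // List(newSource1.scala)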
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java
deleted file mode 100644
index f69fc6858b..0000000000
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ /dev/null
@@ -1,873 +0,0 @@
-
-package scala.tools.partest.nest;
-
-import java.util.Hashtable;
-
-/** A class to compare IndexedSeqs of objects. The result of comparison
- is a list of <code>change</code> objects which form an
- edit script. The objects compared are traditionally lines
- of text from two files. Comparison options such as "ignore
- whitespace" are implemented by modifying the <code>equals</code>
- and <code>hashcode</code> methods for the objects compared.
-<p>
- The basic algorithm is described in: </br>
- "An O(ND) Difference Algorithm and its Variations", Eugene Myers,
- Algorithmica Vol. 1 No. 2, 1986, p 251.
-<p>
- This class outputs different results from GNU diff 1.15 on some
- inputs. Our results are actually better (smaller change list, smaller
- total size of changes), but it would be nice to know why. Perhaps
- there is a memory overwrite bug in GNU diff 1.15.
-
- @author Stuart D. Gathman, translated from GNU diff 1.15
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the <a href=COPYING.txt>
- GNU General Public License</a>
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
- */
-
-public class Diff {
-
- /** Prepare to find differences between two arrays. Each element of
- the arrays is translated to an "equivalence number" based on
- the result of <code>equals</code>. The original Object arrays
- are no longer needed for computing the differences. They will
- be needed again later to print the results of the comparison as
- an edit script, if desired.
- */
- public Diff(Object[] a,Object[] b) {
- Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length);
- filevec[0] = new file_data(a,h);
- filevec[1] = new file_data(b,h);
- }
-
- /** 1 more than the maximum equivalence value used for this or its
- sibling file. */
- private int equiv_max = 1;
-
- /** When set to true, the comparison uses a heuristic to speed it up.
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
- public boolean heuristic = false;
-
- /** When set to true, the algorithm returns a guarranteed minimal
- set of changes. This makes things slower, sometimes much slower. */
- public boolean no_discards = false;
-
- private int[] xvec, yvec; /* IndexedSeqs being compared. */
- private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the forward
- search of the edit matrix. */
- private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the backward
- search of the edit matrix. */
- private int fdiagoff, bdiagoff;
- private final file_data[] filevec = new file_data[2];
- private int cost;
-
- /** Find the midpoint of the shortest edit script for a specified
- portion of the two files.
-
- We scan from the beginnings of the files, and simultaneously from the ends,
- doing a breadth-first search through the space of edit-sequence.
- When the two searches meet, we have found the midpoint of the shortest
- edit sequence.
-
- The value returned is the number of the diagonal on which the midpoint lies.
- The diagonal number equals the number of inserted lines minus the number
- of deleted lines (counting only lines before the midpoint).
- The edit cost is stored into COST; this is the total number of
- lines inserted or deleted (counting only lines before the midpoint).
-
- This function assumes that the first lines of the specified portions
- of the two files do not match, and likewise that the last lines do not
- match. The caller must trim matching lines from the beginning and end
- of the portions it is going to specify.
-
- Note that if we return the "wrong" diagonal value, or if
- the value of bdiag at that diagonal is "wrong",
- the worst this can do is cause suboptimal diff output.
- It cannot cause incorrect diff output. */
-
- private int diag (int xoff, int xlim, int yoff, int ylim) {
- final int[] fd = fdiag; // Give the compiler a chance.
- final int[] bd = bdiag; // Additional help for the compiler.
- final int[] xv = xvec; // Still more help for the compiler.
- final int[] yv = yvec; // And more and more . . .
- final int dmin = xoff - ylim; // Minimum valid diagonal.
- final int dmax = xlim - yoff; // Maximum valid diagonal.
- final int fmid = xoff - yoff; // Center diagonal of top-down search.
- final int bmid = xlim - ylim; // Center diagonal of bottom-up search.
- int fmin = fmid, fmax = fmid; // Limits of top-down search.
- int bmin = bmid, bmax = bmid; // Limits of bottom-up search.
- /* True if southeast corner is on an odd
- diagonal with respect to the northwest. */
- final boolean odd = (fmid - bmid & 1) != 0;
-
- fd[fdiagoff + fmid] = xoff;
- bd[bdiagoff + bmid] = xlim;
-
- for (int c = 1;; ++c)
- {
- int d; /* Active diagonal. */
- boolean big_snake = false;
-
- /* Extend the top-down search by an edit step in each diagonal. */
- if (fmin > dmin)
- fd[fdiagoff + --fmin - 1] = -1;
- else
- ++fmin;
- if (fmax < dmax)
- fd[fdiagoff + ++fmax + 1] = -1;
- else
- --fmax;
- for (d = fmax; d >= fmin; d -= 2)
- {
- int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1];
-
- if (tlo >= thi)
- x = tlo + 1;
- else
- x = thi;
- oldx = x;
- y = x - d;
- while (x < xlim && y < ylim && xv[x] == yv[y]) {
- ++x; ++y;
- }
- if (x - oldx > 20)
- big_snake = true;
- fd[fdiagoff + d] = x;
- if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c - 1;
- return d;
- }
- }
-
- /* Similar extend the bottom-up search. */
- if (bmin > dmin)
- bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE;
- else
- ++bmin;
- if (bmax < dmax)
- bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE;
- else
- --bmax;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1];
-
- if (tlo < thi)
- x = tlo;
- else
- x = thi - 1;
- oldx = x;
- y = x - d;
- while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) {
- --x; --y;
- }
- if (oldx - x > 20)
- big_snake = true;
- bd[bdiagoff + d] = x;
- if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c;
- return d;
- }
- }
-
- /* Heuristic: check occasionally for a diagonal that has made
- lots of progress compared with the edit distance.
- If we have any such, find the one that has made the most
- progress and return it as if it had succeeded.
-
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
-
- if (c > 200 && big_snake && heuristic)
- {
- int best = 0;
- int bestpos = -1;
-
- for (d = fmax; d >= fmin; d -= 2)
- {
- int dd = d - fmid;
- if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if (fd[fdiagoff + d] * 2 - dd > best
- && fd[fdiagoff + d] - xoff > 20
- && fd[fdiagoff + d] - d - yoff > 20)
- {
- int k;
- int x = fd[fdiagoff + d];
-
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- for (k = 1; k <= 20; k++)
- if (xvec[x - k] != yvec[x - d - k])
- break;
-
- if (k == 21)
- {
- best = fd[fdiagoff + d] * 2 - dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
-
- best = 0;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int dd = d - bmid;
- if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if ((xlim - bd[bdiagoff + d]) * 2 + dd > best
- && xlim - bd[bdiagoff + d] > 20
- && ylim - (bd[bdiagoff + d] - d) > 20)
- {
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- int k;
- int x = bd[bdiagoff + d];
-
- for (k = 0; k < 20; k++)
- if (xvec[x + k] != yvec[x - d + k])
- break;
- if (k == 20)
- {
- best = (xlim - bd[bdiagoff + d]) * 2 + dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
- }
- }
- }
-
- /** Compare in detail contiguous subsequences of the two files
- which are known, as a whole, to match each other.
-
- The results are recorded in the IndexedSeqs filevec[N].changed_flag, by
- storing a 1 in the element for each line that is an insertion or deletion.
-
- The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1.
-
- Note that XLIM, YLIM are exclusive bounds.
- All line numbers are origin-0 and discarded lines are not counted. */
-
- private void compareseq (int xoff, int xlim, int yoff, int ylim) {
- /* Slide down the bottom initial diagonal. */
- while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) {
- ++xoff; ++yoff;
- }
- /* Slide up the top initial diagonal. */
- while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) {
- --xlim; --ylim;
- }
-
- /* Handle simple cases. */
- if (xoff == xlim)
- while (yoff < ylim)
- filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true;
- else if (yoff == ylim)
- while (xoff < xlim)
- filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true;
- else
- {
- /* Find a point of correspondence in the middle of the files. */
-
- int d = diag (xoff, xlim, yoff, ylim);
- int c = cost;
- int f = fdiag[fdiagoff + d];
- int b = bdiag[bdiagoff + d];
-
- if (c == 1)
- {
- /* This should be impossible, because it implies that
- one of the two subsequences is empty,
- and that case was handled above without calling `diag'.
- Let's verify that this is true. */
- throw new IllegalArgumentException("Empty subsequence");
- }
- else
- {
- /* Use that point to split this problem into two subproblems. */
- compareseq (xoff, b, yoff, b - d);
- /* This used to use f instead of b,
- but that is incorrect!
- It is not necessarily the case that diagonal d
- has a snake from b to f. */
- compareseq (b, xlim, b - d, ylim);
- }
- }
- }
-
- /** Discard lines from one file that have no matches in the other file.
- */
-
- private void discard_confusing_lines() {
- filevec[0].discard_confusing_lines(filevec[1]);
- filevec[1].discard_confusing_lines(filevec[0]);
- }
-
- private boolean inhibit = false;
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
- */
-
- private void shift_boundaries() {
- if (inhibit)
- return;
- filevec[0].shift_boundaries(filevec[1]);
- filevec[1].shift_boundaries(filevec[0]);
- }
-
- public interface ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script.
- @param changed0 true for lines in first file which do not match 2nd
- @param len0 number of lines in first file
- @param changed1 true for lines in 2nd file which do not match 1st
- @param len1 number of lines in 2nd file
- @return a linked list of changes - or null
- */
- public change build_script(
- boolean[] changed0,int len0,
- boolean[] changed1,int len1
- );
- }
-
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in reverse order. */
-
- static class ReverseScript implements ScriptBuilder {
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = 0, i1 = 0;
- while (i0 < len0 || i1 < len1) {
- if (changed0[1+i0] || changed1[1+i1]) {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[1+i0]) ++i0;
- while (changed1[1+i1]) ++i1;
-
- /* Record this change. */
- script = new change(line0, line1, i0 - line0, i1 - line1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0++; i1++;
- }
-
- return script;
- }
- }
-
- static class ForwardScript implements ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in forward order. */
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = len0, i1 = len1;
-
- while (i0 >= 0 || i1 >= 0)
- {
- if (changed0[i0] || changed1[i1])
- {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[i0]) --i0;
- while (changed1[i1]) --i1;
-
- /* Record this change. */
- script = new change(i0, i1, line0 - i0, line1 - i1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0--; i1--;
- }
-
- return script;
- }
- }
-
- /** Standard ScriptBuilders. */
- public final static ScriptBuilder
- forwardScript = new ForwardScript(),
- reverseScript = new ReverseScript();
-
- /* Report the differences of two files. DEPTH is the current directory
- depth. */
- public final change diff_2(final boolean reverse) {
- return diff(reverse ? reverseScript : forwardScript);
- }
-
- /** Get the results of comparison as an edit script. The script
- is described by a list of changes. The standard ScriptBuilder
- implementations provide for forward and reverse edit scripts.
- Alternate implementations could, for instance, list common elements
- instead of differences.
- @param bld an object to build the script from change flags
- @return the head of a list of changes
- */
- public change diff(final ScriptBuilder bld) {
-
- /* Some lines are obviously insertions or deletions
- because they don't match anything. Detect them now,
- and avoid even thinking about them in the main comparison algorithm. */
-
- discard_confusing_lines ();
-
- /* Now do the main comparison algorithm, considering just the
- undiscarded lines. */
-
- xvec = filevec[0].undiscarded;
- yvec = filevec[1].undiscarded;
-
- int diags =
- filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3;
- fdiag = new int[diags];
- fdiagoff = filevec[1].nondiscarded_lines + 1;
- bdiag = new int[diags];
- bdiagoff = filevec[1].nondiscarded_lines + 1;
-
- compareseq (0, filevec[0].nondiscarded_lines,
- 0, filevec[1].nondiscarded_lines);
- fdiag = null;
- bdiag = null;
-
- /* Modify the results slightly to make them prettier
- in cases where that can validly be done. */
-
- shift_boundaries ();
-
- /* Get the results of comparison in the form of a chain
- of `struct change's -- an edit script. */
- return bld.build_script(
- filevec[0].changed_flag,
- filevec[0].buffered_lines,
- filevec[1].changed_flag,
- filevec[1].buffered_lines
- );
-
- }
-
- /** The result of comparison is an "edit script": a chain of change objects.
- Each change represents one place where some lines are deleted
- and some are inserted.
-
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
-
- public static class change {
- /** Previous or next edit command. */
- public change link;
- /** # lines of file 1 changed here. */
- public final int inserted;
- /** # lines of file 0 changed here. */
- public final int deleted;
- /** Line number of 1st deleted line. */
- public final int line0;
- /** Line number of 1st inserted line. */
- public final int line1;
-
- /** Cons an additional entry onto the front of an edit script OLD.
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
- public change(int line0, int line1, int deleted, int inserted, change old) {
- this.line0 = line0;
- this.line1 = line1;
- this.inserted = inserted;
- this.deleted = deleted;
- this.link = old;
- //System.err.println(line0+","+line1+","+inserted+","+deleted);
- }
- }
-
- /** Data on one input file being compared.
- */
-
- class file_data {
-
- /** Allocate changed array for the results of comparison. */
- void clear() {
- /* Allocate a flag for each line of each file, saying whether that line
- is an insertion or deletion.
- Allocate an extra element, always zero, at each end of each IndexedSeq.
- */
- changed_flag = new boolean[buffered_lines + 2];
- }
-
- /** Return equiv_count[I] as the number of lines in this file
- that fall in equivalence class I.
- @return the array of equivalence class counts.
- */
- int[] equivCount() {
- int[] equiv_count = new int[equiv_max];
- for (int i = 0; i < buffered_lines; ++i)
- ++equiv_count[equivs[i]];
- return equiv_count;
- }
-
- /** Discard lines that have no matches in another file.
-
- A line which is discarded will not be considered by the actual
- comparison algorithm; it will be as if that line were not in the file.
- The file's `realindexes' table maps virtual line numbers
- (which don't count the discarded lines) into real line numbers;
- this is how the actual comparison algorithm produces results
- that are comprehensible when the discarded lines are counted.
-<p>
- When we discard a line, we also mark it as a deletion or insertion
- so that it will be printed in the output.
- @param f the other file
- */
- void discard_confusing_lines(file_data f) {
- clear();
- /* Set up table of which lines are going to be discarded. */
- final byte[] discarded = discardable(f.equivCount());
-
- /* Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
- filterDiscards(discarded);
-
- /* Actually discard the lines. */
- discard(discarded);
- }
-
- /** Mark to be discarded each line that matches no line of another file.
- If a line matches many lines, mark it as provisionally discardable.
- @see equivCount()
- @param counts The count of each equivalence number for the other file.
- @return 0=nondiscardable, 1=discardable or 2=provisionally discardable
- for each line
- */
-
- private byte[] discardable(final int[] counts) {
- final int end = buffered_lines;
- final byte[] discards = new byte[end];
- final int[] equivs = this.equivs;
- int many = 5;
- int tem = end / 64;
-
- /* Multiply MANY by approximate square root of number of lines.
- That is the threshold for provisionally discardable lines. */
- while ((tem = tem >> 2) > 0)
- many *= 2;
-
- for (int i = 0; i < end; i++)
- {
- int nmatch;
- if (equivs[i] == 0)
- continue;
- nmatch = counts[equivs[i]];
- if (nmatch == 0)
- discards[i] = 1;
- else if (nmatch > many)
- discards[i] = 2;
- }
- return discards;
- }
-
- /** Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
-
- private void filterDiscards(final byte[] discards) {
- final int end = buffered_lines;
-
- for (int i = 0; i < end; i++)
- {
- /* Cancel provisional discards not in middle of run of discards. */
- if (discards[i] == 2)
- discards[i] = 0;
- else if (discards[i] != 0)
- {
- /* We have found a nonprovisional discard. */
- int j;
- int length;
- int provisional = 0;
-
- /* Find end of this run of discardable lines.
- Count how many are provisionally discardable. */
- for (j = i; j < end; j++)
- {
- if (discards[j] == 0)
- break;
- if (discards[j] == 2)
- ++provisional;
- }
-
- /* Cancel provisional discards at end, and shrink the run. */
- while (j > i && discards[j - 1] == 2) {
- discards[--j] = 0; --provisional;
- }
-
- /* Now we have the length of a run of discardable lines
- whose first and last are not provisional. */
- length = j - i;
-
- /* If 1/4 of the lines in the run are provisional,
- cancel discarding of all provisional lines in the run. */
- if (provisional * 4 > length)
- {
- while (j > i)
- if (discards[--j] == 2)
- discards[j] = 0;
- }
- else
- {
- int consec;
- int minimum = 1;
- int tem = length / 4;
-
- /* MINIMUM is approximate square root of LENGTH/4.
- A subrun of two or more provisionals can stand
- when LENGTH is at least 16.
- A subrun of 4 or more can stand when LENGTH >= 64. */
- while ((tem = tem >> 2) > 0)
- minimum *= 2;
- minimum++;
-
- /* Cancel any subrun of MINIMUM or more provisionals
- within the larger run. */
- for (j = 0, consec = 0; j < length; j++)
- if (discards[i + j] != 2)
- consec = 0;
- else if (minimum == ++consec)
- /* Back up to start of subrun, to cancel it all. */
- j -= consec;
- else if (minimum < consec)
- discards[i + j] = 0;
-
- /* Scan from beginning of run
- until we find 3 or more nonprovisionals in a row
- or until the first nonprovisional at least 8 lines in.
- Until that point, cancel any provisionals. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i + j] == 1)
- break;
- if (discards[i + j] == 2) {
- consec = 0; discards[i + j] = 0;
- }
- else if (discards[i + j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
-
- /* I advances to the last line of the run. */
- i += length - 1;
-
- /* Same thing, from end. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i - j] == 1)
- break;
- if (discards[i - j] == 2) {
- consec = 0; discards[i - j] = 0;
- }
- else if (discards[i - j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
- }
- }
- }
- }
-
- /** Actually discard the lines.
- @param discards flags lines to be discarded
- */
- private void discard(final byte[] discards) {
- final int end = buffered_lines;
- int j = 0;
- for (int i = 0; i < end; ++i)
- if (no_discards || discards[i] == 0)
- {
- undiscarded[j] = equivs[i];
- realindexes[j++] = i;
- }
- else
- changed_flag[1+i] = true;
- nondiscarded_lines = j;
- }
-
- file_data(Object[] data, Hashtable<Object, Integer> h) {
- buffered_lines = data.length;
-
- equivs = new int[buffered_lines];
- undiscarded = new int[buffered_lines];
- realindexes = new int[buffered_lines];
-
- for (int i = 0; i < data.length; ++i) {
- Integer ir = h.get(data[i]);
- if (ir == null)
- h.put(data[i], new Integer(equivs[i] = equiv_max++));
- else
- equivs[i] = ir.intValue();
- }
- }
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
-
- We do something when a run of changed lines include a blank
- line at one end and have an excluded blank line at the other.
- We are free to choose which blank line is included.
- `compareseq' always chooses the one at the beginning,
- but usually it is cleaner to consider the following blank line
- to be the "change". The only exception is if the preceding blank line
- would join this change to other changes.
- @param f the file being compared against
- */
-
- void shift_boundaries(file_data f) {
- final boolean[] changed = changed_flag;
- final boolean[] other_changed = f.changed_flag;
- int i = 0;
- int j = 0;
- int i_end = buffered_lines;
- int preceding = -1;
- int other_preceding = -1;
-
- for (;;)
- {
- int start, end, other_start;
-
- /* Scan forwards to find beginning of another run of changes.
- Also keep track of the corresponding point in the other file. */
-
- while (i < i_end && !changed[1+i])
- {
- while (other_changed[1+j++])
- /* Non-corresponding lines in the other file
- will count as the preceding batch of changes. */
- other_preceding = j;
- i++;
- }
-
- if (i == i_end)
- break;
-
- start = i;
- other_start = j;
-
- for (;;)
- {
- /* Now find the end of this run of changes. */
-
- while (i < i_end && changed[1+i]) i++;
- end = i;
-
- /* If the first changed line matches the following unchanged one,
- and this run does not follow right after a previous run,
- and there are no lines deleted from the other file here,
- then classify the first changed line as unchanged
- and the following line as changed in its place. */
-
- /* You might ask, how could this run follow right after another?
- Only because the previous run was shifted here. */
-
- if (end != i_end
- && equivs[start] == equivs[end]
- && !other_changed[1+j]
- && end != i_end
- && !((preceding >= 0 && start == preceding)
- || (other_preceding >= 0
- && other_start == other_preceding)))
- {
- changed[1+end++] = true;
- changed[1+start++] = false;
- ++i;
- /* Since one line-that-matches is now before this run
- instead of after, we must advance in the other file
- to keep in synch. */
- ++j;
- }
- else
- break;
- }
-
- preceding = i;
- other_preceding = j;
- }
- }
-
- /** Number of elements (lines) in this file. */
- final int buffered_lines;
-
- /** IndexedSeq, indexed by line number, containing an equivalence code for
- each line. It is this IndexedSeq that is actually compared with that
- of another file to generate differences. */
- private final int[] equivs;
-
- /** IndexedSeq, like the previous one except that
- the elements for discarded lines have been squeezed out. */
- final int[] undiscarded;
-
- /** IndexedSeq mapping virtual line numbers (not counting discarded lines)
- to real ones (counting those lines). Both are origin-0. */
- final int[] realindexes;
-
- /** Total number of nondiscarded lines. */
- int nondiscarded_lines;
-
- /** Array, indexed by real origin-1 line number,
- containing true for a line that is an insertion or a deletion.
- The results of comparison are stored here. */
- boolean[] changed_flag;
-
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java
deleted file mode 100644
index 31f9a1bc79..0000000000
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ /dev/null
@@ -1,606 +0,0 @@
-
-package scala.tools.partest.nest;
-
-import java.io.*;
-import java.util.Vector;
-import java.util.Date;
-//import com.objectspace.jgl.predicates.UnaryPredicate;
-
-interface UnaryPredicate {
- boolean execute(Object obj);
-}
-
-/** A simple framework for printing change lists produced by <code>Diff</code>.
- @see bmsi.util.Diff
- @author Stuart D. Gathman
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-public class DiffPrint {
- /** A Base class for printing edit scripts produced by Diff.
- This class divides the change list into "hunks", and calls
- <code>print_hunk</code> for each hunk. Various utility methods
- are provided as well.
- */
- public static abstract class Base {
- protected Base(Object[] a,Object[] b, Writer w) {
- outfile = new PrintWriter(w);
- file0 = a;
- file1 = b;
- }
- /** Set to ignore certain kinds of lines when printing
- an edit script. For example, ignoring blank lines or comments.
- */
- protected UnaryPredicate ignore = null;
-
- /** Set to the lines of the files being compared.
- */
- protected Object[] file0, file1;
-
- /** Divide SCRIPT into pieces by calling HUNKFUN and
- print each piece with PRINTFUN.
- Both functions take one arg, an edit script.
-
- PRINTFUN takes a subscript which belongs together (with a null
- link at the end) and prints it. */
- public void print_script(Diff.change script) {
- Diff.change next = script;
-
- while (next != null)
- {
- Diff.change t, end;
-
- /* Find a set of changes that belong together. */
- t = next;
- end = hunkfun(next);
-
- /* Disconnect them from the rest of the changes,
- making them a hunk, and remember the rest for next iteration. */
- next = end.link;
- end.link = null;
- //if (DEBUG)
- // debug_script(t);
-
- /* Print this hunk. */
- print_hunk(t);
-
- /* Reconnect the script so it will all be freed properly. */
- end.link = next;
- }
- outfile.flush();
- }
-
- /** Called with the tail of the script
- and returns the last link that belongs together with the start
- of the tail. */
-
- protected Diff.change hunkfun(Diff.change hunk) {
- return hunk;
- }
-
- protected int first0, last0, first1, last1, deletes, inserts;
- protected PrintWriter outfile;
-
- /** Look at a hunk of edit script and report the range of lines in each file
- that it applies to. HUNK is the start of the hunk, which is a chain
- of `struct change'. The first and last line numbers of file 0 are stored
- in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1.
- Note that these are internal line numbers that count from 0.
-
- If no lines from file 0 are deleted, then FIRST0 is LAST0+1.
-
- Also set *DELETES nonzero if any lines of file 0 are deleted
- and set *INSERTS nonzero if any lines of file 1 are inserted.
- If only ignorable lines are inserted or deleted, both are
- set to 0. */
-
- protected void analyze_hunk(Diff.change hunk) {
- int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0;
- int i;
- Diff.change next;
- boolean nontrivial = (ignore == null);
-
- show_from = show_to = 0;
-
- f0 = hunk.line0;
- f1 = hunk.line1;
-
- for (next = hunk; next != null; next = next.link)
- {
- l0 = next.line0 + next.deleted - 1;
- l1 = next.line1 + next.inserted - 1;
- show_from += next.deleted;
- show_to += next.inserted;
- for (i = next.line0; i <= l0 && ! nontrivial; i++)
- if (!ignore.execute(file0[i]))
- nontrivial = true;
- for (i = next.line1; i <= l1 && ! nontrivial; i++)
- if (!ignore.execute(file1[i]))
- nontrivial = true;
- }
-
- first0 = f0;
- last0 = l0;
- first1 = f1;
- last1 = l1;
-
- /* If all inserted or deleted lines are ignorable,
- tell the caller to ignore this hunk. */
-
- if (!nontrivial)
- show_from = show_to = 0;
-
- deletes = show_from;
- inserts = show_to;
- }
-
- /** Print the script header which identifies the files compared. */
- protected void print_header(String filea, String fileb) { }
-
- protected abstract void print_hunk(Diff.change hunk);
-
- protected void print_1_line(String pre,Object linbuf) {
- outfile.println(pre + linbuf.toString());
- }
-
- /** Print a pair of line numbers with SEPCHAR, translated for file FILE.
- If the two numbers are identical, print just one number.
-
- Args A and B are internal line numbers.
- We print the translated (real) line numbers. */
-
- protected void print_number_range (char sepchar, int a, int b) {
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (++b > ++a)
- outfile.print("" + a + sepchar + b);
- else
- outfile.print(b);
- }
-
- public static char change_letter(int inserts, int deletes) {
- if (inserts == 0)
- return 'd';
- else if (deletes == 0)
- return 'a';
- else
- return 'c';
- }
- }
-
- /** Print a change list in the standard diff format.
- */
- public static class NormalPrint extends Base {
-
- public NormalPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of a normal diff.
- This is a contiguous portion of a complete edit script,
- describing changes in consecutive lines. */
-
- protected void print_hunk (Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk(hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.print(change_letter(inserts, deletes));
- print_number_range (',', first1, last1);
- outfile.println();
-
- /* Print the lines that the first file has. */
- if (deletes != 0)
- for (int i = first0; i <= last0; i++)
- print_1_line ("< ", file0[i]);
-
- if (inserts != 0 && deletes != 0)
- outfile.println("---");
-
- /* Print the lines that the second file has. */
- if (inserts != 0)
- for (int i = first1; i <= last1; i++)
- print_1_line ("> ", file1[i]);
- }
- }
-
- /** Prints an edit script in a format suitable for input to <code>ed</code>.
- The edit script must be generated with the reverse option to
- be useful as actual <code>ed</code> input.
- */
- public static class EdPrint extends Base {
-
- public EdPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of an ed diff */
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.println(change_letter(inserts, deletes));
-
- /* Print new/changed lines from second file, if needed */
- if (inserts != 0)
- {
- boolean inserting = true;
- for (int i = first1; i <= last1; i++)
- {
- /* Resume the insert, if we stopped. */
- if (! inserting)
- outfile.println(i - first1 + first0 + "a");
- inserting = true;
-
- /* If the file's line is just a dot, it would confuse `ed'.
- So output it with a double dot, and set the flag LEADING_DOT
- so that we will output another ed-command later
- to change the double dot into a single dot. */
-
- if (".".equals(file1[i]))
- {
- outfile.println("..");
- outfile.println(".");
- /* Now change that double dot to the desired single dot. */
- outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././");
- inserting = false;
- }
- else
- /* Line is not `.', so output it unmodified. */
- print_1_line ("", file1[i]);
- }
-
- /* End insert mode, if we are still in it. */
- if (inserting)
- outfile.println(".");
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class ContextPrint extends Base {
-
- protected int context = 3;
-
- public ContextPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- protected void print_context_label (String mark, File inf, String label) {
- if (label != null)
- outfile.println(mark + ' ' + label);
- else if (inf.lastModified() > 0)
- // FIXME: use DateFormat to get precise format needed.
- outfile.println(
- mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified())
- );
- else
- /* Don't pretend that standard input is ancient. */
- outfile.println(mark + ' ' + inf.getPath());
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("***", new File(filea), filea);
- print_context_label ("---", new File(fileb), fileb);
- }
-
- /** If function_regexp defined, search for start of function. */
- private String find_function(Object[] lines, int start) {
- return null;
- }
-
- protected void print_function(Object[] file,int start) {
- String function = find_function (file0, first0);
- if (function != null) {
- outfile.print(" ");
- outfile.print(
- (function.length() < 40) ? function : function.substring(0,40)
- );
- }
- }
-
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
-
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
- outfile.print("***************");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function (file0, first0);
-
- outfile.println();
- outfile.print("*** ");
- print_number_range (',', first0, last0);
- outfile.println(" ****");
-
- if (deletes != 0) {
- Diff.change next = hunk;
-
- for (int i = first0; i <= last0; i++) {
- /* Skip past changes that apply (in file 0)
- only to lines before line I. */
-
- while (next != null && next.line0 + next.deleted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line0 <= i)
- /* The change NEXT covers this line.
- If lines were inserted here in file 1, this is "changed".
- Otherwise it is "deleted". */
- prefix = (next.inserted > 0) ? "!" : "-";
-
- print_1_line (prefix, file0[i]);
- }
- }
-
- outfile.print("--- ");
- print_number_range (',', first1, last1);
- outfile.println(" ----");
-
- if (inserts != 0) {
- Diff.change next = hunk;
-
- for (int i = first1; i <= last1; i++) {
- /* Skip past changes that apply (in file 1)
- only to lines before line I. */
-
- while (next != null && next.line1 + next.inserted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line1 <= i)
- /* The change NEXT covers this line.
- If lines were deleted here in file 0, this is "changed".
- Otherwise it is "inserted". */
- prefix = (next.deleted > 0) ? "!" : "+";
-
- print_1_line (prefix, file1[i]);
- }
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class UnifiedPrint extends ContextPrint {
-
- public UnifiedPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("---", new File(filea), filea);
- print_context_label ("+++", new File(fileb), fileb);
- }
-
- private void print_number_range (int a, int b) {
- //translate_range (file, a, b, &trans_a, &trans_b);
-
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (b < a)
- outfile.print(b + ",0");
- else
- super.print_number_range(',',a,b);
- }
-
- protected void print_hunk(Diff.change hunk) {
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
-
- outfile.print("@@ -");
- print_number_range (first0, last0);
- outfile.print(" +");
- print_number_range (first1, last1);
- outfile.print(" @@");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function(file0,first0);
-
- outfile.println();
-
- Diff.change next = hunk;
- int i = first0;
- int j = first1;
-
- while (i <= last0 || j <= last1) {
-
- /* If the line isn't a difference, output the context from file 0. */
-
- if (next == null || i < next.line0) {
- outfile.print(' ');
- print_1_line ("", file0[i++]);
- j++;
- }
- else {
- /* For each difference, first output the deleted part. */
-
- int k = next.deleted;
- while (k-- > 0) {
- outfile.print('-');
- print_1_line ("", file0[i++]);
- }
-
- /* Then output the inserted part. */
-
- k = next.inserted;
- while (k-- > 0) {
- outfile.print('+');
- print_1_line ("", file1[j++]);
- }
-
- /* We're done with this hunk, so on to the next! */
-
- next = next.link;
- }
- }
- }
- }
-
-
- /** Read a text file into an array of String. This provides basic diff
- functionality. A more advanced diff utility will use specialized
- objects to represent the text lines, with options to, for example,
- convert sequences of whitespace to a single space for comparison
- purposes.
- */
- static String[] slurp(String file) throws IOException {
- BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector<String> s = new Vector<String>();
- for (;;) {
- String line = rdr.readLine();
- if (line == null) break;
- s.addElement(line);
- }
- String[] a = new String[s.size()];
- s.copyInto(a);
- return a;
- }
-
- public static void main(String[] argv) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- System.err.println("No differences");
- else {
- Base p;
- Writer w = new OutputStreamWriter(System.out);
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
- public static void doDiff(String[] argv, Writer w) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- w.write("No differences\n");
- else {
- Base p;
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
-}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 2823967ecf..70fdb33c6a 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -18,34 +18,31 @@ import scala.collection.mutable
trait FileUtil {
/**
- * Compares two files using a Java implementation of the GNU diff
- * available at http://www.bmsi.com/java/#diff.
+ * Compares two files using difflib to produce a unified diff.
*
* @param f1 the first file to be compared
* @param f2 the second file to be compared
- * @return the text difference between the compared files
+ * @return the unified diff of the compared files or the empty string if they're equal
*/
def compareFiles(f1: File, f2: File): String = {
- val diffWriter = new StringWriter
- val args = Array(f1.getAbsolutePath(), f2.getAbsolutePath())
-
- DiffPrint.doDiff(args, diffWriter)
- val res = diffWriter.toString
- if (res startsWith "No") "" else res
+ compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
}
- def compareContents(lines1: Seq[String], lines2: Seq[String]): String = {
- val xs1 = lines1.toArray[AnyRef]
- val xs2 = lines2.toArray[AnyRef]
-
- val diff = new Diff(xs1, xs2)
- val change = diff.diff_2(false)
- val writer = new StringWriter
- val p = new DiffPrint.NormalPrint(xs1, xs2, writer)
-
- p.print_script(change)
- val res = writer.toString
- if (res startsWith "No ") ""
- else res
+
+ /**
+ * Compares two lists of lines using difflib to produce a unified diff.
+ *
+ * @param origLines the first seq of lines to be compared
+ * @param newLines the second seq of lines to be compared
+ * @param origName file name to be used in unified diff for `origLines`
+ * @param newName file name to be used in unified diff for `newLines`
+ * @return the unified diff of `origLines` and `newLines`, or the empty string if they're equal
+ */
+ def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
+ import collection.JavaConverters._
+
+ val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
+ if (diff.getDeltas.isEmpty) ""
+ else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
}
}
object FileUtil extends FileUtil { }
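For reference, the following standalone sketch mirrors the difflib calls used by compareContents above; the object name and input lines are made up for illustration, and it assumes the diffutils jar is on the classpath.

    import scala.collection.JavaConverters._

    object UnifiedDiffSketch extends App {
      val origLines = Seq("alpha", "beta", "gamma")
      val newLines  = Seq("alpha", "BETA", "gamma")

      // Compute the deltas between the two line sequences.
      val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)

      // No deltas means the inputs are equal; otherwise render a unified diff
      // with one line of context, exactly as compareContents does.
      val out =
        if (diff.getDeltas.isEmpty) ""
        else difflib.DiffUtils.generateUnifiedDiff("a", "b", origLines.asJava, diff, 1).asScala.mkString("\n")

      println(out)
    }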
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index a42c2219b1..0ba34777a0 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -72,6 +72,9 @@ object PathSettings {
findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
}
+
+ lazy val diffUtils: File =
+ findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
}
class PathSettings() {
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 5cb8589d66..700667afcf 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -53,13 +53,14 @@ class ReflectiveRunner {
Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
+ // This seems to be the core classloader; it determines which classes can be found when running partest from the test/partest script.
var sepLoader = new URLClassLoader(sepUrls, null)
// this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, this paragraph of code can be safely removed
// we hack into the classloader that will become parent classloader for scalac
// this way we ensure that reflective macro lookup will pick correct Code.lift
- sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null)
+ // it's also used to inject diffutils into the classpath when running partest from the test/partest script
+ sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: (PathSettings.diffUtils +: files)) map (_.toURL), null)
if (isPartestDebug)
println("Loading classes from:\n" + sepUrls.mkString("\n"))
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
index c5e944fbc0..f80f6f38fd 100644
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -79,6 +79,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
val compileMgr = new CompileManager(fileManager)
fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
private def compareFiles(f1: File, f2: File): String =
try fileManager.compareFiles(f1, f2)
@@ -817,7 +818,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
if (passed exists (x => !x)) {
if (fileManager.showDiff || isPartestDebug)
NestUI.normal(testDiff)
- else if (fileManager.showLog)
+ if (fileManager.showLog)
showLog(logFile)
}
}
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index b53c700701..c8e03f1d91 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -946,7 +946,7 @@ trait Symbols { self: Universe =>
def knownDirectSubclasses: Set[Symbol]
/** The list of all base classes of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
+ * in linearization order, starting with the class itself and ending
* in class Any.
*
* @group Class
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index e988971ace..7457910226 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -221,24 +221,7 @@ trait TypeTags { self: Universe =>
def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
- case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]]
- case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]]
- case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]]
- case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]]
- case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]]
- case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]]
- case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]]
- case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]]
- case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]]
- case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]]
- case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]]
- case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]]
- case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]]
- case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]]
- case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
+ new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
}
@@ -299,24 +282,7 @@ trait TypeTags { self: Universe =>
val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
- case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
- case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
- case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]]
- case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]]
- case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]]
- case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]]
- case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
- case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
- case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
- case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
- case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
- case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
- case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
- case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
+ new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
}
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 143438b8f5..72163ef0e9 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -149,7 +149,7 @@ trait Types { self: Universe =>
def =:= (that: Type): Boolean
/** The list of all base classes of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
+ * in linearization order, starting with the class itself and ending
* in class Any.
*/
def baseClasses: List[Symbol]
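To make the corrected wording concrete, a small hedged sketch (hypothetical traits and class; the exact symbol printout may vary by Scala version): the list starts with the class itself and ends in Any.

    import scala.reflect.runtime.universe._

    trait A
    trait B extends A
    class C extends B

    object BaseClassesSketch extends App {
      // Linearization order, roughly: List(class C, trait B, trait A, class Object, class Any)
      println(typeOf[C].baseClasses)
    }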
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index 7ccb661426..c198271fb1 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -72,6 +72,9 @@ object ClassfileConstants {
final val CONSTANT_METHODREF = 10
final val CONSTANT_INTFMETHODREF = 11
final val CONSTANT_NAMEANDTYPE = 12
+ final val CONSTANT_METHODHANDLE = 15
+ final val CONSTANT_METHODTYPE = 16
+ final val CONSTANT_INVOKEDYNAMIC = 18
// tags describing the type of a literal in attribute values
final val BYTE_TAG = 'B'
@@ -306,7 +309,7 @@ object ClassfileConstants {
final val invokespecial = 0xb7
final val invokestatic = 0xb8
final val invokeinterface = 0xb9
- final val xxxunusedxxxx = 0xba
+ final val invokedynamic = 0xba
final val new_ = 0xbb
final val newarray = 0xbc
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6e4ca76382..e5d9e54a16 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -1046,7 +1046,7 @@ trait Definitions extends api.StandardDefinitions {
getMemberIfDefined(owner, name) orElse {
if (phase.flatClasses && name.isTypeName && !owner.isPackageObjectOrClass) {
val pkg = owner.owner
- val flatname = nme.flattenedName(owner.name, name)
+ val flatname = tpnme.flattenedName(owner.name, name)
getMember(pkg, flatname)
}
else fatalMissingSymbol(owner, name)
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 8b24678fd6..3bcb793926 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -32,4 +32,81 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
+
+ def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
+ sym.isTypeParameter && sym.owner.isJavaDefined
+
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ mapFrom(hidden)(s => s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }
+
+ /** Given a set `rawSyms` of term- and type-symbols, and a type
+ * `tp`, produce a set of fresh type parameters and a type so that
+ * it can be abstracted to an existential type. Every type symbol
+ * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
+ * type `T` in `rawSyms` is given an associated type symbol of the
+ * following form:
+ *
+ * type x.type <: T with Singleton
+ *
+ * The name of the type parameter is `x.type`, to produce nice
+ * diagnostics. The Singleton parent ensures that the type
+ * parameter is still seen as a stable type. Type symbols in
+ * rawSyms are fully replaced by the new symbols. Term symbols are
+ * also replaced, except for term symbols of an Ident tree, where
+ * only the type of the Ident is changed.
+ */
+ final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
+ val typeParams: List[Symbol] = rawSyms map { sym =>
+ val name = sym.name match {
+ case x: TypeName => x
+ case x => tpnme.singletonName(x)
+ }
+ def rawOwner0 = rawOwner.getOrElse(abort(s"no owner provided for existential transform over raw parameter: $sym"))
+ val bound = allBounds(sym)
+ val sowner = if (isRawParameter(sym)) rawOwner0 else sym.owner
+ val quantified = sowner.newExistential(name, sym.pos)
+
+ quantified setInfo bound.cloneInfo(quantified)
+ }
+ // Higher-kinded existentials are not yet supported, but this is
+ // tpeHK for when they are: "if a type constructor is expected/allowed,
+ // tpeHK must be called instead of tpe."
+ val typeParamTypes = typeParams map (_.tpeHK)
+ def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
+
+ creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
+ }
+
+ /**
+ * Compute an existential type from hidden symbols `hidden` and type `tp`.
+ * @param hidden The symbols that will be existentially abstracted
+ * @param tp The original type
+ * @param rawOwner The owner for Java raw types.
+ */
+ final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None): Type =
+ if (hidden.isEmpty) tp
+ else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction)
}
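The canonical situation packSymbols deals with is a type that mentions symbols which must not escape their defining scope, for example a method-local class leaking into a result type. A minimal, hedged illustration (the existential shown in the comment is approximate):

    object PackSketch {
      def make = {
        class Local
        // `Local` cannot be named outside `make`, so the inferred result type is
        // existentially abstracted, roughly: List[Local] forSome { type Local <: AnyRef }
        List(new Local)
      }
    }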
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 5853315479..5ebe02d95d 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -59,9 +59,9 @@ import scala.collection.{ mutable, immutable }
// 42: VBRIDGE
// 43: VARARGS
// 44: TRIEDCOOKING
-// 45:
-// 46:
-// 47:
+// 45: SYNCHRONIZED/M
+// 46: ARTIFACT
+// 47: DEFAULTMETHOD/M
// 48:
// 49:
// 50:
@@ -116,6 +116,8 @@ class ModifierFlags {
final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer
final val PRESUPER = 1L << 37 // value is evaluated before super call
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
+ // ARTIFACT at #46 in 2.11+
+ final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
// Overridden.
def flagToString(flag: Long): String = ""
@@ -239,7 +241,7 @@ class Flags extends ModifierFlags {
*/
final val ExplicitFlags =
PRIVATE | PROTECTED | ABSTRACT | FINAL | SEALED |
- OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY
+ OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY | DEFAULTMETHOD
/** The two bridge flags */
final val BridgeFlags = BRIDGE | VBRIDGE
@@ -274,7 +276,7 @@ class Flags extends ModifierFlags {
* from Modifiers. Others which may be applied at creation time are:
* SYNTHETIC.
*/
- final val ValueParameterFlags = BYNAMEPARAM | IMPLICIT | DEFAULTPARAM | STABLE
+ final val ValueParameterFlags = BYNAMEPARAM | IMPLICIT | DEFAULTPARAM | STABLE | SYNTHETIC
final val BeanPropertyFlags = DEFERRED | OVERRIDE | STATIC
final val VarianceFlags = COVARIANT | CONTRAVARIANT
@@ -421,7 +423,7 @@ class Flags extends ModifierFlags {
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
case 0x400000000000L => "" // (1L << 46)
- case 0x800000000000L => "" // (1L << 47)
+ case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
case 0x4000000000000L => "" // (1L << 50)
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index bcda2bc1ae..de7af4340d 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -381,13 +381,16 @@ trait StdNames {
/** If `name` is an expandedName name, the original name.
* Otherwise `name` itself.
*/
- def originalName(name: Name): Name = {
- var i = name.length
- while (i >= 2 && !(name.charAt(i - 1) == '$' && name.charAt(i - 2) == '$')) i -= 1
- if (i >= 2) {
- while (i >= 3 && name.charAt(i - 3) == '$') i -= 1
- name.subName(i, name.length)
- } else name
+ def originalName(name: Name): Name = name.toString lastIndexOf "$$" match {
+ case -1 | 0 => name
+ case idx0 =>
+ // We've found "$$", but if it is part of a longer run such as "$$$" or "$$$$"
+ // we need to keep the extra dollars as part of the original name,
+ // so e.g. foo$$$outer has an original name of $outer.
+ var idx = idx0
+ while (idx > 0 && name.charAt(idx - 1) == '$')
+ idx -= 1
+ name drop idx + 2
}
def unspecializedName(name: Name): Name = (
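The rewritten originalName above can be illustrated with a plain-String version of the same logic (a hypothetical helper, not the compiler's Name type):

    object OriginalNameSketch extends App {
      // Find the last "$$", back up over any extra '$' characters, and keep
      // everything after the two-character separator.
      def originalName(name: String): String = name.lastIndexOf("$$") match {
        case -1 | 0 => name
        case idx0 =>
          var idx = idx0
          while (idx > 0 && name.charAt(idx - 1) == '$') idx -= 1
          name.drop(idx + 2)
      }

      println(originalName("Outer$$foo"))  // foo
      println(originalName("foo$$$outer")) // $outer
      println(originalName("plain"))       // plain
    }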
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index d9eb48ff2d..45c16b7302 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3022,7 +3022,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (Statistics.canEnable) Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
- flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
+ flatname = tpnme.flattenedName(rawowner.name, rawname)
flatname
}
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 3040486076..fa4441e513 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -17,7 +17,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
+ import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -98,7 +98,8 @@ abstract class TreeInfo {
// However, before typing, applications of nullary functional values are also
// Apply(function, Nil) trees. To prevent them from being treated as pure,
// we check that the callee is a method.
- fn.symbol.isMethod && !fn.symbol.isLazy && isExprSafeToInline(fn)
+ // The callee might also be a Block, which has a null symbol, so we guard against that (SI-7185)
+ fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isLazy && isExprSafeToInline(fn)
case Typed(expr, _) =>
isExprSafeToInline(expr)
case Block(stats, expr) =>
@@ -514,6 +515,20 @@ abstract class TreeInfo {
case _ => false
}
+ /**
+ * {{{
+ * //------------------------ => effectivePatternArity(args)
+ * case Extractor(a) => 1
+ * case Extractor(a, b) => 2
+ * case Extractor((a, b)) => 2
+ * case Extractor(a @ (b, c)) => 2
+ * }}}
+ */
+ def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
+ case xs => xs
+ }).length
+
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
// tailcalls, cps,... use this flag combination to detect translated matches
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 754adcb80d..2585b541ed 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1434,6 +1434,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Substitute symbols in `from` with symbols in `to`. Returns a new
* tree using the new symbols and whose Ident and Select nodes are
* name-consistent with the new symbols.
+ *
+ * Note: This is currently a destructive operation on the original Tree.
+ * Trees currently assigned a symbol in `from` will be assigned the new symbols
+ * without copying, and trees that define symbols with an `info` that refers
+ * to a symbol in `from` will have a new type assigned.
*/
class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 7299b439f8..ee584bed2c 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -391,8 +391,8 @@ trait Types extends api.Types { self: SymbolTable =>
* This is assessed to be the case if the class is final,
* and all type parameters (if any) are invariant.
*/
- def isFinalType =
- typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant)
+ def isFinalType: Boolean =
+ typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant) && prefix.isStable
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
@@ -3052,6 +3052,12 @@ trait Types extends api.Types { self: SymbolTable =>
val origin: Type,
var constr: TypeConstraint
) extends Type {
+
+ // We don't want case class equality/hashing as TypeVar-s are mutable,
+ // and TypeRefs based on them get wrongly `uniqued` otherwise. See SI-7226.
+ override def hashCode(): Int = System.identityHashCode(this)
+ override def equals(other: Any): Boolean = this eq other.asInstanceOf[AnyRef]
+
def untouchable = false // by other typevars
override def params: List[Symbol] = Nil
override def typeArgs: List[Type] = Nil
@@ -3070,12 +3076,14 @@ trait Types extends api.Types { self: SymbolTable =>
/** The variable's skolemization level */
val level = skolemizationLevel
- /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
- * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
+ /** Applies this TypeVar to type arguments, if arity matches.
+ *
+ * Different applications of the same type constructor variable `?CC`,
+ * e.g. `?CC[Int]` and `?CC[String]`, are modeled as distinct instances of `TypeVar`
+ * that share a `TypeConstraint`, so that the comparisons `?CC[Int] <:< List[Int]`
+ * and `?CC[String] <:< Iterable[String]` result in `?CC` being upper-bounded by `List` and `Iterable`.
*
- * `constr` for `?CC` only tracks type constructors anyway,
- * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
- * `?CC's` hibounds contains List and Iterable.
+ * Applying the wrong number of type args results in a TypeVar whose instance is set to `ErrorType`.
*/
def applyArgs(newArgs: List[Type]): TypeVar = (
if (newArgs.isEmpty && typeArgs.isEmpty)
@@ -3085,7 +3093,7 @@ trait Types extends api.Types { self: SymbolTable =>
TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
}
else
- throw new Error("Invalid type application in TypeVar: " + params + ", " + newArgs)
+ TypeVar(typeSymbol).setInst(ErrorType)
)
// newArgs.length may differ from args.length (could've been empty before)
//
@@ -3115,13 +3123,14 @@ trait Types extends api.Types { self: SymbolTable =>
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
- def setInst(tp: Type) {
+ def setInst(tp: Type): this.type = {
// assert(!(tp containsTp this), this)
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
val res = if (shouldRepackType) repackExistential(tp) else tp
constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
+ this
}
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
@@ -5030,10 +5039,11 @@ trait Types extends api.Types { self: SymbolTable =>
if (tp.isTrivial) tp
else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
val widened = tp match {
- case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
- case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
+ case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
+ case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
}
- widened asSeenFrom (pre, tp.typeSymbol.owner)
+ val memType = widened asSeenFrom (pre, tp.typeSymbol.owner)
+ if (tp eq widened) memType else memType.narrow
}
else loop(tp.prefix) memberType tp.typeSymbol
@@ -5365,7 +5375,9 @@ trait Types extends api.Types { self: SymbolTable =>
case _ =>
NoType
}
- patType match {
+ // See the test for SI-7214 for the motivation behind the dealias. Later, `treeCondStrategy#outerTest`
+ // generates an outer test based on `patType.prefix`, which automatically dealiases.
+ patType.dealias match {
case TypeRef(pre, sym, args) =>
val pre1 = maybeCreateDummyClone(pre, sym)
(pre1 ne NoType) && isPopulated(copyTypeRef(patType, pre1, sym, args), selType)
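On the TypeVar equality change above (SI-7226): structural equality on mutable values breaks hash-based uniquing, because a key can collide with a structurally equal but distinct value, and can drift away from its own cached entry after mutation. A hedged sketch with a hypothetical Box class, not compiler code:

    import scala.collection.mutable

    case class Box(var value: Int) // case-class equality and hashing are structural

    object UniquingSketch extends App {
      val cache = mutable.HashMap.empty[Box, String]
      val a = Box(1)
      val b = Box(1)
      cache(a) = "entry for a"
      println(cache.get(b)) // Some("entry for a") -- distinct b wrongly hits a's entry
      a.value = 2
      println(cache.get(a)) // None -- a's hash changed, so its own entry is unreachable
    }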
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 6dc6a0f7b8..00c7c3de9a 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -17,7 +17,14 @@ trait UnCurry {
val tp = expandAlias(tp0)
tp match {
case MethodType(params, MethodType(params1, restpe)) =>
- apply(MethodType(params ::: params1, restpe))
+ // This transformation is described in UnCurryTransformer.dependentParamTypeErasure
+ val packSymbolsMap = new TypeMap {
+ // Wrapping in a TypeMap to reuse the code that opts for a fast path if the function is an identity.
+ def apply(tp: Type): Type = packSymbols(params, tp)
+ }
+ val existentiallyAbstractedParam1s = packSymbolsMap.mapOver(params1)
+ val substitutedResult = restpe.substSym(params1, existentiallyAbstractedParam1s)
+ apply(MethodType(params ::: existentiallyAbstractedParam1s, substitutedResult))
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
abort("unexpected curried method types with intervening existential")
case MethodType(h :: t, restpe) if h.isImplicit =>
@@ -60,4 +67,4 @@ trait UnCurry {
*/
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isType) uncurryType(tp) else uncurry(tp)
-}
\ No newline at end of file
+}
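For context on the parameter packing introduced above: in a curried method, a later parameter list may depend on an earlier parameter, and once uncurry merges the lists that dependency has to be existentially abstracted away. A hedged sketch of the kind of signature involved (hypothetical classes):

    class Module { class Entry }

    object DependentSketch {
      // `e`'s type refers to the earlier parameter `m`; after uncurry the two
      // parameter lists are collapsed into one, so `m.Entry` can no longer be
      // written directly and the parameter symbols are packed existentially.
      def lookup(m: Module)(e: m.Entry): m.Entry = e
    }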
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 2c90d2d525..cbd27b0d65 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -103,13 +103,13 @@ quant)
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
private def showPercent(x: Double, base: Double) =
- if (base == 0) "" else f" (${x / base * 100}%2.1f%)"
+ if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
/** The base trait for quantities.
* Quantities with non-empty prefix are printed in the statistics info.
*/
trait Quantity {
- if (prefix.nonEmpty) {
+ if (enabled && prefix.nonEmpty) {
val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
qs(key) = this
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index e18435d5b0..1b69ca4e89 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -17,7 +17,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def forInteractive = false
def forScaladoc = false
- def log(msg: => AnyRef): Unit = println(" [] "+msg)
+ def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg)
type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier