-rw-r--r--  build.xml | 1
-rw-r--r--  src/compiler/scala/tools/nsc/CompileSocket.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 7
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala | 122
-rwxr-xr-x  src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/JLineReader.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/settings/FscSettings.scala | 23
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 30
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 194
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 193
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 41
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 92
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 22
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 1
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala | 225
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 13
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 14
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 36
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 42
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 510
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 64
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala | 235
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 97
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 45
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 199
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Variances.scala | 2
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala | 31
-rw-r--r--  src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala | 1
-rw-r--r--  src/library/scala/annotation/migration.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 3
-rw-r--r--  src/library/scala/collection/immutable/Stream.scala | 13
-rw-r--r--  src/library/scala/collection/mutable/ListMap.scala | 17
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 34
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 2
-rw-r--r--  src/library/scala/package.scala | 3
-rw-r--r--  src/partest/scala/tools/partest/ASMConverters.scala | 71
-rw-r--r--  src/partest/scala/tools/partest/BytecodeTest.scala | 102
-rw-r--r--  src/reflect/scala/reflect/api/Exprs.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationCheckers.scala | 179
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 20
-rw-r--r--  src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala | 15
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 50
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 25
-rw-r--r--  src/reflect/scala/reflect/internal/TreeInfo.scala | 18
-rw-r--r--  src/reflect/scala/reflect/internal/Trees.scala | 25
-rw-r--r--  src/reflect/scala/reflect/internal/Types.scala | 74
-rw-r--r--  src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala | 31
-rw-r--r--  src/reflect/scala/reflect/internal/util/Position.scala | 8
-rw-r--r--  src/reflect/scala/reflect/macros/compileTimeOnly.scala | 16
-rw-r--r--  src/scalap/scala/tools/scalap/Main.scala | 2
-rw-r--r--  test/benchmarking/t6726-patmat-analysis.scala | 4005
-rw-r--r--  test/disabled/run/t6987.check (renamed from test/files/run/t6987.check) | 0
-rw-r--r--  test/disabled/run/t6987.scala (renamed from test/files/run/t6987.scala) | 0
-rw-r--r--  test/files/disabled/run/t4602.scala | 57
-rw-r--r--  test/files/jvm/bytecode-test-example.check | 1
-rw-r--r--  test/files/jvm/bytecode-test-example/Foo_1.scala | 9
-rw-r--r--  test/files/jvm/bytecode-test-example/Test.scala | 32
-rw-r--r--  test/files/jvm/future-spec/FutureTests.scala | 14
-rw-r--r--  test/files/jvm/patmat_opt_ignore_underscore.check | 1
-rw-r--r--  test/files/jvm/patmat_opt_ignore_underscore.flags | 1
-rw-r--r--  test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala | 29
-rw-r--r--  test/files/jvm/patmat_opt_ignore_underscore/test.scala | 15
-rw-r--r--  test/files/jvm/patmat_opt_no_nullcheck.check | 1
-rw-r--r--  test/files/jvm/patmat_opt_no_nullcheck.flags | 1
-rw-r--r--  test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala | 24
-rw-r--r--  test/files/jvm/patmat_opt_no_nullcheck/test.scala | 8
-rw-r--r--  test/files/jvm/patmat_opt_primitive_typetest.check | 1
-rw-r--r--  test/files/jvm/patmat_opt_primitive_typetest.flags | 1
-rw-r--r--  test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala | 24
-rw-r--r--  test/files/jvm/patmat_opt_primitive_typetest/test.scala | 8
-rw-r--r--  test/files/jvm/t6941.check | 1
-rw-r--r--  test/files/jvm/t6941.flags | 1
-rw-r--r--  test/files/jvm/t6941/Analyzed_1.scala | 11
-rw-r--r--  test/files/jvm/t6941/test.scala | 15
-rw-r--r--  test/files/jvm/throws-annot-from-java.check | 47
-rw-r--r--  test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala | 3
-rw-r--r--  test/files/jvm/throws-annot-from-java/Test_3.scala | 29
-rw-r--r--  test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java | 6
-rw-r--r--  test/files/neg/macro-false-deprecation-warning.check | 4
-rw-r--r--  test/files/neg/macro-false-deprecation-warning.flags | 1
-rw-r--r--  test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala | 15
-rw-r--r--  test/files/neg/t2968.check | 10
-rw-r--r--  test/files/neg/t2968.scala | 26
-rw-r--r--  test/files/neg/t2968b.check | 4
-rw-r--r--  test/files/neg/t2968b.scala | 7
-rw-r--r--  test/files/neg/t5353.check | 4
-rw-r--r--  test/files/neg/t5353.scala | 3
-rw-r--r--  test/files/neg/t5378.check | 31
-rw-r--r--  test/files/neg/t5378.scala | 54
-rw-r--r--  test/files/neg/t5543.check | 10
-rw-r--r--  test/files/neg/t5543.scala | 19
-rw-r--r--  test/files/neg/t5692a.check | 2
-rw-r--r--  test/files/neg/t5692b.check | 2
-rw-r--r--  test/files/neg/t6426.check | 7
-rw-r--r--  test/files/neg/t6426.scala | 5
-rw-r--r--  test/files/neg/t6539/Macro_1.scala | 2
-rw-r--r--  test/files/neg/t6539/Test_2.scala | 6
-rw-r--r--  test/files/neg/t6601.check | 4
-rw-r--r--  test/files/neg/t6601/AccessPrivateConstructor_2.scala | 3
-rw-r--r--  test/files/neg/t6601/PrivateConstructor_1.scala | 1
-rw-r--r--  test/files/neg/t6728.check | 4
-rw-r--r--  test/files/neg/t6728.scala | 5
-rw-r--r--  test/files/neg/t6829.check | 6
-rw-r--r--  test/files/neg/t6963.check | 2
-rw-r--r--  test/files/neg/t6963.flags | 1
-rw-r--r--  test/files/neg/t6963.scala | 3
-rw-r--r--  test/files/neg/t6963a.check | 5
-rw-r--r--  test/files/neg/t6963a.flags | 1
-rw-r--r--  test/files/neg/t6963a.scala | 5
-rw-r--r--  test/files/neg/t6963b.check | 13
-rw-r--r--  test/files/neg/t6963b.flags | 1
-rw-r--r--  test/files/neg/t6963b.scala | 20
-rw-r--r--  test/files/pos/lubs.scala | 3
-rw-r--r--  test/files/pos/presuperContext.scala | 13
-rw-r--r--  test/files/pos/t1014.scala | 4
-rw-r--r--  test/files/pos/t1803.flags | 1
-rw-r--r--  test/files/pos/t1803.scala | 2
-rw-r--r--  test/files/pos/t3577.scala | 29
-rw-r--r--  test/files/pos/t5130.scala | 46
-rw-r--r--  test/files/pos/t5604b/T_1.scala | 6
-rw-r--r--  test/files/pos/t5604b/T_2.scala | 6
-rw-r--r--  test/files/pos/t5604b/Test_1.scala | 7
-rw-r--r--  test/files/pos/t5604b/Test_2.scala | 7
-rw-r--r--  test/files/pos/t5604b/pack_1.scala | 5
-rw-r--r--  test/files/pos/t5859.scala | 15
-rw-r--r--  test/files/pos/t6072.scala | 3
-rw-r--r--  test/files/pos/t6482.scala | 11
-rw-r--r--  test/files/pos/t6516.scala | 19
-rw-r--r--  test/files/pos/t6595.flags | 1
-rw-r--r--  test/files/pos/t6595.scala | 18
-rw-r--r--  test/files/pos/t6601/PrivateValueClass_1.scala | 1
-rw-r--r--  test/files/pos/t6601/UsePrivateValueClass_2.scala | 10
-rw-r--r--  test/files/pos/t6651.scala | 33
-rw-r--r--  test/files/pos/t6891.flags | 1
-rw-r--r--  test/files/pos/t6891.scala | 26
-rw-r--r--  test/files/pos/t6963c.flags | 1
-rw-r--r--  test/files/pos/t6963c.scala | 25
-rw-r--r--  test/files/pos/t7022.scala | 9
-rw-r--r--  test/files/pos/t7035.scala | 15
-rw-r--r--  test/files/run/analyzerPlugins.check | 197
-rw-r--r--  test/files/run/analyzerPlugins.scala | 121
-rw-r--r--  test/files/run/idempotency-case-classes.check | 2
-rw-r--r--  test/files/run/inline-ex-handlers.check | 214
-rw-r--r--  test/files/run/reify-staticXXX.scala | 36
-rw-r--r--  test/files/run/t2418.check | 1
-rw-r--r--  test/files/run/t2418.scala | 10
-rw-r--r--  test/files/run/t2818.check | 4
-rw-r--r--  test/files/run/t2818.scala | 6
-rw-r--r--  test/files/run/t3353.check | 1
-rw-r--r--  test/files/run/t3353.scala | 10
-rw-r--r--  test/files/run/t4729.check | 4
-rw-r--r--  test/files/run/t4729/J_1.java | 4
-rw-r--r--  test/files/run/t4729/S_2.scala | 29
-rw-r--r--  test/files/run/t5313.check | 12
-rw-r--r--  test/files/run/t5313.scala | 54
-rw-r--r--  test/files/run/t5543.check | 6
-rw-r--r--  test/files/run/t5543.scala | 19
-rw-r--r--  test/files/run/t5604.check | 8
-rw-r--r--  test/files/run/t5604.scala | 50
-rw-r--r--  test/files/run/t6154.check | 1
-rw-r--r--  test/files/run/t6154.scala | 10
-rw-r--r--  test/files/run/t6206.check | 4
-rw-r--r--  test/files/run/t6206.scala | 37
-rw-r--r--  test/files/run/t6288.check | 10
-rw-r--r--  test/files/run/t6288b-jump-position.check | 6
-rw-r--r--  test/files/run/t6572/bar_1.scala | 19
-rw-r--r--  test/files/run/t6572/foo_2.scala | 17
-rw-r--r--  test/files/run/t6584.check | 8
-rw-r--r--  test/files/run/t6584.scala | 16
-rw-r--r--  test/files/run/t6669.scala | 26
-rw-r--r--  test/files/run/t6853.scala | 18
-rw-r--r--  test/files/run/t6863.scala | 114
-rw-r--r--  test/files/run/t6968.check | 1
-rw-r--r--  test/files/run/t6968.scala | 7
-rw-r--r--  test/files/run/t7046.check | 2
-rw-r--r--  test/files/run/t7046.scala | 13
-rw-r--r--  test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala | 2
-rw-r--r--  test/pending/neg/t5378.scala | 19
-rw-r--r--  test/pending/neg/t5589neg.check (renamed from test/files/neg/t5589neg.check) | 0
-rw-r--r--  test/pending/neg/t5589neg.scala (renamed from test/files/neg/t5589neg.scala) | 0
-rw-r--r--  test/pending/neg/t5589neg2.scala (renamed from test/files/neg/t5589neg2.scala) | 0
-rw-r--r--  test/pending/pos/t1336.scala (renamed from test/files/pos/t1336.scala) | 0
-rw-r--r--  test/pending/pos/t5589.scala (renamed from test/files/pos/t5589.scala) | 0
-rw-r--r--  test/pending/run/t4574.scala (renamed from test/files/run/t4574.scala) | 0
-rw-r--r--  test/scaladoc/run/SI-6017.check | 1
-rw-r--r--  test/scaladoc/run/SI-6017.scala | 23
-rw-r--r--  test/scaladoc/run/SI-6812.check | 1
-rw-r--r--  test/scaladoc/run/SI-6812.scala | 24
-rw-r--r--  test/scaladoc/run/package-object.check | 1
-rw-r--r--  test/scaladoc/run/package-object.scala | 1
209 files changed, 8174 insertions, 1091 deletions
diff --git a/build.xml b/build.xml
index 113923db6b..af577afbaa 100644
--- a/build.xml
+++ b/build.xml
@@ -1377,6 +1377,7 @@ QUICK BUILD (QUICK)
<pathelement location="${ant.jar}"/>
<path refid="forkjoin.classpath"/>
<path refid="fjbg.classpath"/>
+ <path refid="asm.classpath"/>
<pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 9a3e8d1530..4051bda914 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -72,7 +72,7 @@ class CompileSocket extends CompileOutputCommon {
/** A temporary directory to use */
val tmpDir = {
val udir = Option(Properties.userName) getOrElse "shared"
- val f = (Path(Properties.tmpDir) / "scala-devel" / udir).createDirectory()
+ val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory()
if (f.isDirectory && f.canWrite) {
info("[Temp directory: " + f + "]")
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 8a3c531ff0..caf6ad14cf 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -33,7 +33,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
}
else {
// Otherwise we're on the server and will use it to absolutize the paths.
- settings.absolutize(currentDir.value)
+ settings.absolutize()
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 54402f0903..def1198dae 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -111,7 +111,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
rhs = EmptyTree
)
}
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = Modifiers(PRESUPER)) }
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
val constrs = {
if (constrMods hasFlag TRAIT) {
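The change above makes presuper value definitions keep their original modifiers and merely gain the PRESUPER flag, instead of having their modifiers replaced outright. A minimal source shape that exercises early definitions, purely as an illustration (not taken from this commit):

    trait B { def x: Int }
    // the ValDef for `x` below is the kind of presuper definition whose
    // modifiers are now preserved when it is copied
    class D extends { val x = 5 } with B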
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 33db4ee2d5..c508e14343 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -980,11 +980,8 @@ self =>
/** Assumed (provisionally) to be TermNames. */
def ident(skipIt: Boolean): Name =
- if (isIdent) {
- val name = in.name.encode
- in.nextToken()
- name
- } else {
+ if (isIdent) rawIdent().encode
+ else {
syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
nme.ERROR
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 4f564c5d0b..79f0bcf149 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -113,6 +113,11 @@ trait Scanners extends ScannersCommon {
cbuf.append(c)
}
+ /** Determines whether this scanner should emit identifier deprecation warnings,
+ * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala.
+ */
+ protected def emitIdentifierDeprecationWarnings = true
+
/** Clear buffer and set name and token */
private def finishNamed(idtoken: Int = IDENTIFIER) {
name = newTermName(cbuf.toString)
@@ -122,7 +127,7 @@ trait Scanners extends ScannersCommon {
val idx = name.start - kwOffset
if (idx >= 0 && idx < kwArray.length) {
token = kwArray(idx)
- if (token == IDENTIFIER && allowIdent != name)
+ if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
}
}
@@ -283,10 +288,16 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
+ def resetOffset() {
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
if (token == CLASS) {
token = CASECLASS
+ resetOffset()
} else if (token == OBJECT) {
token = CASEOBJECT
+ resetOffset()
} else {
lastOffset = nextLastOffset
next copyFrom this
@@ -607,7 +618,10 @@ trait Scanners extends ScannersCommon {
if (ch == '`') {
nextChar()
finishNamed(BACKQUOTED_IDENT)
- if (name.length == 0) syntaxError("empty quoted identifier")
+ if (name.length == 0)
+ syntaxError("empty quoted identifier")
+ else if (name == nme.WILDCARD)
+ syntaxError("wildcard invalid as backquoted identifier")
}
else syntaxError("unclosed quoted identifier")
}
@@ -1488,6 +1502,10 @@ trait Scanners extends ScannersCommon {
def improves(patches1: List[BracePatch]): Boolean =
imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
+ // don't emit deprecation warnings about identifiers like `macro` or `then`
+ // when skimming through the source file trying to heal braces
+ override def emitIdentifierDeprecationWarnings = false
+
override def error(offset: Int, msg: String) {}
}
}
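Two user-visible effects of the scanner changes above, shown as hedged snippets rather than code from the commit: backquoted wildcards are now rejected, and identifiers scheduled to become keywords still draw a deprecation warning except while the parser is only probing for brace fixes:

    object ScannerChanges {
      // val `_` = 1     // now an error: wildcard invalid as backquoted identifier
      // val macro = 1   // deprecation: `macro` is now a reserved word; usage as an identifier is deprecated
      val `macro` = 1    // still legal: backquoted identifiers bypass the keyword table
    }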
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index f94055f666..ac8ab493e0 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -258,7 +258,7 @@ abstract class TreeBuilder {
/** Create tree representing a while loop */
def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index fd2b11898c..44d7a1929b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1164,34 +1164,28 @@ abstract class GenICode extends SubComponent {
resCtx
}
- private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position): Unit = {
- if (!(from <:< to) && !(from == NullReference && to == NothingReference)) {
- to match {
- case UNIT =>
- ctx.bb.emit(DROP(from), pos)
- debuglog("Dropped an " + from);
-
- case _ =>
- debugassert(from != UNIT, "Can't convert from UNIT to " + to + " at: " + pos)
- assert(!from.isReferenceType && !to.isReferenceType,
- "type error: can't convert from " + from + " to " + to +" in unit " + unit.source + " at " + pos)
-
- ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- }
- } else if (from == NothingReference) {
- ctx.bb.emit(THROW(ThrowableClass))
- ctx.bb.enterIgnoreMode
- } else if (from == NullReference) {
- ctx.bb.emit(DROP(from))
- ctx.bb.emit(CONSTANT(Constant(null)))
+ private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ // An awful lot of bugs explode here - let's leave ourselves more clues.
+ // A typical example is an overloaded type assigned after typer.
+ log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+
+ val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
+ def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
+ def checkAssertions() {
+ def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
+ debugassert(from != UNIT, msg)
+ assert(!from.isReferenceType && !to.isReferenceType, msg)
}
- else if (from == ThrowableReference && !(ThrowableClass.tpe <:< to.toType)) {
- log("Inserted check-cast on throwable to " + to + " at " + pos)
- ctx.bb.emit(CHECK_CAST(to))
+ if (conforms) from match {
+ case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
+ case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
+ case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
+ case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
+ case _ => ()
}
- else (from, to) match {
- case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG)))
- case _ => ()
+ else to match {
+ case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
+ case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
}
}
@@ -1907,18 +1901,8 @@ abstract class GenICode extends SubComponent {
var handlerCount = 0
- override def toString(): String = {
- val buf = new StringBuilder()
- buf.append("\tpackage: ").append(packg).append('\n')
- buf.append("\tclazz: ").append(clazz).append('\n')
- buf.append("\tmethod: ").append(method).append('\n')
- buf.append("\tbb: ").append(bb).append('\n')
- buf.append("\tlabels: ").append(labels).append('\n')
- buf.append("\texception handlers: ").append(handlers).append('\n')
- buf.append("\tcleanups: ").append(cleanups).append('\n')
- buf.append("\tscope: ").append(scope).append('\n')
- buf.toString()
- }
+ override def toString =
+ s"package $packg { class $clazz { def $method { bb=$bb } } }"
def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = {
debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index d185ed0c34..0abbe44b02 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1018,7 +1018,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index fe0020e074..598965b982 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -888,7 +888,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fee683ce3a..d4a6d18c60 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -18,6 +18,9 @@ abstract class DeadCodeElimination extends SubComponent {
import icodes.opcodes._
import definitions.RuntimePackage
+ /** The block and index where an instruction is located */
+ type InstrLoc = (BasicBlock, Int)
+
val phaseName = "dce"
/** Create a new phase */
@@ -55,27 +58,35 @@ abstract class DeadCodeElimination extends SubComponent {
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
- var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
+ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
+ val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
+
+ /** Map from a local and a basic block to the instructions that store to that local in that basic block */
+ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
+
+ /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
+ val clobbers = mutable.Set[InstrLoc]()
/** the current method. */
var method: IMethod = _
/** Map instructions who have a drop on some control path, to that DROP instruction. */
- val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
+ val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
debuglog("dead code elimination on " + m);
dropOf.clear()
+ localStores.clear()
+ clobbers.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
@@ -104,10 +115,10 @@ abstract class DeadCodeElimination extends SubComponent {
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
i match {
- case LOAD_LOCAL(l) =>
+ case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
- case STORE_LOCAL(_) =>
+ case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
* (otherwise any side-effects of the module's constructor go lost).
* (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
@@ -125,6 +136,11 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
if (necessary) worklist += ((bb, idx))
+ // add it to the localStores map
+ val key = (l, bb)
+ val set = localStores(key)
+ set += idx
+ localStores(key) = set
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
@@ -162,11 +178,18 @@ abstract class DeadCodeElimination extends SubComponent {
def mark() {
// log("Starting with worklist: " + worklist)
while (!worklist.isEmpty) {
- val (bb, idx) = worklist.iterator.next
+ val (bb, idx) = worklist.head
worklist -= ((bb, idx))
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
+ // adds the instructions that define the stack values about to be consumed to the work list to
+ // be marked useful
+ def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
+ debuglog(s"\t${bb1(idx1)} is consumed by $instr")
+ worklist += ((bb1, idx1))
+ }
+
if (!useful(bb)(idx)) {
useful(bb) += idx
dropOf.get(bb, idx) foreach {
@@ -180,6 +203,15 @@ abstract class DeadCodeElimination extends SubComponent {
worklist += ((bb1, idx1))
}
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ addDefs()
+ // see SI-5313
+ // search for clobbers of this store if we aren't doing l1 = null
+ // this doesn't catch the second store in x=null;l1=x; but in practice this catches
+ // a lot of null stores very cheaply
+ if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
+ findClobbers(l1, bb, idx + 1)
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -199,14 +231,72 @@ abstract class DeadCodeElimination extends SubComponent {
()
case _ =>
- for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- debuglog("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
+ addDefs()
}
}
}
}
+
+ /**
+ * Finds and marks all clobbers of the given local starting in the given
+ * basic block at the given index
+ *
+ * Storing to local variables of reference or array type may be indirectly
+ * observable because it may remove a reference to an object which may allow the object
+ * to be gc'd. See SI-5313. In this code I call the LOCAL_STORE(s) that immediately follow a
+ * LOCAL_STORE and that store to the same local "clobbers." If a LOCAL_STORE is marked
+ * useful then its clobbers must go into the set of clobbers, which will be
+ * compensated for later
+ */
+ def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
+ // previously visited blocks tracked to prevent searching forever in a cycle
+ val inspected = mutable.Set[BasicBlock]()
+ // our worklist of blocks that still need to be checked
+ val blocksToBeInspected = mutable.Set[BasicBlock]()
+
+ // Tries to find the next clobber of l in bb1 starting at idx1.
+ // If it finds one, it adds the clobber to the clobbers set for later
+ // handling. If not, it adds the direct successor blocks to
+ // blocksToBeInspected to try to find clobbers there. Either way
+ // it adds the exception successor blocks for further search.
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ val key = ((l, bb1))
+ val foundClobber = (localStores contains key) && {
+ def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
+
+ // find the smallest index greater than or equal to idx1
+ val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
+ if (clobberIdx == -1)
+ false
+ else {
+ debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
+ clobbers += ((bb1, clobberIdx))
+ true
+ }
+ }
+
+ // always need to look into the exception successors for additional clobbers
+ // because we don't know when flow might enter an exception handler
+ blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
+ // If we didn't find a clobber here then we need to look at successor blocks.
+ // if we found a clobber then we don't need to search in the direct successors
+ if (!foundClobber) {
+ blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
+ }
+ }
+
+ // first search starting at the current index
+ // note we don't put bb in the inspected list yet because a loop may later force
+ // us back around to search from the beginning of bb
+ findClobberInBlock(idx, bb)
+ // then loop until we've exhausted the set of uninspected blocks
+ while(!blocksToBeInspected.isEmpty) {
+ val bb1 = blocksToBeInspected.head
+ blocksToBeInspected -= bb1
+ inspected += bb1
+ findClobberInBlock(0, bb1)
+ }
+ }
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
@@ -236,6 +326,12 @@ abstract class DeadCodeElimination extends SubComponent {
i match {
case NEW(REFERENCE(sym)) =>
log(s"Eliminated instantation of $sym inside $m")
+ case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
+ // if an unused instruction was a clobber of a used store to a reference or array type
+ // then we'll replace it with the store of a null to make sure the reference is
+ // eliminated. See SI-5313
+ bb emit CONSTANT(Constant(null))
+ bb emit STORE_LOCAL(l)
case _ => ()
}
debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
@@ -247,8 +343,8 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
- val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
+ private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
+ val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
@@ -287,7 +383,7 @@ abstract class DeadCodeElimination extends SubComponent {
res
}
- private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = {
+ private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
if (idx != -1)
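The clobber tracking above keeps dead-code elimination from extending an object's lifetime (SI-5313): a store that overwrites a reference-typed local may be the only thing that lets the previous object be collected. A hedged sketch of the pattern being protected; process and longRunningWork are hypothetical helpers, not code from the commit:

    object ClobberExample {
      def process(xs: Array[Int]): Unit = ()
      def longRunningWork(): Unit = ()

      def run() {
        var xs = new Array[Int](1 << 20)  // useful store
        process(xs)
        xs = null                         // clobbering store: if eliminated, the array
                                          // would stay reachable through xs...
        longRunningWork()                 // ...for the whole duration of this call
      }
    }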
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 10e2f23142..4ee6daf73e 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -20,7 +20,7 @@ object IndexModelFactory {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
- implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
+ implicit def orderingMap = math.Ordering.String
def addMember(d: MemberEntity) = {
val firstLetter = {
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index c6cfc317ea..0a469c9227 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -314,12 +314,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
inform("Creating doc template for " + sym)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- def inSource =
- if (sym.sourceFile != null && ! sym.isSynthetic)
- Some((sym.sourceFile, sym.pos.line))
+
+ protected def inSourceFromSymbol(symbol: Symbol) =
+ if (symbol.sourceFile != null && ! symbol.isSynthetic)
+ Some((symbol.sourceFile, symbol.pos.line))
else
None
+ def inSource = inSourceFromSymbol(sym)
+
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -508,11 +511,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val linearization = {
- val symbol = sym.info.members.find {
+ override lazy val (inSource, linearization) = {
+ val representative = sym.info.members.find {
s => s.isPackageObject
} getOrElse sym
- linearizationFromSymbol(symbol)
+ (inSourceFromSymbol(representative), linearizationFromSymbol(representative))
}
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index 10f972452f..5fd5b41625 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -37,6 +37,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ if ((history: History) ne NoHistory)
+ this setHistory history
+
// working around protected/trait/java insufficiencies.
def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
@@ -51,8 +54,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
this setBellEnabled false
- if ((history: History) ne NoHistory)
- this setHistory history
if (completion ne NoCompletion) {
val argCompletor: ArgumentCompleter =
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 9d01e73063..dbb9b7a003 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -15,6 +15,7 @@ import symtab.Flags
import mutable.ListBuffer
import scala.annotation.elidable
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -821,7 +822,7 @@ trait ParallelMatching extends ast.TreeDSL
// match that's unimportant; so we add an instance check only if there
// is a binding.
def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
+ if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
cunit.warning(scrutTree.pos,
"A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
}
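-Xmigration is now version-aware: the warning above only fires when the version being migrated from predates the 2.8.0 behaviour change. A small illustration of the comparison, assuming the ScalaVersion definitions added later in this commit:

    import scala.tools.nsc.settings.ScalaVersion

    object MigrationCheck {
      val warnFor27 = ScalaVersion("2.7.7") < ScalaVersion.twoDotEight  // true  -> warning emitted
      val warnFor28 = ScalaVersion("2.8.0") < ScalaVersion.twoDotEight  // false -> no warning
    }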
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 06ebc20d3e..5c852ae07c 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -38,14 +38,25 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
+
+ override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ val (r, args) = super.processArguments(arguments, processAll)
+ // we need to ensure the files specified with relative locations are absolutized based on the currentDir
+ (r, args map {a => absolutizePath(a)})
+ }
+
+ /**
+ * Takes an individual path and, if it is not absolute, turns it into an absolute path based on currentDir.
+ * If it's already absolute then it's left alone.
+ */
+ private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path
- /** All user set settings rewritten with absolute paths. */
- def absolutize(root: Path) {
- def rewrite(p: String) = (root resolve Path(p)).normalize.path
+ /** All user set settings rewritten with absolute paths based on currentDir */
+ def absolutize() {
userSetSettings foreach {
- case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(rewrite(p.value))
- case p: PathSetting => p.value = ClassPath.map(p.value, rewrite)
- case p: StringSetting => if (holdsPath(p)) p.value = rewrite(p.value)
+ case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(absolutizePath(p.value))
+ case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath)
+ case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value)
case _ => ()
}
}
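The new absolutizePath resolves relative arguments against currentDir and leaves absolute ones untouched. A minimal sketch of the same rule using java.nio instead of scala.tools.nsc.io.Path; the directory and file names are hypothetical:

    import java.nio.file.Paths

    object AbsolutizeSketch {
      def absolutize(currentDir: String, p: String): String =
        Paths.get(currentDir).resolve(p).normalize.toString

      val relative = absolutize("/home/user/proj", "src/Main.scala")   // /home/user/proj/src/Main.scala
      val absolute = absolutize("/home/user/proj", "/tmp/Other.scala") // /tmp/Other.scala, left alone
    }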
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index f1f289ed4d..e4f99474e1 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -221,6 +221,7 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -486,6 +487,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
+ /** A setting represented by a Scala version (`default` unless set) */
+ class ScalaVersionSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ default: ScalaVersion)
+ extends Setting(name, descr) {
+ import ScalaVersion._
+
+ type T = ScalaVersion
+ protected var v: T = NoScalaVersion
+
+ override def tryToSet(args: List[String]) = {
+ value = default
+ Some(args)
+ }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => value = default; Some(Nil)
+ case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ }
+
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
+
+ def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
+
+ withHelpSyntax(s"${name}:<${arg}>")
+ }
+
class PathSetting private[nsc](
name: String,
descr: String,
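A ScalaVersionSetting can be set in two ways, matching the two overrides above; these command lines are illustrative only, not part of the commit:

    // scalac -Xmigration:2.7.7 Foo.scala   // tryToSetColon parses "2.7.7" into a ScalaVersion
    // scalac -Xmigration Foo.scala         // tryToSet falls back to the setting's default
    // unparse then reports List("-Xmigration:2.7.7"), or Nil when the value is NoScalaVersion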
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index b820d10ddc..0a98d45cac 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -85,8 +85,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.").
- withDeprecationMessage("This setting is no longer useful and will be removed. Please remove it from your build.")
+ val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -169,6 +168,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
new file mode 100644
index 0000000000..d6a0149411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -0,0 +1,194 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+// $Id$
+
+package scala.tools.nsc.settings
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A scala version that sorts higher than all actual versions
+ */
+case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. A SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to segregate builds
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Factory methods for producing ScalaVersions
+ */
+object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
+ def errorAndValue() = {
+ errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
+ AnyScalaVersion
+ }
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = util.Try(toInt(s)).isSuccess
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "none" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
+ case _ =>
+ errorAndValue()
+ } catch {
+ case e: NumberFormatException => errorAndValue()
+ }
+ }
+
+ def apply(versionString: String): ScalaVersion =
+ apply(versionString, msg => throw new NumberFormatException(msg))
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = apply(util.Properties.versionNumberString)
+
+ /**
+ * The 2.8.0 version.
+ */
+ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final)
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+/**
+ * A development, test, nightly, snapshot or other "unofficial" build
+ */
+case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+}
+/**
+ * A final release build
+ */
+case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+}
+
+/**
+ * A candidate for final release
+ */
+case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+}
+
+/**
+ * An intermediate release
+ */
+case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+}
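A few illustrative values showing how the factory and ordering above behave, assuming only the definitions in this file (none of this is part of the commit):

    import scala.tools.nsc.settings._

    object VersionExamples {
      val v292     = ScalaVersion("2.9.2")       // SpecificScalaVersion(2, 9, 2, Final)
      val m7       = ScalaVersion("2.10.0-M7")   // SpecificScalaVersion(2, 10, 0, Milestone(7))
      val rcOrder  = ScalaVersion("2.10.0-RC1") < ScalaVersion("2.10.0")  // true: an RC sorts below the final build
      val anyLow   = AnyScalaVersion < v292      // true: AnyScalaVersion sorts below every real version
      val noneHigh = v292 < NoScalaVersion       // true: NoScalaVersion sorts above every real version
    }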
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a708a262e7..4b1d3c34f3 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -1043,7 +1043,13 @@ abstract class ClassfileParser {
val nClasses = in.nextChar
for (n <- 0 until nClasses) {
val cls = pool.getClassSymbol(in.nextChar.toInt)
- sym.addAnnotation(definitions.ThrowsClass, Literal(Constant(cls.tpe)))
+ val tp = if (cls.isMonomorphicType) cls.tpe else {
+ debuglog(s"Encountered polymorphic exception `${cls.fullName}` while parsing class file.")
+ // in case we encounter polymorphic exception the best we can do is to convert that type to
+ // monomorphic one by introducing existientals, see SI-7009 for details
+ typer.packSymbols(cls.typeParams, cls.tpe)
+ }
+ sym.addAnnotation(appliedType(definitions.ThrowsClass, tp), Literal(Constant(tp)))
}
}
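A hedged Scala analogue of the SI-7009 situation handled above (class and method names are hypothetical, loosely mirroring the throws-annot-from-java test added by this commit): when a classfile's throws clause names a parameterized exception, the parser now packs its type parameters into an existential so the annotation argument is a well-formed, monomorphic type:

    class PolymorphicException[T] extends Exception

    class User {
      // roughly the annotation the parser now attaches for such a method
      @throws[PolymorphicException[_]]
      def throwPoly(): Unit = ()
    }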
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 25b7813646..e8b0cd2696 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -71,8 +71,8 @@ abstract class Pickler extends SubComponent {
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
unit.error(t.pos, t.symbol.typeParams.length match {
case 0 => "macro has not been expanded"
- case 1 => "type parameter not specified"
- case _ => "type parameters not specified"
+ case 1 => "this type parameter must be specified"
+ case _ => "these type parameters must be specified"
})
return
}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 44510ab0c2..7a0b034fd0 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -450,19 +450,31 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* is a value type (int et al.) in which case it must cast to the boxed version
* because invoke only returns object and erasure made sure the result is
* expected to be an AnyRef. */
- val t: Tree = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, mparams)
-
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
+ }
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
+ )
+ }
}
/* For testing purposes, the dynamic application's condition
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 1003d417f6..78c120c1ad 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -10,6 +10,7 @@ import symtab._
import Flags.{ CASE => _, _ }
import scala.collection.mutable.ListBuffer
import matching.{ Patterns, ParallelMatching }
+import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -553,7 +554,7 @@ abstract class ExplicitOuter extends InfoTransform
}
case _ =>
- if (settings.Xmigration28.value) tree match {
+ if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
if (isArraySeqTest(qual.tpe, args.head.tpe))
unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 39e16c3f58..bc54054028 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -75,36 +75,58 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
assert(matching.nonEmpty,
- s"no extension method found for $imeth:${imeth.tpe} among ${candidates.map(c => c.name+":"+c.tpe).toList} / ${extensionNames(imeth).toList}")
+ sm"""|no extension method found for:
+ |
+ | $imeth:${imeth.tpe}
+ |
+ | Candidates:
+ |
+ | ${candidates.map(c => c.name+":"+c.tpe).mkString("\n")}
+ |
+ | Candidates (signatures normalized):
+ |
+ | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
+ |
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}""")
matching.head
}
+ /** Recognize a MethodType which represents an extension method.
+ *
+ * It may have a curried parameter list with the `$this` alone in the first
+ * parameter list, in which case that parameter list is dropped. Or, since
+ * the curried lists disappear during uncurry, it may have a single parameter
+ * list with `$this` as the first parameter, in which case that parameter is
+ * removed from the list.
+ */
+ object ExtensionMethodType {
+ def unapply(tp: Type) = tp match {
+ case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF =>
+ Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) ))
+ case _ =>
+ None
+ }
+ }
+
/** This method removes the `$this` argument from the parameter list a method.
*
* A method may be a `PolyType`, in which case we tear out the `$this` and the class
- * type params from its nested `MethodType`.
- * It may be a `MethodType`, either with a curried parameter list in which the first argument
- * is a `$this` - we just return the rest of the list.
- * This means that the corresponding symbol was generated during `extmethods`.
- *
- * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list.
- * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards,
- * for instance, during `specialize`.
- * In this case, the first argument is `$this` and we just get rid of it.
+ * type params from its nested `MethodType`. Or it may be a MethodType, as
+ * described at the ExtensionMethodType extractor.
*/
private def normalize(stpe: Type, clazz: Symbol): Type = stpe match {
case PolyType(tparams, restpe) =>
- GenPolyType(tparams dropRight clazz.typeParams.length, normalize(restpe.substSym(tparams takeRight clazz.typeParams.length, clazz.typeParams), clazz))
- case MethodType(List(thiz), restpe) if thiz.name == nme.SELF =>
- restpe
- case MethodType(tparams, restpe) =>
- MethodType(tparams.drop(1), restpe)
+ // method type parameters, class type parameters
+ val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length)
+ GenPolyType(mtparams,
+ normalize(restpe.substSym(ctparams, clazz.typeParams), clazz))
+ case ExtensionMethodType(thiz, etpe) =>
+ etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
case _ =>
stpe
}
class Extender(unit: CompilationUnit) extends TypingTransformer(unit) {
-
private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
@@ -115,27 +137,54 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
+ /** We will need to clone the info of the original method (which obtains clones
+ * of the method type parameters), clone the type parameters of the value class,
+ * and create a new polymethod with the union of all those type parameters, with
+ * their infos adjusted to be consistent with their new home. Example:
+ *
+ * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ * def baz[B >: A](x: B): List[B] = x :: xs
+ * // baz has to be transformed into this extension method, where
+ * // A is cloned from class Foo and B is cloned from method baz:
+ * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+ * }
+ *
+ * TODO: factor out the logic for consolidating type parameters from a class
+ * and a method for re-use elsewhere, because nobody will get this right without
+ * some higher level facilities.
+ */
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- // No variance for method type parameters
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
+ val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth
+ // Start with the class type parameters - clones will be method type parameters
+ // so must drop their variance.
+ val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
+
+ val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*)
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
- def transform(clonedType: Type): Type = clonedType match {
- case MethodType(params, restpe) =>
- // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried.
- MethodType(List(thisParam), clonedType)
- case NullaryMethodType(restpe) =>
- MethodType(List(thisParam), restpe)
- }
- val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth
- GenPolyType(tparams ::: newTypeParams, transform(restpe) substSym (clazz.typeParams, newTypeParams))
- }
+ val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult))
+ val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam)
- private def allParams(tpe: Type): List[Symbol] = tpe match {
- case MethodType(params, res) => params ::: allParams(res)
- case _ => List()
- }
+ def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass)
+ def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass)
+
+ // We can't substitute symbols on the entire polytype because we
+ // need to modify the bounds of the cloned type parameters, but we
+ // don't want to substitute for the cloned type parameters themselves.
+ val tparams = tparamsFromMethod ::: tparamsFromClass
+ GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType))
+ // For reference, calling fix on the GenPolyType plays out like this:
+ // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966]
+ // do not conform to method extension$baz#16148's type parameter bounds
+ //
+ // And the difference is visible here. See how B is bounded from below by A#16149
+ // in both cases, but in the failing case, the other type parameter has turned into
+ // a different A. (What is that A? It is a clone of the original A created in
+ // SubstMap during the call to substSym, but I am not clear on all the particulars.)
+ //
+ // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
+ // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
+ }
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -144,42 +193,62 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
wrap over other value classes anyway.
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
} else tree
case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension =>
- val companion = currentOwner.companionModule
- val origMeth = tree.symbol
- val extensionName = extensionNames(origMeth).head
- val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
- .setAnnotations(origMeth.annotations)
- companion.info.decls.enter(extensionMeth)
- val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner)
+ val origMeth = tree.symbol
+ val origThis = currentOwner
+ val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params
+ val origParams = vparamss.flatten map (_.symbol)
+ val companion = origThis.companionModule
+
+ def makeExtensionMethodSymbol = {
+ val extensionName = extensionNames(origMeth).head
+ val extensionMeth = (
+ companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ setAnnotations origMeth.annotations
+ )
+ companion.info.decls.enter(extensionMeth)
+ }
+
+ val extensionMeth = makeExtensionMethodSymbol
+ val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis)
extensionMeth setInfo newInfo
- log("Value class %s spawns extension method.\n Old: %s\n New: %s".format(
- currentOwner,
- origMeth.defString,
- extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo
-
- def thisParamRef = gen.mkAttributedIdent(extensionMeth.info.params.head setPos extensionMeth.pos)
- val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info
- val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams
- val extensionBody = rhs
+
+ log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}")
+
+ val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo
+ val extensionParams = allParameters(extensionMono)
+ val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
+
+ val extensionBody = (
+ rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
- .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail)
- .substituteThis(currentOwner, thisParamRef)
- .changeOwner((origMeth, extensionMeth))
- extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
- val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
- List(This(currentOwner)))
- val extensionCall = atOwner(origMeth) {
- localTyper.typedPos(rhs.pos) {
- gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol))
- }
- }
- deriveDefDef(tree)(_ => extensionCall)
+ .substituteSymbols(origParams, extensionParams)
+ .substituteThis(origThis, extensionThis)
+ .changeOwner(origMeth -> extensionMeth)
+ )
+
+ // Record the extension method ( FIXME: because... ? )
+ extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody))
+
+ // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
+ // which leaves the actual argument application for extensionCall.
+ val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ val targs = origTpeParams map (_.tpeHK)
+ val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
+
+ // Apply all the argument lists.
+ deriveDefDef(tree)(_ =>
+ atOwner(origMeth)(
+ localTyper.typedPos(rhs.pos)(
+ gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol))
+ )
+ )
+ )
case _ =>
super.transform(tree)
}
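
For orientation, here is a minimal sketch of what the Extender above produces for a simple value class. The user code is hypothetical; the `bar$extension` name follows the `Foo.bar$extension[T1, T2, ...]($this)` comment in the hunk (elsewhere the patch also shows the `extension$baz` spelling), and the shape of the generated forwarder and companion method is an approximation, not the literal trees.

class Meters(val n: Int) extends AnyVal {
  def bar(m: Meters): Meters = new Meters(n + m.n)
}
// conceptually rewritten to:
//   class Meters(val n: Int) extends AnyVal {
//     def bar(m: Meters): Meters = Meters.bar$extension(this)(m)
//   }
//   object Meters {
//     final def bar$extension($this: Meters)(m: Meters): Meters =
//       new Meters($this.n + m.n)
//   }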
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 448079abed..845843e9d6 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -451,20 +451,45 @@ abstract class LambdaLift extends InfoTransform {
}
case arg => arg
}
- /** Wrap expr argument in new *Ref(..) constructor, but make
- * sure that Try expressions stay at toplevel.
+
+ /** Wrap the expr argument in a new *Ref(..) constructor. But try/catch
+ * is a problem because a throw will clear the stack and, post-catch, we
+ * would expect the partially-constructed object to still be on the stack
+ * for the call to init. So we recursively search for "leaf" result
+ * expressions where we know it's safe to put the new *Ref(..) constructor
+ * or, if all else fails, transform the expr into
+ * { val temp = expr; new *Ref(temp) }.
+ * We narrowly look for try/catch only in captured var definitions because
+ * other try/catch expressions have already been lifted; see SI-6863.
*/
- def refConstr(expr: Tree): Tree = expr match {
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
+ // a try/catch in final expression position.
+ case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
+ New(sym.tpe, expr)
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
+ case Block(stats, expr) =>
+ Block(stats, refConstr(expr))
+ case If(cond, trueBranch, falseBranch) =>
+ If(cond, refConstr(trueBranch), refConstr(falseBranch))
+ case Match(selector, cases) =>
+ Match(selector, cases map refConstrCase)
+ // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
+ // any possibility of try/catch in the *Ref constructor. This should be a safe transformation as a default,
+ // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
case _ =>
- New(sym.tpe, expr)
- }
+ debuglog("assigning expr to temp: " + (expr.pos))
+ val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
+ val tempDef = ValDef(tempSym, expr) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
+ Block(tempDef, New(sym.tpe, tempRef))
+ }}
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
- treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- refConstr(constructorArg)
- })
+
+ treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
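
To make the leaf-wrapping concrete: with the change above, a captured mutable variable whose initializer is a try/catch gets the *Ref constructor pushed into the leaf expressions instead of being wrapped around the whole try. A rough sketch, assuming an Int variable (boxed as scala.runtime.IntRef); the trees are simplified.

def f(g: () => Int) = {
  var x = try g() catch { case _: Exception => 0 }
  () => { x += 1; x }
}
// after LambdaLift, conceptually:
//   val x = try new scala.runtime.IntRef(g())
//           catch { case _: Exception => new scala.runtime.IntRef(0) }
// and only expressions with no safe leaf (e.g. a LabelDef) fall back to
//   { val temp1 = expr; new scala.runtime.IntRef(temp1) }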
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 3cd943aa74..c9c68d080d 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -867,7 +867,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
rhs match {
case Block(List(assign), returnTree) =>
val Assign(moduleVarRef, _) = assign
- val cond = Apply(Select(moduleVarRef, nme.eq), List(NULL))
+ val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
abort("Invalid getter " + rhs + " for module in class " + clazz)
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 64051b56ec..232148676c 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -178,6 +178,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
+ def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
+ }
+ private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
+ assert(!specializedMethod.isOverloaded, specializedMethod.defString)
+ val om = Overload(specializedMethod, env)
+ overloads(method) ::= om
+ om
}
/** Just to mark uncheckable */
@@ -289,10 +297,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Return the specialized overload of sym in the given env, if any. */
- def overload(sym: Symbol, env: TypeEnv) =
- overloads(sym).find(ov => TypeEnv.includes(ov.env, env))
-
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
@@ -628,7 +632,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- overloads(specMember) ::= Overload(om, typeEnv(om))
+ newOverload(specMember, om, typeEnv(om))
enterMember(om)
}
@@ -835,7 +839,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
- overloads(sym) ::= Overload(specMember, env)
+ newOverload(sym, specMember, env)
owner.info.decls.enter(specMember)
specMember
}
@@ -877,9 +881,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (wasSpec.nonEmpty)
debuglog("specialized overload for %s in %s".format(specMember, pp(typeEnv(specMember))))
- overloads(sym) ::= Overload(specMember, spec)
+ newOverload(sym, specMember, spec)
info(specMember) = SpecialOverload(sym, typeEnv(specMember))
-
specMember
}
@@ -994,7 +997,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
SpecialOverride(impl)
}
)
- overloads(overriding) ::= Overload(om, env)
+ newOverload(overriding, om, env)
ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
@@ -1476,54 +1479,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
transformTypeApply
- case Select(qual, name) =>
- def transformSelect = {
- qual match {
- case _: Super if illegalSpecializedInheritance(currentClass) =>
- val pos = tree.pos
- debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- debuglog(pos.lineContent)
- tree
- case _ =>
+ case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) =>
+ val pos = tree.pos
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
+ tree
+ case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
-
- //log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
- if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
- // log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- // log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- if (!env.isEmpty) {
- // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
- val specMember = overload(symbol, env)
- if (specMember.isDefined) {
- localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- }
- else {
- val qual1 = transform(qual)
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ val qual1 = transform(qual)
+ def reselect(member: Symbol) = {
+ val newSelect = atPos(tree.pos)(Select(qual1, member))
+ if (member.isMethod) localTyper typedOperator newSelect
+ else localTyper typed newSelect
+ }
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => reselect(member)
+ case _ =>
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol) {
- val tree1 = atPos(tree.pos)(Select(qual1, specMember))
- if (specMember.isMethod)
- localTyper.typedOperator(tree1)
- else
- localTyper.typed(tree1)
- } else
+ if (specMember ne NoSymbol)
+ reselect(specMember)
+ else
treeCopy.Select(tree, qual1, name)
- }
- } else
- super.transform(tree)
- } else overloads(symbol).find(_.sym.info =:= symbol.info) match {
- case Some(specMember) =>
- val qual1 = transform(qual)
- debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
- localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym)))
- case None =>
- super.transform(tree)
- }
+ }
}
+ case Select(qual, _) =>
+ overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) =>
+ val newTree = Select(transform(qual), member)
+ debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
+ localTyper.typedOperator(atPos(tree.pos)(newTree))
+ case None =>
+ super.transform(tree)
}
- transformSelect
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
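
For context on the rerouting cases above, a hedged sketch of the kind of call that gets redirected to a specialized overload registered through newOverload. The `$mcI$sp` suffix is the usual specialized-name mangling and merely stands in for whatever specializedName actually produces here.

class Box[@specialized(Int) T](t: T) {
  def get: T = t
}
val b = new Box(1)
b.get
// the Select of `get` on a Box[Int] receiver is rerouted, conceptually, to
//   new Box$mcI$sp(1).get$mcI$sp()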
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e05df09aaf..965063a724 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -231,7 +231,17 @@ abstract class UnCurry extends InfoTransform
* If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
*
*/
- def transformFunction(fun: Function): Tree =
+ def transformFunction(fun: Function): Tree = {
+ fun.tpe match {
+ // can happen when analyzer plugins assign refined types to functions, e.g.
+ // (() => Int) { def apply(): Int @typeConstraint }
+ case RefinedType(List(funTp), decls) =>
+ debuglog(s"eliminate refinement from function type ${fun.tpe}")
+ fun.tpe = funTp
+ case _ =>
+ ()
+ }
+
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
@@ -239,10 +249,7 @@ abstract class UnCurry extends InfoTransform
// only get here when running under -Xoldpatmat
synthPartialFunction(fun)
case _ =>
- val parents = (
- if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
- else addSerializable(ObjectClass.tpe, fun.tpe)
- )
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
@@ -275,6 +282,7 @@ abstract class UnCurry extends InfoTransform
}
}
+ }
/** Transform a function node (x => body) of type PartialFunction[T, R] where
* body = expr match { case P_i if G_i => E_i }_i=1..n
@@ -603,8 +611,6 @@ abstract class UnCurry extends InfoTransform
}
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
if (!sym.owner.isSourceMethod)
withNeedLift(true) { super.transform(tree) }
else
@@ -629,7 +635,7 @@ abstract class UnCurry extends InfoTransform
}
}
- case Assign(Select(_, _), _) =>
+ case Assign(_: RefTree, _) =>
withNeedLift(true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 78175f393a..b50486306d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -25,6 +25,7 @@ trait Analyzer extends AnyRef
with TypeDiagnostics
with ContextErrors
with StdAttachments
+ with AnalyzerPlugins
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
new file mode 100644
index 0000000000..28f620dbb5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -0,0 +1,225 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/**
+ * @author Lukas Rytz
+ * @version 1.0
+ */
+trait AnalyzerPlugins { self: Analyzer =>
+ import global._
+
+
+ trait AnalyzerPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Let analyzer plugins change the expected type before type checking a tree.
+ */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+
+ /**
+ * Let analyzer plugins modify the type that has been computed for a tree.
+ *
+ * @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
+ * @param typer The Typer that type checked `tree`
+ * @param tree The type-checked tree
+ * @param mode Mode that was used for typing `tree`
+ * @param pt Expected type that was used for typing `tree`
+ */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+
+ /**
+ * Let analyzer plugins change the types assigned to definitions. For definitions that have
+ * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the
+ * type is inferred by typing the definition's righthand side.
+ *
+ * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt`
+ * TypeTree of the definition (for DefDef and ValDef).
+ *
+ * (*) If the type of a method or value is inferred, the type-checked tree is stored in the
+ * `analyzer.transformed` hash map, indexed by the definition's rhs tree.
+ *
+ * NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
+ * method is called from the type completer of a recursive method, type checking the method
+ * rhs will invoke the same completer again. It might be possible to avoid this situation by
+ * assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
+ * will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
+ *
+ * The hooks into `typeSig` allow analyzer plugins to add annotations to (or change the types
+ * of) definition symbols. This cannot be achieved by using `pluginsTyped`: that method
+ * is only called during type checking, so changing the type of a symbol at this point is too
+ * late: references to the symbol might already be typed and therefore obtain the original
+ * type assigned during naming.
+ *
+ * @param defTree is the definition for which the type was computed. The different cases are
+ * outlined below. Note that this tree is untyped (for methods and values with inferred type,
+ * the typed rhs trees are available in analyzer.transformed).
+ *
+ * Case defTree: Template
+ * - tpe : A ClassInfoType for the template
+ * - typer: The typer for template members, i.e. expressions and definitions of defTree.body
+ * - pt : WildcardType
+ * - the class symbol is accessible through typer.context.owner
+ *
+ * Case defTree: ClassDef
+ * - tpe : A ClassInfoType, or a PolyType(params, ClassInfoType) for polymorphic classes.
+ * The class type is the one computed by templateSig, i.e. through the above case
+ * - typer: The typer for the class. Note that this typer has a different context than the
+ * typer for the template.
+ * - pt : WildcardType
+ *
+ * Case defTree: ModuleDef
+ * - tpe : A ClassInfoType computed by templateSig
+ * - typer: The typer for the module. context.owner of this typer is the module class symbol
+ * - pt : WildcardType
+ *
+ * Case defTree: DefDef
+ * - tpe : The type of the method (MethodType, PolyType or NullaryMethodType). (*)
+ * - typer: The typer for the rhs of this method
+ * - pt : If tpt.isEmpty, either the result type from the overridden method, or WildcardType.
+ * Otherwise the type obtained from typing tpt.
+ * - Note that for constructors, pt is the class type which the constructor creates. To type
+ * check the rhs of the constructor however, the expected type has to be WildcardType (see
+ * Typers.typedDefDef)
+ *
+ * Case defTree: ValDef
+ * - tpe : The type of this value. (*)
+ * - typer: The typer for the rhs of this value
+ * - pt : If tpt.isEmpty, WildcardType. Otherwise the type obtained from typing tpt.
+ * - Note that pluginsTypeSig might be called multiple times for the same ValDef since it is
+ * used to compute the types of the accessor methods (see `pluginsTypeSigAccessor`)
+ *
+ * Case defTree: TypeDef
+ * - tpe : The type obtained from typing rhs (PolyType if the TypeDef defines a polymorphic type)
+ * - typer: The typer for the rhs of this type
+ * - pt : WildcardType
+ */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = tpe
+
+ /**
+ * Modify the types of field accessors. The namer phase creates method types for getters and
+ * setters based on the type of the corresponding field.
+ *
+ * Note: in order to compute the method type of an accessor, the namer calls `typeSig` on the
+ * `ValDef` tree of the corresponding field. This implies that the `pluginsTypeSig` method
+ * is potentially called multiple times for the same ValDef tree.
+ *
+ * @param tpe The method type created by the namer for the accessor
+ * @param typer The typer for the ValDef (not for the rhs)
+ * @param tree The ValDef corresponding to the accessor
+ * @param sym The accessor method symbol (getter, setter, beanGetter or beanSetter)
+ */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Modify the type of a return expression. By default, return expressions have type
+ * NothingClass.tpe.
+ *
+ * @param tpe The type of the return expression
+ * @param typer The typer that was used for typing the return tree
+ * @param tree The typed return expression tree
+ * @param pt The return type of the enclosing method
+ */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
+ }
+
+
+
+ /** A list of registered analyzer plugins */
+ private var analyzerPlugins: List[AnalyzerPlugin] = Nil
+
+ /** Registers a new analyzer plugin */
+ def addAnalyzerPlugin(plugin: AnalyzerPlugin) {
+ if (!analyzerPlugins.contains(plugin))
+ analyzerPlugins = plugin :: analyzerPlugins
+ }
+
+
+ /** @see AnalyzerPlugin.pluginsPt */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ if (analyzerPlugins.isEmpty) pt
+ else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
+ if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+
+ /** @see AnalyzerPlugin.pluginsTyped */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTpe = addAnnotations(tree, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersTpe
+ else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypeSig */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+
+ /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+
+ /** @see AnalyzerPlugin.canAdaptAnnotations */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
+ annotCheckersExists || {
+ if (analyzerPlugins.isEmpty) false
+ else analyzerPlugins.exists(plugin =>
+ plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
+ }
+ }
+
+ /** @see AnalyzerPlugin.adaptAnnotations */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
+ if (analyzerPlugins.isEmpty) annotCheckersTree
+ else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
+ if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypedReturn */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersType
+ else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ }
+}
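
A minimal sketch of how a compiler plugin might register one of these hooks. It assumes the plugin extends scala.tools.nsc.plugins.Plugin and uses only the methods declared in this file; everything outside those declarations (the plugin name, the logging, the phase check) is illustrative.

import scala.tools.nsc.Global
import scala.tools.nsc.plugins.{Plugin, PluginComponent}

class TypeSigLogger(val global: Global) extends Plugin {
  import global._
  val name = "typesig-logger"
  val description = "logs the type signatures assigned by the namer"
  val components: List[PluginComponent] = Nil  // no extra phases, only analyzer hooks

  object analyzerPlugin extends analyzer.AnalyzerPlugin {
    // stay out of the way after pickling, as suggested in isActive's doc
    override def isActive(): Boolean =
      global.phase.id < global.currentRun.picklerPhase.id

    override def pluginsTypeSig(tpe: Type, typer: analyzer.Typer,
                                defTree: Tree, pt: Type): Type = {
      inform(s"typeSig of ${defTree.symbol}: $tpe")
      tpe  // leave the computed type unchanged
    }
  }

  analyzer.addAnalyzerPlugin(analyzerPlugin)
}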
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index dc367b11fd..fbf23968f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -273,9 +273,6 @@ trait ContextErrors {
def VolatileValueError(vdef: Tree) =
issueNormalTypeError(vdef, "values cannot be volatile")
- def FinalVolatileVarError(vdef: Tree) =
- issueNormalTypeError(vdef, "final vars cannot be volatile")
-
def LocalVarUninitializedError(vdef: Tree) =
issueNormalTypeError(vdef, "local variables must be initialized")
@@ -763,10 +760,14 @@ trait ContextErrors {
else " of " + expanded.getClass
))
- def MacroImplementationNotFoundError(expandee: Tree) =
- macroExpansionError(expandee,
+ def MacroImplementationNotFoundError(expandee: Tree) = {
+ val message =
"macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
+ (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ else "")
+ macroExpansionError(expandee, message)
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index af2aeefecd..620665126e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -35,7 +35,7 @@ trait Contexts { self: Analyzer =>
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
- private val startContext = {
+ private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
@@ -342,6 +342,16 @@ trait Contexts { self: Analyzer =>
c
}
+ /**
+ * A context for typing constructor parameter ValDefs, super or self invocation arguments and default getters
+ * of constructors. These expressions need to be type checked in a scope outside the class, cf. spec 5.3.1.
+ *
+ * This method is called by namer / typer where `this` is the context for the constructor DefDef. The
+ * owner of the resulting (new) context is the outer context for the Template, i.e. the context for the
+ * ClassDef. This means that class type parameters will be in scope. The value parameters of the current
+ * constructor are also entered into the new constructor scope. Members of the class however will not be
+ * accessible.
+ */
def makeConstructorContext = {
var baseContext = enclClass.outer
while (baseContext.tree.isInstanceOf[Template])
@@ -361,6 +371,8 @@ trait Contexts { self: Analyzer =>
enterLocalElems(c.scope.elems)
}
}
+ // Enter the scope elements of this (the scope for the constructor DefDef) into the new constructor scope.
+ // Concretely, this will enter the value parameters of the constructor.
enterElems(this)
argContext
}
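
A small example of the scoping rule this constructor context implements (hypothetical user code): per the comment above, class type parameters and the constructor's own value parameters are entered into the new scope, while class members are not accessible to constructor parameter defaults or super/self invocation arguments.

class C[T](val x: T, label: String = "C") {  // fine: literal default
  val tag = "#" + label
}
// By contrast, the following does not compile, because `tag` is a class
// member and the default getter is typed in the constructor context,
// i.e. outside the class body scope:
//   class D(val x: Int, label: String = tag) { val tag = "#" }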
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index df753ba53c..0b46582cbf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -317,15 +317,33 @@ abstract class Duplicators extends Analyzer {
super.typed(tree, mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
- // log("selection on this, no type ascription required")
- // we use the symbol name instead of the tree name because the symbol may have been
- // name mangled, rendering the tree name obsolete
- // log(tree)
- val t = super.typedPos(tree.pos, mode, pt) {
- Select(This(newClassOwner), tree.symbol.name)
- }
- // log("typed to: " + t + "; tpe = " + t.tpe + "; " + inspectTpe(t.tpe))
- t
+ // We use the symbol name instead of the tree name because the symbol
+ // may have been name mangled, rendering the tree name obsolete.
+ // ...but you can't just do a Select on a name because if the symbol is
+ // overloaded, you will crash in the backend.
+ val memberByName = newClassOwner.thisType.member(tree.symbol.name)
+ def nameSelection = Select(This(newClassOwner), tree.symbol.name)
+ val newTree = (
+ if (memberByName.isOverloaded) {
+ // Find the types of the overload alternatives as seen in the new class,
+ // and filter the list down to those which match the old type (after
+ // fixing the old type so it is seen as if from the new class).
+ val typeInNewClass = fixType(oldClassOwner.info memberType tree.symbol)
+ val alts = memberByName.alternatives
+ val memberTypes = alts map (newClassOwner.info memberType _)
+ val memberString = memberByName.defString
+ alts zip memberTypes filter (_._2 =:= typeInNewClass) match {
+ case ((alt, tpe)) :: Nil =>
+ log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
+ nameSelection
+ }
+ }
+ else nameSelection
+ )
+ super.typed(atPos(tree.pos)(newTree), mode, pt)
case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
// val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen)))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 2c2aa03d24..74078a4ed3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -257,8 +257,8 @@ trait Infer extends Checkable {
tp1 // @MAT aliases already handled by subtyping
}
- private val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
- private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
+ private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
+ private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
@@ -411,8 +411,19 @@ trait Infer extends Checkable {
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
+ *
* [Martin] I think Infer is also created by Erasure, with the default
* implementation of isCoercible
+ * [Paulp] (Assuming the above must refer to my comment on isCoercible)
+ * Nope, I examined every occurrence of Inferencer in trunk. It
+ * appears twice as a self-type, once at its definition, and once
+ * where it is instantiated in Typers. There are no others.
+ *
+ % ack -A0 -B0 --no-filename '\bInferencer\b' src
+ self: Inferencer =>
+ self: Inferencer =>
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
@@ -1578,10 +1589,10 @@ trait Infer extends Checkable {
}
// Drop those that use a default; keep those that use vararg/tupling conversion.
mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && {
- compareLengths(t.params, argtpes) < 0 || // tupling (*)
- hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- }
+ !t.typeSymbol.hasDefaultFlag && (
+ compareLengths(t.params, argtpes) < 0 // tupling (*)
+ || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
+ )
)
// (*) more arguments than parameters, but still applicable: tupling conversion works.
// todo: should not return "false" when paramTypes = (Unit) no argument is given
@@ -1608,15 +1619,18 @@ trait Infer extends Checkable {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
tryTwice { isSecondTry =>
- debuglog("infer method alt "+ tree.symbol +" with alternatives "+
- (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
- val applicable = resolveOverloadedMethod(argtpes, {
- alts filter { alt =>
- inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
- (!varArgsOnly || isVarArgsList(alt.tpe.params))
- }
- })
+ def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
+ isVarArgsList(alt.tpe.params)
+ && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
+ )
+ val applicable = resolveOverloadedMethod(argtpes,
+ alts filter (alt =>
+ varargsApplicableCheck(alt)
+ && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
+ )
+ )
def improves(sym1: Symbol, sym2: Symbol) = {
// util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index b20a9ea626..245656e2d7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -452,7 +452,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
case tpe => tpe
}
checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
@@ -684,6 +684,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* the expandee with an error marker set if there has been an error
*/
def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ if (settings.Ymacronoexpand.value) return expandee // SI-6812
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index b1cf93a879..99557d1527 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -235,8 +235,8 @@ trait MethodSynthesis {
context.unit.synthetics get meth match {
case Some(mdef) =>
context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
@@ -329,6 +329,7 @@ trait MethodSynthesis {
*/
def category: Symbol
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 98b6264051..c728185d4e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -65,7 +65,18 @@ trait Namers extends MethodSynthesis {
case ModuleDef(_, _, _) => tree.symbol.moduleClass
case _ => tree.symbol
}
- newNamer(context.makeNewScope(tree, sym))
+ def isConstrParam(vd: ValDef) = {
+ (sym hasFlag PARAM | PRESUPER) &&
+ !vd.mods.isJavaDefined &&
+ sym.owner.isConstructor
+ }
+ val ownerCtx = tree match {
+ case vd: ValDef if isConstrParam(vd) =>
+ context.makeConstructorContext
+ case _ =>
+ context
+ }
+ newNamer(ownerCtx.makeNewScope(tree, sym))
}
def createInnerNamer() = {
newNamer(context.make(context.tree, owner, newScope))
@@ -423,6 +434,7 @@ trait Namers extends MethodSynthesis {
def enterSyms(trees: List[Tree]): Namer = {
trees.foldLeft(this: Namer) { (namer, t) =>
val ctx = namer enterSym t
+ // for Import trees, enterSym returns a changed context, so we need a new namer
if (ctx eq namer.context) namer
else newNamer(ctx)
}
@@ -521,20 +533,19 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = copyDefDef.symbol
- val lazyType = completerOf(copyDefDef, tparams)
+ def enterCopyMethod(copyDef: DefDef): Symbol = {
+ val sym = copyDef.symbol
+ val lazyType = completerOf(copyDef)
/** Assign the types of the class parameters to the parameters of the
* copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
val classParamss = constructorType.paramss
- val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
- map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyDef.vparamss, classParamss)((copyParams, classParams) =>
map2(copyParams, classParams)((copyP, classP) =>
copyP.tpt setType subst(classP.tpe)
)
@@ -542,24 +553,28 @@ trait Namers extends MethodSynthesis {
}
sym setInfo {
- mkTypeCompleter(copyDefDef) { sym =>
+ mkTypeCompleter(copyDef) { sym =>
assignParamTypes()
lazyType complete sym
}
}
}
- def completerOf(tree: Tree): TypeCompleter = completerOf(tree, treeInfo.typeParameters(tree))
- def completerOf(tree: Tree, tparams: List[TypeDef]): TypeCompleter = {
+
+ def completerOf(tree: Tree): TypeCompleter = {
val mono = namerOf(tree.symbol) monoTypeCompleter tree
+ val tparams = treeInfo.typeParameters(tree)
if (tparams.isEmpty) mono
else {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
+ /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered
+ * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`.
+ * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds:
+ * !tree.symbol.isAbstractType || { tparams.forall(_.symbol == NoSymbol) }
+ * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter.
+ */
if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ?
createNamer(tree) enterSyms tparams
- new PolyTypeCompleter(tparams, mono, tree, context) //@M
+ new PolyTypeCompleter(tparams, mono, context) //@M
}
}
@@ -621,9 +636,9 @@ trait Namers extends MethodSynthesis {
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
- enterCopyMethod(tree, tparams)
+ enterCopyMethod(tree)
else
- sym setInfo completerOf(tree, tparams)
+ sym setInfo completerOf(tree)
}
def enterClassDef(tree: ClassDef) {
@@ -736,13 +751,13 @@ trait Namers extends MethodSynthesis {
}
}
- def accessorTypeCompleter(tree: ValDef, isSetter: Boolean = false) = mkTypeCompleter(tree) { sym =>
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- if (isSetter)
- MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
- else
- NullaryMethodType(typeSig(tree))
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ else NullaryMethodType(typeSig(tree))
+ pluginsTypeSigAccessor(tp, typer, tree, sym)
}
}
}
@@ -805,17 +820,12 @@ trait Namers extends MethodSynthesis {
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- // compute result type from rhs
- val typedBody =
+ val rhsTpe =
if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
else defnTyper.computeType(tree.rhs, pt)
- val typedDefn = widenIfNecessary(tree.symbol, typedBody, pt)
- assignTypeToTree(tree, typedDefn)
- }
-
- private def assignTypeToTree(tree: ValOrDefDef, tpe: Type): Type = {
- tree.tpt defineType tpe setPos tree.pos.focus
+ val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
+ tree.tpt defineType defnTpe setPos tree.pos.focus
tree.tpt.tpe
}
@@ -892,163 +902,257 @@ trait Namers extends MethodSynthesis {
for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
cda.companionModuleClassNamer = templateNamer
}
- ClassInfoType(parents, decls, clazz)
+ val classTp = ClassInfoType(parents, decls, clazz)
+ pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType)
}
- private def classSig(tparams: List[TypeDef], impl: Template): Type = {
+ private def classSig(cdef: ClassDef): Type = {
+ val clazz = cdef.symbol
+ val ClassDef(_, _, tparams, impl) = cdef
val tparams0 = typer.reenterTypeParams(tparams)
val resultType = templateSig(impl)
- GenPolyType(tparams0, resultType)
+ val res = GenPolyType(tparams0, resultType)
+ val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType)
+
+ // Already assign the type to the class symbol (monoTypeCompleter will do it again).
+ // Allows isDerivedValueClass to look at the info.
+ clazz setInfo pluginsTp
+ if (clazz.isDerivedValueClass) {
+ log("Ensuring companion for derived value class " + cdef.name + " at " + cdef.pos.show)
+ clazz setFlag FINAL
+ // Don't force the owner's info lest we create cycles as in SI-6357.
+ enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
+ }
+ pluginsTp
}
- private def methodSig(ddef: DefDef, mods: Modifiers, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): Type = {
- val meth = owner
- val clazz = meth.owner
- // enters the skolemized version into scope, returns the deSkolemized symbols
- val tparamSyms = typer.reenterTypeParams(tparams)
- // since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
- var vparamSymss = enterValueParams(vparamss)
+ private def moduleSig(mdef: ModuleDef): Type = {
+ val moduleSym = mdef.symbol
+ // The infos of both the module and the moduleClass symbols need to be assigned. monoTypeCompleter assigns
+ // the result of typeSig to the module symbol. The module class info is assigned here as a side-effect.
+ val result = templateSig(mdef.impl)
+ val pluginsTp = pluginsTypeSig(result, typer, mdef, WildcardType)
+ // Assign the moduleClass info (templateSig returns a ClassInfoType)
+ val clazz = moduleSym.moduleClass
+ clazz setInfo pluginsTp
+ // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // (clazz.info would be the ClassInfoType, which is not what should be assigned to the module symbol)
+ clazz.tpe
+ }
+
+ /**
+ * The method type for `ddef`.
+ *
+ * If a PolyType(tparams, restp) is returned, `tparams` are the external symbols (not type skolems),
+ * i.e. instances of AbstractTypeSymbol. All references in `restp` to the type parameters are TypeRefs
+ * to these non-skolems.
+ *
+ * For type-checking the rhs (in case the result type is inferred), the type skolems of the type parameters
+ * are entered in scope. Equally, the parameter symbols entered into scope have types which refer to those
+ * skolems: when type-checking the rhs, references to parameters need to have types that refer to the skolems.
+ * In summary, typing an rhs happens with respect to the skolems.
+ *
+ * This means that the method's result type computed by the typer refers to skolems. In order to put it
+ * into the method type (the result of methodSig), typeRefs to skolems have to be replaced by references
+ * to the non-skolems.
+ */
+ private def methodSig(ddef: DefDef): Type = {
// DEPMETTODO: do we need to skolemize value parameter symbols?
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
- tpt setPos meth.pos.focus
- }
- var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val site = clazz.thisType
- /** Called for all value parameter lists, right to left
- * @param vparams the symbols of one parameter list
- * @param restpe the result type (possibly a MethodType)
+ val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
+
+ val meth = owner
+ val methOwner = meth.owner
+ val site = methOwner.thisType
+
+ /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
+ * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
+ * into scope and returns the non-skolems.
*/
- def makeMethodType(vparams: List[Symbol], restpe: Type) = {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- if (owner.isJavaDefined)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
- else
- MethodType(vparams, restpe)
- }
+ val tparamSyms = typer.reenterTypeParams(tparams)
+
+ val tparamSkolems = tparams.map(_.symbol)
+
+ /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
+ * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ var vparamSymss = enterValueParams(vparamss)
+
+ /**
+ * Creates a method type using tparamSyms and vparamSymss as argument symbols and `restpe` as result type.
+ * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
+ * so the resulting type is a valid external method type; it does not contain (references to) skolems.
+ */
def thisMethodType(restpe: Type) = {
val checkDependencies = new DependentTypeChecker(context)(this)
checkDependencies check vparamSymss
// DEPMETTODO: check not needed when they become on by default
checkDependencies(restpe)
- GenPolyType(
+ val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
+ // TODODEPMET: check that we actually don't need to do anything here
+ // new dependent method types: probably OK already, since 'enterValueParams' above
+ // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
+ // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
+ // so re-use / adapt that)
+ if (meth.isJavaDefined)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
+ else
+ MethodType(vparams, restpe)
+ }
+
+
+ val res = GenPolyType(
tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
if (vparamSymss.isEmpty) NullaryMethodType(restpe)
// vparamss refer (if they do) to skolemized tparams
else (vparamSymss :\ restpe) (makeMethodType)
)
+ res.substSym(tparamSkolems, tparamSyms)
}
- def transformedResult =
- thisMethodType(resultPt).substSym(tparams map (_.symbol), tparamSyms)
+ /**
+ * Creates a schematic method type which has WildcardTypes for non specified
+ * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
+ * type schema is
+ *
+ * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
+ *
+ * where T are non-skolems.
+ */
+ def methodTypeSchema(resTp: Type) = {
+ // for all params without a type, set WildcardType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ thisMethodType(resTp)
+ }
- // luc: added .substSym from skolemized to deSkolemized
- // site.memberType(sym): PolyType(tparams, MethodType(..., ...))
- // ==> all references to tparams are deSkolemized
- // thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized.
- // ==> the two didn't match
- //
- // for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
- // class A { def foo[T](a: T)(b: T = a) = a }
- // class B extends A { override def foo[U](a: U)(b: U) = b }
- def overriddenSymbol =
- intersectionType(clazz.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches transformedResult)
+ def overriddenSymbol(resTp: Type) = {
+ intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
+ sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
}
- // TODO: see whether this or something similar would work instead.
- //
+ }
+ // TODO: see whether this or something similar would work instead:
// def overriddenSymbol = meth.nextOverriddenSymbol
- // fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
- // try to complete from matching definition in base type
- mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
- val overridden = overriddenSymbol
- if (overridden != NoSymbol && !overridden.isOverloaded) {
- overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- resultPt = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
- case mt => mt
- }
+ /**
+ * If `meth` doesn't have an explicit return type, extracts the return type from the method
+     * overridden by `meth` (if there's a unique one). This type is later used as the expected
+ * type for computing the type of the rhs. The resulting type references type skolems for
+ * type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
+ *
+ * As a first side effect, this method assigns a MethodType constructed using this
+ * return type to `meth`. This allows omitting the result type for recursive methods.
+ *
+     * As another side effect, this method also assigns parameter types from the overridden
+ * method to parameters of `meth` that have missing types (the parser accepts missing
+ * parameter types under -Yinfer-argument-types).
+ */
+ def typesFromOverridden(methResTp: Type): Type = {
+ val overridden = overriddenSymbol(methResTp)
+ if (overridden == NoSymbol || overridden.isOverloaded) {
+ methResTp
+ } else {
+ overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
+ var overriddenTp = site.memberType(overridden) match {
+ case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
+ case mt => mt
+ }
for (vparams <- vparamss) {
- var pps = resultPt.params
+ var overriddenParams = overriddenTp.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- val paramtpe = pps.head.tpe
- vparam.symbol setInfo paramtpe
- vparam.tpt defineType paramtpe setPos vparam.pos.focus
+ val overriddenParamTp = overriddenParams.head.tpe
+              // references to type parameters in overriddenParamTp link to the type skolems, so the
+ // assigned type is consistent with the other / existing parameter types in vparamSymss.
+ vparam.symbol setInfo overriddenParamTp
+ vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- pps = pps.tail
+ overriddenParams = overriddenParams.tail
}
- resultPt = resultPt.resultType
+ overriddenTp = overriddenTp.resultType
}
- resultPt match {
- case NullaryMethodType(rtpe) => resultPt = rtpe
- case MethodType(List(), rtpe) => resultPt = rtpe
+
+ overriddenTp match {
+ case NullaryMethodType(rtpe) => overriddenTp = rtpe
+ case MethodType(List(), rtpe) => overriddenTp = rtpe
case _ =>
}
+
if (tpt.isEmpty) {
// provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(resultPt)
+ meth setInfo thisMethodType(overriddenTp)
+ overriddenTp
+ } else {
+ methResTp
}
}
}
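A minimal usage sketch (assumed example) of what typesFromOverridden enables: the result type comes from the overridden member, and the missing parameter type is only accepted under -Yinfer-argument-types:

    trait T { def f(x: Int): String }
    class C extends T {
      def f(x) = x.toString   // x: Int and the result type String are filled in from T.f
    }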
- // Add a () parameter section if this overrides some method with () parameters.
- if (clazz.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
- _.info.isInstanceOf[MethodType])) {
+
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe
+ tpt setPos meth.pos.focus
+ }
+
+ val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
+ val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
+ typesFromOverridden(methResTp)
+ } else {
+ methResTp
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ if (methOwner.isClass && vparamss.isEmpty &&
+ overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
vparamSymss = ListOfNil
}
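For orientation (illustrative example), this empty-parameter-section fix-up is what lets a parameterless override match a method declared with ():

    class A { def f() = 1 }
    class B extends A { override def f = 2 }   // B.f is given an empty () section so it matches A.f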
+
+ // issue an error for missing parameter types
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
MissingParameterOrValTypeError(vparam)
vparam.tpt defineType ErrorType
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
+
+ addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
// (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
- if (fastTrack contains ddef.symbol) ddef.symbol setFlag MACRO
+ if (fastTrack contains meth) meth setFlag MACRO
// macro defs need to be typechecked in advance
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (ddef.symbol.isTermMacro) {
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- typer.computeMacroDefType(ddef, pt)
+ if (meth.isTermMacro) {
+ typer.computeMacroDefType(ddef, resTpFromOverride)
}
- thisMethodType({
+ val res = thisMethodType({
val rt = (
if (!tpt.isEmpty) {
- typer.typedType(tpt).tpe
+ methResTp
} else {
- // replace deSkolemized symbols with skolemized ones
- // (for resultPt computed by looking at overridden symbol, right?)
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- assignTypeToTree(ddef, typer, pt)
- }
- )
+            // the return type is inferred; we don't just use resTpFromOverride. Here, C.f has type String:
+ // trait T { def f: Object }; class C <: T { def f = "" }
+ // using resTpFromOverride as expected type allows for the following (C.f has type A):
+ // trait T { def f: A }; class C <: T { implicit def b2a(t: B): A = ???; def f = new B }
+ assignTypeToTree(ddef, typer, resTpFromOverride)
+ })
// #2382: return type of default getters are always @uncheckedVariance
if (meth.hasDefault)
rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
else rt
})
+ pluginsTypeSig(res, typer, ddef, methResTp)
}
/**
@@ -1060,9 +1164,9 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val clazz = meth.owner
+ val methOwner = meth.owner
val isConstr = meth.isConstructor
- val overridden = if (isConstr || !clazz.isClass) NoSymbol else overriddenSymbol
+ val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
@@ -1112,7 +1216,7 @@ trait Namers extends MethodSynthesis {
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionSymbolOf(clazz, context)
+ val module = companionSymbolOf(methOwner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
module.attachments.get[ConstructorDefaultsAttachment] match {
@@ -1158,7 +1262,7 @@ trait Namers extends MethodSynthesis {
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
- clazz.resetFlag(INTERFACE) // there's a concrete member now
+ methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
// save the default getters as attachments in the method symbol. if compiling the
@@ -1183,15 +1287,31 @@ trait Namers extends MethodSynthesis {
}
}
+ private def valDefSig(vdef: ValDef) = {
+ val ValDef(_, _, tpt, rhs) = vdef
+ val result = if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ }
+ else assignTypeToTree(vdef, typer, WildcardType)
+ } else {
+ typer.typedType(tpt).tpe
+ }
+ pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+
+ }
+
//@M! an abstract type definition (abstract type member/type parameter)
// may take type parameters, which are in scope in its bounds
- private def typeDefSig(tpsym: Symbol, tparams: List[TypeDef], rhs: Tree) = {
+ private def typeDefSig(tdef: TypeDef) = {
+ val TypeDef(_, _, tparams, rhs) = tdef
// log("typeDefSig(" + tpsym + ", " + tparams + ")")
val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef)
val tp = typer.typedType(rhs).tpe match {
case TypeBounds(lt, rt) if (lt.isError || rt.isError) =>
TypeBounds.empty
- case tp @ TypeBounds(lt, rt) if (tpsym hasFlag JAVA) =>
+ case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) =>
TypeBounds(lt, objToAny(rt))
case tp =>
tp
@@ -1213,9 +1333,32 @@ trait Namers extends MethodSynthesis {
// However, separate compilation requires the symbol info to be
// loaded to do this check, but loading the info will probably
// lead to spurious cyclic errors. So omit the check.
- GenPolyType(tparamSyms, tp)
+ val res = GenPolyType(tparamSyms, tp)
+ pluginsTypeSig(res, typer, tdef, WildcardType)
}
+ private def importSig(imp: Import) = {
+ val Import(expr, selectors) = imp
+ val expr1 = typer.typedQualifier(expr)
+ typer checkStable expr1
+ if (expr1.symbol != null && expr1.symbol.isRootPackage)
+ RootImportError(imp)
+
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(imp) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
+ }
+
+
/** Given a case class
* case class C[Ts] (ps: Us)
* Add the following methods to toScope:
@@ -1239,6 +1382,11 @@ trait Namers extends MethodSynthesis {
caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym
}
+ /**
+ * TypeSig is invoked by monoTypeCompleters. It returns the type of a definition which
+ * is then assigned to the corresponding symbol (typeSig itself does not need to assign
+ * the type to the symbol, but it can if necessary).
+ */
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
/** For definitions, transform Annotation trees to AnnotationInfos, assign
@@ -1271,84 +1419,33 @@ trait Namers extends MethodSynthesis {
}
val sym: Symbol = tree.symbol
- // @Lukas: I am not sure this is the right way to do things.
- // We used to only decorate the module class with annotations, which is
- // clearly wrong. Now we decorate both the class and the object.
- // But maybe some annotations are only meant for one of these but not for the other?
- //
- // TODO: meta-annotations to indicate class vs. object.
+
+ // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
annotate(sym)
if (sym.isModule) annotate(sym.moduleClass)
def getSig = tree match {
- case cdef @ ClassDef(_, name, tparams, impl) =>
- val clazz = tree.symbol
- val result = createNamer(tree).classSig(tparams, impl)
- clazz setInfo result
- if (clazz.isDerivedValueClass) {
- log("Ensuring companion for derived value class " + name + " at " + cdef.pos.show)
- clazz setFlag FINAL
- // Don't force the owner's info lest we create cycles as in SI-6357.
- enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
- }
- result
-
- case ModuleDef(_, _, impl) =>
- val clazz = sym.moduleClass
- clazz setInfo createNamer(tree).templateSig(impl)
- clazz.tpe
-
- case ddef @ DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
- // TODO: cleanup parameter list
- createNamer(tree).methodSig(ddef, mods, tparams, vparamss, tpt, rhs)
-
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val isBeforeSupercall = (
- (sym hasFlag PARAM | PRESUPER)
- && !mods.isJavaDefined
- && sym.owner.isConstructor
- )
- val typer1 = typer.constrTyperIf(isBeforeSupercall)
- if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- MissingParameterOrValTypeError(tpt)
- ErrorType
- }
- else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType)
- }
- else typer1.typedType(tpt).tpe
-
- case TypeDef(_, _, tparams, rhs) =>
- createNamer(tree).typeDefSig(sym, tparams, rhs) //@M!
-
- case Import(expr, selectors) =>
- val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
- if (expr1.symbol != null && expr1.symbol.isRootPackage)
- RootImportError(tree)
-
- if (expr1.isErrorTyped)
- ErrorType
- else {
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
- }
- }
+ case cdef: ClassDef =>
+ createNamer(tree).classSig(cdef)
+
+ case mdef: ModuleDef =>
+ createNamer(tree).moduleSig(mdef)
+
+ case ddef: DefDef =>
+ createNamer(tree).methodSig(ddef)
- val result =
- try getSig
- catch typeErrorHandler(tree, ErrorType)
+ case vdef: ValDef =>
+ createNamer(tree).valDefSig(vdef)
- result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
- case _ => result
+ case tdef: TypeDef =>
+ createNamer(tree).typeDefSig(tdef) //@M!
+
+ case imp: Import =>
+ importSig(imp)
}
+
+ try getSig
+ catch typeErrorHandler(tree, ErrorType)
}
def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
@@ -1508,14 +1605,25 @@ trait Namers extends MethodSynthesis {
}
}
- /** A class representing a lazy type with known type parameters.
+ /**
+ * A class representing a lazy type with known type parameters. `ctx` is the namer context in which the
+ * `owner` is defined.
+ *
+ * Constructing a PolyTypeCompleter for a DefDef creates type skolems for the type parameters and
+ * assigns them to the `tparams` trees.
*/
- class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
- private val ownerSym = owner.symbol
- override val typeParams = tparams map (_.symbol) //@M
- override val tree = restp.tree
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
+ // @M. If `owner` is an abstract type member, `typeParams` are all NoSymbol (see comment in `completerOf`),
+ // otherwise, the non-skolemized (external) type parameter symbols
+ override val typeParams = tparams map (_.symbol)
+
+ /* The definition tree (poly ClassDef, poly DefDef or HK TypeDef) */
+ override val tree = restp.tree
+
+ private val defnSym = tree.symbol
- if (ownerSym.isTerm) {
+ if (defnSym.isTerm) {
+ // for polymorphic DefDefs, create type skolems and assign them to the tparam trees.
val skolems = deriveFreshSkolems(tparams map (_.symbol))
map2(tparams, skolems)(_ setSymbol _)
}
@@ -1523,8 +1631,8 @@ trait Namers extends MethodSynthesis {
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
- if (ownerSym.isAbstractType)
- newNamerFor(ctx, owner) enterSyms tparams //@M
+ if (defnSym.isAbstractType)
+ newNamerFor(ctx, tree) enterSyms tparams //@M
restp complete sym
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index be218fcb02..2340c78f8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -268,26 +268,32 @@ trait NamesDefaults { self: Analyzer =>
*
* For by-name parameters, create a value
* x$n: () => T = () => arg
+ *
+ * For Ident(<unapply-selector>) arguments, no ValDef is created (SI-3353).
*/
- def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
+ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
- val argTpe = (
- if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
- }
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
- (context.scope.enter(s), byName, repeated)
+ val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ case Ident(nme.SELECTOR_DUMMY) =>
+ None // don't create a local ValDef if the argument is <unapply-selector>
+ case _ =>
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
+ Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
- case ((sym, byName, repeated), arg) =>
+ case (None, _) => None
+ case (Some((sym, byName, repeated)), arg) =>
val body =
if (byName) {
val res = blockTyper.typed(Function(List(), arg))
@@ -303,7 +309,7 @@ trait NamesDefaults { self: Analyzer =>
blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
} else arg
}
- atPos(body.pos)(ValDef(sym, body).setType(NoType))
+ Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
}
}
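As a reminder of the transformation this feeds (illustrative desugaring; x$n follows the compiler's fresh-name convention):

    def f(a: Int, b: Int = 2) = a + b
    f(b = 3, a = 1)
    // is rewritten, roughly, into a block with one val per argument in call-site order:
    // { val x$1 = 3; val x$2 = 1; f(x$2, x$1) }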
@@ -329,27 +335,29 @@ trait NamesDefaults { self: Analyzer =>
// ValDef's in the block), change the arguments to these local values.
case Apply(expr, typedArgs) =>
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
- val ref = gen.mkAttributedRef(vDef.symbol)
- atPos(vDef.pos.focus) {
- // for by-name parameters, the local value is a nullary function returning the argument
- tpe.typeSymbol match {
- case ByNameParamClass => Apply(ref, Nil)
- case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
- case _ => ref
+ val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match {
+ case None => origArg
+ case Some(vDef) =>
+ val ref = gen.mkAttributedRef(vDef.symbol)
+ atPos(vDef.pos.focus) {
+ // for by-name parameters, the local value is a nullary function returning the argument
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
- }
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
+ val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index 69bbab6e42..4b53802d95 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -409,15 +409,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- val (typeTestTreeMaker, patBinderOrCasted) =
- if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
- (List(treeMaker), treeMaker.nextBinder)
- } else {
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
// no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
// SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
// TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
@@ -426,10 +420,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
*/
- (Nil, patBinder setInfo extractor.paramType)
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+      // it'll be equal to patBinder cast to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
}
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
}
@@ -622,8 +627,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// to which type should the previous binder be casted?
def paramType : Type
- // binder has been casted to paramType if necessary
- def treeMaker(binder: Symbol, pos: Position): TreeMaker
+ /** Create the TreeMaker that embodies this extractor call
+ *
+     * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
@@ -637,6 +647,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case bp => bp
}
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
def subPatTypes: List[Type] =
if(isSeq) {
val TypeRef(pre, SeqClass, args) = seqTp
@@ -731,41 +746,31 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
- // binder has type paramType
- def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+   * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
val paramAccessors = binder.constrParamAccessors
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
val mutableBinders =
- if (paramAccessors exists (_.isMutable))
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
// checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders)
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- // caseFieldAccessors is messed up after typers (reversed, names mangled for non-public fields)
- // TODO: figure out why...
val accessors = binder.caseFieldAccessors
- // luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
- // (need to undo name-mangling, including the sneaky trailing whitespace)
- val constrParamAccessors = binder.constrParamAccessors
-
- def indexInCPA(acc: Symbol) =
- constrParamAccessors indexWhere { orig =>
- // patmatDebug("compare: "+ (orig, acc, orig.name, acc.name, (acc.name == orig.name), (acc.name startsWith (orig.name append "$"))))
- val origName = orig.name.toString.trim
- val accName = acc.name.toString.trim
- (accName == origName) || (accName startsWith (origName + "$"))
- }
-
- // patmatDebug("caseFieldAccessors: "+ (accessors, binder.caseFieldAccessors map indexInCPA))
- // patmatDebug("constrParamAccessors: "+ constrParamAccessors)
-
- val accessorsSorted = accessors sortBy indexInCPA
- if (accessorsSorted isDefinedAt (i-1)) REF(binder) DOT accessorsSorted(i-1)
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
@@ -781,11 +786,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def resultType = tpe.finalResultType
def isSeq = extractorCall.symbol.name == nme.unapplySeq
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+     * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
override protected def seqTree(binder: Symbol): Tree =
@@ -842,6 +857,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
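A few illustrative patterns (assumed examples) and how this extractor classifies their binders:

    def classify(x: Option[Any]) = x match {
      case Some(_: String) => 0   // Typed over an underscore: the sub-pattern binder is ignored
      case Some(_)         => 1   // Ident(WILDCARD): never stored in a local val
      case None            => 2
    }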
+
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
@@ -1009,10 +1034,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
// unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
// mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
def emitVars = storedBinders.nonEmpty
@@ -1033,10 +1065,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
import CODE._
- def bindSubPats(in: Tree): Tree = if (!emitVars) in
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
else {
- val (subPatBindersStored, subPatRefsStored) = stored.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new collection.mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
}
}
@@ -1056,7 +1100,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val subPatRefs: List[Tree],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
- val prevBinder: Symbol) extends FunTreeMaker with PreserveSubPatBinders {
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
@@ -1099,27 +1147,35 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders {
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
import CODE._
val nextBinder = prevBinder // just passing through
// mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant)
- override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
}
override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
}
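Illustrative effect of binderKnownNonNull (example assumed): when a type test is chained before a case-class extractor, the product extractor no longer re-checks the binder against null:

    def unwrap(a: Any) = a match {
      case Some(x) => x   // the generated isInstanceOf[Some[_]] test already implies the cast binder
      case _       => 0   //   is non-null, so the `binder ne null` guard is dropped
    }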
- // typetag-based tests are inserted by the type checker
- def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
-
object TypeTestTreeMaker {
// factored out so that we can consistently generate other representations of the tree that implements the test
// (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
@@ -1133,12 +1189,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result
def eqTest(pat: Tree, testedBinder: Symbol): Result
def and(a: Result, b: Result): Result
+ def tru: Result
}
object treeCondStrategy extends TypeTestCondStrategy { import CODE._
type Result = Tree
def and(a: Result, b: Result): Result = a AND b
+ def tru = TRUE_typed
def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
@@ -1169,6 +1227,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
def eqTest(pat: Tree, testedBinder: Symbol): Result = false
def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
}
}
@@ -1238,10 +1309,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// I think it's okay:
// - the isInstanceOf test includes a test for the element type
// - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
case _ => default
}
@@ -1253,6 +1330,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
def isPureTypeTest = renderCondition(pureTypeTestChecker)
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
}
@@ -1751,6 +1830,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = TrueCond
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
@@ -1897,17 +1977,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case object False extends Prop
// symbols are propositions
- case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = nextSymId
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
override def toString = variable +"="+ const +"#"+ id
}
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
-
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: util.HashSet[Sym] = new util.HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
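A tiny sketch (illustrative) of why the interning matters here: Sym.apply returns the cached instance for an equal (variable, const) pair, so Sym(v, c) eq Sym(v, c) holds, and Lit below can compare symbols by reference.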
def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
-
trait PropTraverser {
def apply(x: Prop): Unit = x match {
case And(a, b) => apply(a); apply(b)
@@ -2063,6 +2150,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
import scala.collection.mutable.ArrayBuffer
type FormulaBuilder = ArrayBuffer[Clause]
def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
def toFormula(buff: FormulaBuilder): Formula = buff
@@ -2167,7 +2255,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
class Lit(val sym: Sym, val pos: Boolean) {
override def toString = if (!pos) "-"+ sym.toString else sym.toString
override def equals(o: Any) = o match {
- case o: Lit => (o.sym == sym) && (o.pos == pos)
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
case _ => false
}
override def hashCode = sym.hashCode + pos.hashCode
@@ -2216,13 +2304,18 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit) = {
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
val negated = -unitLit
// drop entire clauses that are trivially true
// (i.e., disjunctions that contain the literal we're making true in the returned model),
// and simplify clauses by dropping the negation of the literal we're making true
// (since False \/ X == X)
- f.filterNot(_.contains(unitLit)).map(_ - negated)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
}
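A worked example (illustrative) of dropUnit during unit propagation: for f = {A} ∧ {¬A ∨ B} ∧ {A ∨ C} with unitLit = A, the clauses containing A are dropped and ¬A is removed from the remaining one, leaving {B}; findModelFor then extends the model of the reduced formula with A = true.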
def findModelFor(f: Formula): Model = {
@@ -3699,11 +3792,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// nextBinder: T
// next == MatchMonad[U]
// returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- VAL(nextBinder) === res,
- next
- ))
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
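Sketch of the code emitted before and after this change (illustrative):

    // before: if (cond) { val x2 = res; next } else zero
    // after:  if (cond) next else zero             // used whenever `next` never references x2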
// guardTree: Boolean
// next: MatchMonad[T]
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 969bb8aceb..b9fdd7280e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -11,6 +11,9 @@ import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.nsc.settings.AnyScalaVersion
+import scala.tools.nsc.settings.NoScalaVersion
/** <p>
* Post-attribution checking and transformation.
@@ -60,23 +63,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
super.transformInfo(sym, tp)
}
- val toJavaRepeatedParam = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(pre, RepeatedParamClass, args) =>
- typeRef(pre, JavaRepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
-
- val toScalaRepeatedParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, JavaRepeatedParamClass, args) =>
- typeRef(pre, RepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
+ val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass)
+ val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass)
def accessFlagsToString(sym: Symbol) = flagsToString(
sym getFlag (PRIVATE | PROTECTED),
@@ -156,27 +144,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Override checking ------------------------------------------------------------
- def isJavaVarargsAncestor(clazz: Symbol) = (
- clazz.isClass
- && clazz.isJavaDefined
- && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
- )
-
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
// This is quite expensive, so attempt to skip it completely.
// Insist there at least be a java-defined ancestor which
// defines a varargs method. TODO: Find a cheaper way to exclude.
- if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) {
+ if (inheritsJavaVarArgsMethod(clazz)) {
log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
val self = clazz.thisType
val bridges = new ListBuffer[Tree]
def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
+ log(s"Generating varargs bridge for ${member.fullLocationString} of type $bridgetpe")
- val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
+ val newFlags = (member.flags | VBRIDGE | ARTIFACT) & ~PRIVATE
+ val bridge = member.cloneSymbolImpl(clazz, newFlags) setPos clazz.pos
bridge.setInfo(bridgetpe.cloneInfo(bridge))
clazz.info.decls enter bridge
@@ -189,26 +172,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
localTyper typed DefDef(bridge, body)
}
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
+ // For all concrete non-private members (but: see below) that have a (Scala) repeated
+ // parameter: compute the corresponding method type `jtpe` with a Java repeated parameter
// if a method with type `jtpe` exists and that method is not a varargs bridge
// then create a varargs bridge of type `jtpe` that forwards to the
// member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
+ //
+ // @PP: Can't call nonPrivateMembers because we will miss refinement members,
+ // which have been marked private. See SI-4729.
+ for (member <- nonTrivialMembers(clazz)) {
+ log(s"Considering $member for java varargs bridge in $clazz")
if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+
// Delaying calling memberType as long as possible
if (inherited ne NoSymbol) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
+ val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
// we need to create a bridge
- if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists)
+ val inherited1 = inherited filter (sym => !(sym hasFlag VBRIDGE) && (self memberType sym matches jtpe))
+ if (inherited1.exists)
bridges += varargBridge(member, jtpe)
}
}
}
+ if (bridges.size > 0)
+ log(s"Adding ${bridges.size} bridges for methods extending java varargs.")
+
bridges.toList
}
else Nil
@@ -905,13 +897,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* the type occurs itself at variance position given by `variance`
*/
def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType => ;
- case WildcardType => ;
- case NoType => ;
- case NoPrefix => ;
- case ThisType(_) => ;
- case ConstantType(_) => ;
- // case DeBruijnIndex(_, _) => ;
+ case ErrorType =>
+ case WildcardType =>
+ case BoundedWildcardType(bounds) =>
+ validateVariance(bounds, variance)
+ case NoType =>
+ case NoPrefix =>
+ case ThisType(_) =>
+ case ConstantType(_) =>
+ // case DeBruijnIndex(_, _) =>
case SingleType(pre, sym) =>
validateVariance(pre, variance)
case TypeRef(pre, sym, args) =>
@@ -1378,10 +1372,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation)
- unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
- sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
- )
+ if (sym.hasMigrationAnnotation) {
+ val changed = try
+ settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
+ catch {
+ case e : NumberFormatException =>
+ unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
+ true
+ }
+ if (changed)
+ unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ }
}
private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
@@ -1473,8 +1475,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
+ ( args.nonEmpty
+ && (args.last eq tree)
+ && (fn.tpe.params.length == args.length)
+ && isRepeatedParamType(fn.tpe.params.last.tpe)
+ )
case _ =>
false
}
@@ -1587,7 +1592,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
- if (settings.Xmigration28.value)
+    if (settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
@@ -1686,8 +1691,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
- if (currentOwner.isDerivedValueClass)
- currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index a907ab6c66..39f6f764e7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -78,14 +78,7 @@ trait SyntheticMethods extends ast.TreeDSL {
else templ
}
- val originalAccessors = clazz.caseFieldAccessors
- // private ones will have been renamed -- make sure they are entered
- // in the original order.
- def accessors = clazz.caseFieldAccessors sortBy { acc =>
- originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
- }
- }
+ def accessors = clazz.caseFieldAccessors
val arity = accessors.size
// If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
// !!! Hidden behind -Xexperimental due to bummer type inference bugs.
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 48a5a36b00..c5c3c560ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -117,7 +117,8 @@ abstract class TreeCheckers extends Analyzer {
try p.source.path + ":" + p.line
catch { case _: UnsupportedOperationException => p.toString }
- def errorFn(msg: Any): Unit = println("[check: %s] %s".format(phase.prev, msg))
+ private var hasError: Boolean = false
+ def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
def informFn(msg: Any) {
if (settings.verbose.value || settings.debug.value)
@@ -151,6 +152,7 @@ abstract class TreeCheckers extends Analyzer {
result
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
+ hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
@@ -169,6 +171,7 @@ abstract class TreeCheckers extends Analyzer {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
+ if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
@@ -217,8 +220,11 @@ abstract class TreeCheckers extends Analyzer {
case _ => ()
}
- object precheck extends Traverser {
+ object precheck extends TreeStackTraverser {
override def traverse(tree: Tree) {
+ checkSymbolRefsRespectScope(tree)
+ checkReturnReferencesDirectlyEnclosingDef(tree)
+
val sym = tree.symbol
def accessed = sym.accessed
def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
@@ -289,6 +295,41 @@ abstract class TreeCheckers extends Analyzer {
}
super.traverse(tree)
}
+
+ private def checkSymbolRefsRespectScope(tree: Tree) {
+    def symbolOf(t: Tree): Symbol = Option(t.symbol).getOrElse(NoSymbol)
+ def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
+ val info = Option(symbolOf(tree).info).getOrElse(NoType)
+ val referencedSymbols: List[Symbol] = {
+ val directRef = tree match {
+ case _: RefTree => symbolOf(tree).toOption
+ case _ => None
+ }
+ def referencedSyms(tp: Type) = (tp collect {
+ case TypeRef(_, sym, _) => sym
+ }).toList
+ val indirectRefs = referencedSyms(info)
+ (indirectRefs ++ directRef).distinct
+ }
+ for {
+ sym <- referencedSymbols
+ if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
+    } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to an out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+ }
+
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
+ tree match {
+ case _: Return =>
+ path.collectFirst {
+ case dd: DefDef => dd
+ } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) =>
+            if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}) does not reference directly enclosing DefDef (${dd.symbol})")
+ }
+ case _ =>
+ }
+ }
}
object postcheck extends Traverser {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 553583e6b7..026c130a87 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -453,12 +453,12 @@ trait Typers extends Modes with Adaptations with Tags {
def reenterValueParams(vparamss: List[List[ValDef]]) {
for (vparams <- vparamss)
for (vparam <- vparams)
- vparam.symbol = context.scope enter vparam.symbol
+ context.scope enter vparam.symbol
}
def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
for (tparam <- tparams) yield {
- tparam.symbol = context.scope enter tparam.symbol
+ context.scope enter tparam.symbol
tparam.symbol.deSkolemize
}
@@ -872,7 +872,9 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that
+ // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin.
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
}
else
@@ -1052,15 +1054,21 @@ trait Typers extends Modes with Adaptations with Tags {
def insertApply(): Tree = {
assert(!inHKMode(mode), modeString(mode)) //@M
- val qual = adaptToName(tree, nme.apply) match {
- case id @ Ident(_) =>
- val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
- else if (id.symbol.owner.isClass)
- context.enclosingSubClassContext(id.symbol.owner).prefix
- else NoPrefix
- stabilize(id, pre, EXPRmode | QUALmode, WildcardType)
- case sel @ Select(qualqual, _) =>
- stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType)
+ val adapted = adaptToName(tree, nme.apply)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+      // TODO reconcile the overlap between Typers#stabilize and TreeGen.stabilize
+ val qual = adapted match {
+ case This(_) =>
+ gen.stabilize(adapted)
+ case Ident(_) =>
+ val owner = adapted.symbol.owner
+ val pre =
+ if (owner.isPackageClass) owner.thisType
+ else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
+ else NoPrefix
+ stabilize0(pre)
+ case Select(qualqual, _) =>
+ stabilize0(qualqual.tpe)
case other =>
other
}
@@ -1071,8 +1079,8 @@ trait Typers extends Modes with Adaptations with Tags {
// begin adapt
tree.tpe match {
- case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
- adaptAnnotations(tree, mode, pt)
+ case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+ adaptAnnotations(tree, this, mode, pt)
case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
@@ -1176,8 +1184,8 @@ trait Typers extends Modes with Adaptations with Tags {
Select(tree, "to" + sym.name)
}
}
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
@@ -1452,7 +1460,7 @@ trait Typers extends Modes with Adaptations with Tags {
case DefDef(_, name, _, _, _, rhs) =>
if (stat.symbol.isAuxiliaryConstructor)
notAllowed("secondary constructor")
- else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_))
+ else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic)
notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.")
else if (stat.symbol != null && stat.symbol.isParamAccessor)
notAllowed("additional parameter")
@@ -1903,7 +1911,7 @@ trait Typers extends Modes with Adaptations with Tags {
})
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
-
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
// same owner. Until that can be straightened out we can't allow companion objects in package
@@ -1916,20 +1924,20 @@ trait Typers extends Modes with Adaptations with Tags {
// can't handle case classes in package objects
if (m.isCaseClass) pkgObjectRestriction(m, mdef, "case")
// can't handle companion class/object pairs in package objects
- else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
- (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
+ else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
+ (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
pkgObjectRestriction(m, mdef, "companion")
}
def pkgObjectRestriction(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
context.error(if (m.pos.isDefined) m.pos else mdef.pos, s"implementation restriction: package object ${pkgName} cannot contain ${restricted} ${m}. Instead, ${m} should be placed directly in package ${pkgName}.")
- }
+ }
}
if (!settings.companionsInPkgObjs.value && mdef.symbol.isPackageObject)
restrictPackageObjectMembers(mdef)
-
+
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
@@ -2058,21 +2066,28 @@ trait Typers extends Modes with Adaptations with Tags {
* @return ...
*/
def typedValDef(vdef: ValDef): ValDef = {
-// attributes(vdef)
+ val sym = vdef.symbol
+ val valDefTyper = {
+ val maybeConstrCtx =
+ if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
+ else context
+ newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
+ }
+ valDefTyper.typedValDefImpl(vdef)
+ }
+
+    // Use typedValDef instead. This version is called after creating a new context for the ValDef.
+ private def typedValDefImpl(vdef: ValDef) = {
val sym = vdef.symbol.initialize
- val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
val typedMods = typedModifiers(vdef.mods)
sym.annotations.map(_.completeInfo)
- var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
+ val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr)) {
- if (!sym.isMutable)
- VolatileValueError(vdef)
- else if (sym.isFinal)
- FinalVolatileVarError(vdef)
- }
+ if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
+ VolatileValueError(vdef)
+
val rhs1 =
if (vdef.rhs.isEmpty) {
if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper)
@@ -2095,7 +2110,7 @@ trait Typers extends Modes with Adaptations with Tags {
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
+ transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
@@ -2213,37 +2228,58 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
val meth = ddef.symbol
- def fail(pos: Position, msg: String) = unit.error(pos, msg)
- val tp: Type = meth.tpe match {
- case mt @ MethodType(_, _) => mt
- case NullaryMethodType(restpe) => restpe // TODO_NMT: drop NullaryMethodType from resultType?
- case PolyType(_, restpe) => restpe
- case _ => NoType
- }
- def nthParamPos(n: Int) = ddef.vparamss match {
- case xs :: _ if xs.length > n => xs(n).pos
- case _ => meth.pos
- }
- def failStruct(pos: Position, what: String, where: String = "Parameter") =
- fail(pos, s"$where type in structural refinement may not refer to $what")
-
- foreachWithIndex(tp.paramTypes) { (paramType, idx) =>
- val sym = paramType.typeSymbol
- def paramPos = nthParamPos(idx)
-
- if (sym.isAbstractType) {
- if (!sym.hasTransOwner(meth.owner))
- failStruct(paramPos, "an abstract type defined outside that refinement")
- else if (!sym.hasTransOwner(meth))
- failStruct(paramPos, "a type member of that refinement")
+ def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match {
+ case Nil => ""
+ case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
+ }
+ def fail(pos: Position, msg: String): Boolean = {
+ unit.error(pos, msg)
+ false
+ }
+ /** Have to examine all parameters in all lists.
+ */
+ def paramssTypes(tp: Type): List[List[Type]] = tp match {
+ case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
+ case PolyType(_, restpe) => paramssTypes(restpe)
+ case _ => Nil
+ }
+ def resultType = meth.tpe.finalResultType
+ def nthParamPos(n1: Int, n2: Int) =
+ try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
+
+ def failStruct(pos: Position, what: String, where: String = "Parameter type") =
+ fail(pos, s"$where in structural refinement may not refer to $what")
+
+ foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) =>
+ foreachWithIndex(paramList) { (paramType, paramIdx) =>
+ val sym = paramType.typeSymbol
+ def paramPos = nthParamPos(listIdx, paramIdx)
+
+ /** Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
+ */
+ def checkAbstract(tp0: Type, what: String): Boolean = {
+ def check(sym: Symbol): Boolean = !sym.isAbstractType || {
+ log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""")
+ ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what))
+ || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what))
+ || checkAbstract(sym.info.bounds.hi, "Type bound")
+ )
+ }
+ tp0.dealiasWidenChain forall (t => check(t.typeSymbol))
+ }
+ checkAbstract(paramType, "Parameter type")
+
+ if (sym.isDerivedValueClass)
+ failStruct(paramPos, "a user-defined value class")
+ if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
+ failStruct(paramPos, "the type of that refinement (self type)")
}
- if (sym.isDerivedValueClass)
- failStruct(paramPos, "a user-defined value class")
- if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- failStruct(paramPos, "the type of that refinement (self type)")
}
- if (tp.resultType.typeSymbol.isDerivedValueClass)
- failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result")
+ if (resultType.typeSymbol.isDerivedValueClass)
+ failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
def typedUseCase(useCase: UseCase) {
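An editor's aside: a hedged sketch (not part of this patch, names hypothetical) of the shape of code the recursive `checkAbstract` above is meant to catch. The method's own type parameter T passes the old ownership test, but its upper bound A is an abstract type defined outside the refinement, which the recursive bound check should now flag.

trait BoundCheckSketch {
  type A                                     // abstract type defined outside the refinement
  // Assuming the check behaves as the comment above describes, the bound `T <: A` should
  // now be reported as a "Type bound in structural refinement may not refer to ..." error.
  def struct: { def m[T <: A](t: T): Unit }
}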
@@ -2373,13 +2409,12 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeDef(tdef: TypeDef): TypeDef =
- typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){
- _.typedTypeDef0(tdef)
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
+ _.typedTypeDefImpl(tdef)
}
- // call typedTypeDef instead
- // a TypeDef with type parameters must always be type checked in a new scope
- private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+    // Use typedTypeDef instead. This version is called after creating a new context for the TypeDef.
+ private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
@@ -4434,8 +4469,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (typed(expr).tpe.typeSymbol != UnitClass)
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
- treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- .setType(adaptTypeOfReturn(expr1, restpt.tpe, NothingClass.tpe))
+ val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
+ val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ res.setType(tp)
}
}
}
@@ -5339,10 +5375,14 @@ trait Typers extends Modes with Adaptations with Tags {
typed(docdef.definition, mode, pt)
}
+ /**
+ * The typer with the correct context for a method definition. If the method is a default getter for
+     * a constructor default argument, the resulting typer has a constructor context (fixes SI-5543).
+ */
def defDefTyper(ddef: DefDef) = {
- val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
- newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(flag)
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
}
def typedAlternative(alt: Alternative) = {
@@ -5629,20 +5669,21 @@ trait Typers extends Modes with Adaptations with Tags {
lastTreeToTyper = tree
indentTyping()
- var alreadyTyped = false
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
+
val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
+ (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- alreadyTyped = tree.tpe ne null
+ val alreadyTyped = tree.tpe ne null
var tree1: Tree = if (alreadyTyped) tree else {
printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
@@ -5651,7 +5692,7 @@ trait Typers extends Modes with Adaptations with Tags {
"context.owner" -> context.owner
)
)
- typed1(tree, mode, dropExistential(pt))
+ typed1(tree, mode, dropExistential(ptPlugins))
}
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
@@ -5665,12 +5706,12 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, ptPlugins, tree)
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
) //DEBUG
}
if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -5685,7 +5726,7 @@ trait Typers extends Modes with Adaptations with Tags {
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+pt)
+ Console.println("exception when typing "+tree+", pt = "+ptPlugins)
if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
throw ex
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 3e4e0f49d7..577aa087ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -113,7 +113,7 @@ trait Unapplies extends ast.TreeDSL
def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
+ Modifiers(OVERRIDE | FINAL | SYNTHETIC),
nme.toString_,
Nil,
ListOfNil,
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 7d97b0c782..ea436a71fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -67,6 +67,8 @@ trait Variances {
def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
VARIANCES
+ case BoundedWildcardType(bounds) =>
+ varianceInType(bounds)(tparam)
case SingleType(pre, sym) =>
varianceInType(pre)(tparam)
case TypeRef(pre, sym, args) =>
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 15025f85e3..00c72cf423 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -9,6 +9,7 @@ import scala.tools.nsc.MissingRequirementError
abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
import global._
+ import analyzer.{AnalyzerPlugin, Typer}
import definitions._
//override val verbose = true
@@ -18,12 +19,12 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* Checks whether @cps annotations conform
*/
object checker extends AnnotationChecker {
- private def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
- private def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
+ private[CPSAnnotationChecker] def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
+ private[CPSAnnotationChecker] def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
- private def cleanPlus(tp: Type) =
+ private[CPSAnnotationChecker] def cleanPlus(tp: Type) =
removeAttribs(tp, MarkerCPSAdaptPlus, MarkerCPSTypes)
- private def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
+ private[CPSAnnotationChecker] def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
cleanPlus(tp) withAnnotations newAnnots.toList
/** Check annotations to decide whether tpe1 <:< tpe2 */
@@ -116,8 +117,13 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else
bounds
}
+ }
+
+ object plugin extends AnalyzerPlugin {
+
+ import checker._
- override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
if (!cpsEnabled) return false
vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
@@ -183,7 +189,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else false
}
- override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
if (!cpsEnabled) return tree
vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
@@ -239,14 +245,15 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* is in tail position. Therefore, we are making sure that only the types of return expressions
* are adapted which will either be removed, or lead to an error.
*/
- override def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = {
+ override def pluginsTypedReturn(default: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val expr = tree.expr
// only adapt if method's result type (pt) is cps type
val annots = cpsParamAnnotation(pt)
if (annots.nonEmpty) {
- // return type of `tree` without plus marker, but only if it doesn't have other cps annots
- if (hasPlusMarker(tree.tpe) && !hasCpsParamTypes(tree.tpe))
- tree.setType(removeAttribs(tree.tpe, MarkerCPSAdaptPlus))
- tree.tpe
+ // return type of `expr` without plus marker, but only if it doesn't have other cps annots
+ if (hasPlusMarker(expr.tpe) && !hasCpsParamTypes(expr.tpe))
+ expr.setType(removeAttribs(expr.tpe, MarkerCPSAdaptPlus))
+ expr.tpe
} else default
}
@@ -393,7 +400,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
- override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
import scala.util.control._
if (!cpsEnabled) {
if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
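For orientation, a minimal hedged sketch of the new analyzer-plugin hooks this file now uses (the phase test and the logging are illustrative assumptions, not something the patch prescribes):

abstract class LoggingAnalyzerPluginSketch {
  val global: scala.tools.nsc.Global
  import global._
  import analyzer.{AnalyzerPlugin, Typer}

  object plugin extends AnalyzerPlugin {
    // assumed scoping: only run while the typer phase is active
    override def isActive(): Boolean = phase.id <= currentRun.typerPhase.id

    // observe, but do not change, the type inferred for a tree
    override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
      if (settings.debug.value) inform("typed " + tree + ": " + tpe + " (expected: " + pt + ")")
      tpe
    }
  }

  // registration mirrors the SelectiveCPSPlugin change below
  def register(): Unit = global.analyzer.addAnalyzerPlugin(plugin)
}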
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
index 8a500d6c4d..237159795a 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -33,6 +33,7 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
}
global.addAnnotationChecker(checker.checker)
+ global.analyzer.addAnalyzerPlugin(checker.plugin)
global.log("instantiated cps plugin: " + this)
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index 49fea9434c..adb6de6afd 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -17,7 +17,8 @@ package scala.annotation
* order between Scala 2.7 and 2.8.
*
* @param message A message describing the change, which is emitted
- * by the compiler if the flag `-Xmigration` is set.
+ * by the compiler if the flag `-Xmigration` indicates a version
+ * prior to the changedIn version.
*
* @param changedIn The version, in which the behaviour change was
* introduced.
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 56e386ad67..55ac3995e9 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -295,6 +295,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
result
}
+
+ override def foldRight[B](z: B)(op: (A, B) => B): B =
+ reverse.foldLeft(z)((right, left) => op(left, right))
override def stringPrefix = "List"
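As an illustration (hypothetical standalone code, not part of the patch), the reverse-then-foldLeft encoding above computes the same right fold while keeping stack usage constant, at the cost of building the reversed list:

object FoldRightSketch {
  // same shape as the new List.foldRight: flip the operator and fold left over the reverse
  def foldRightViaLeft[A, B](xs: List[A])(z: B)(op: (A, B) => B): B =
    xs.reverse.foldLeft(z)((acc, x) => op(x, acc))

  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4)
    // both compute the right-associated 1 - (2 - (3 - (4 - 0)))
    assert(foldRightViaLeft(xs)(0)(_ - _) == xs.foldRight(0)(_ - _))
    println(foldRightViaLeft(xs)(0)(_ - _)) // -2
  }
}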
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 1c461973e4..5bb4ef5f21 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -841,9 +841,16 @@ self =>
* // produces: "1, 2, 3, 4, 5, 6"
* }}}
*/
- override def distinct: Stream[A] =
- if (isEmpty) this
- else cons(head, tail.filter(head != _).distinct)
+ override def distinct: Stream[A] = {
+ // This should use max memory proportional to N, whereas
+ // recursively calling distinct on the tail is N^2.
+ def loop(seen: Set[A], rest: Stream[A]): Stream[A] = {
+ if (rest.isEmpty) rest
+ else if (seen(rest.head)) loop(seen, rest.tail)
+ else cons(rest.head, loop(seen + rest.head, rest.tail))
+ }
+ loop(Set(), this)
+ }
/** Returns a new sequence of given length containing the elements of this
* sequence followed by zero or more occurrences of given elements.
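A hedged usage sketch (hypothetical standalone code): the rewritten `distinct` tracks the elements seen so far in a Set, so memory grows with the number of distinct elements rather than quadratically with the prefix length, and laziness is preserved.

object StreamDistinctSketch {
  def main(args: Array[String]): Unit = {
    println(Stream(1, 2, 1, 3, 2, 4).distinct.toList) // List(1, 2, 3, 4)
    // laziness is preserved: only the consumed prefix of an infinite stream is forced
    println(Stream.from(1).distinct.take(5).toList)   // List(1, 2, 3, 4, 5)
  }
}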
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 212ee917c5..7f05deffc8 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -12,6 +12,7 @@ package scala.collection
package mutable
import generic._
+import annotation.tailrec
/** A simple mutable map backed by a list.
*
@@ -47,13 +48,17 @@ extends AbstractMap[A, B]
def get(key: A): Option[B] = elems find (_._1 == key) map (_._2)
def iterator: Iterator[(A, B)] = elems.iterator
- def += (kv: (A, B)) = { elems = remove(kv._1, elems); elems = kv :: elems; siz += 1; this }
- def -= (key: A) = { elems = remove(key, elems); this }
- private def remove(key: A, elems: List[(A, B)]): List[(A, B)] =
- if (elems.isEmpty) elems
- else if (elems.head._1 == key) { siz -= 1; elems.tail }
- else elems.head :: remove(key, elems.tail)
+ def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this }
+ def -= (key: A) = { elems = remove(key, elems, List()); this }
+
+ @tailrec
+ private def remove(key: A, elems: List[(A, B)], acc: List[(A, B)]): List[(A, B)] = {
+ if (elems.isEmpty) acc
+ else if (elems.head._1 == key) { siz -= 1; acc ::: elems.tail }
+ else remove(key, elems.tail, elems.head :: acc)
+ }
+
override def clear() = { elems = List(); siz = 0 }
override def size: Int = siz
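A standalone sketch (hypothetical names) of the accumulator-based removal used above: being tail-recursive, it no longer risks a stack overflow on very long backing lists, at the cost of not preserving the relative order of the kept entries.

import scala.annotation.tailrec

object ListRemoveSketch {
  @tailrec
  def remove[K, V](key: K, elems: List[(K, V)], acc: List[(K, V)]): List[(K, V)] =
    if (elems.isEmpty) acc
    else if (elems.head._1 == key) acc ::: elems.tail   // drop the first match, keep the rest
    else remove(key, elems.tail, elems.head :: acc)     // tail call: accumulate kept entries

  def main(args: Array[String]): Unit = {
    val elems = List("a" -> 1, "b" -> 2, "c" -> 3)
    println(remove("b", elems, Nil)) // List((a,1), (c,3)); order of kept entries may change
  }
}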
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 215f90b17e..77625e381c 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -25,11 +25,15 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
case some => some
}
+ private val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
+
// Implement BlockContext on FJP threads
class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
- //Potentially set things like uncaught exception handler, name etc
+ thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
thread
}
@@ -73,7 +77,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
new ForkJoinPool(
desiredParallelism,
threadFactory,
- null, //FIXME we should have an UncaughtExceptionHandler, see what Akka does
+ uncaughtExceptionHandler,
true) // Async all the way baby
} catch {
case NonFatal(t) =>
@@ -94,13 +98,13 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
+ val fjt = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj =>
- (runnable match {
- case fjt: ForkJoinTask[_] => fjt
- case _ => ForkJoinTask.adapt(runnable)
- }).fork
- case _ => fj.execute(runnable)
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+ case _ => fj execute fjt
}
case generic => generic execute runnable
}
@@ -111,6 +115,20 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
private[concurrent] object ExecutionContextImpl {
+ final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable ⇒
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null ⇒
+ case some ⇒ some.uncaughtException(t, anything)
+ }
+ throw anything
+ }
+ }
+
def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
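A hedged sketch of the general JVM mechanism this change relies on, using plain java.util.concurrent and hypothetical names: a ThreadFactory installs an UncaughtExceptionHandler that forwards failures to a reporter function, so an exception escaping a pool thread is reported instead of silently killing the thread.

import java.util.concurrent.{Executors, ThreadFactory}

object UncaughtHandlerSketch {
  def threadFactory(reporter: Throwable => Unit): ThreadFactory = new ThreadFactory {
    val handler = new Thread.UncaughtExceptionHandler {
      def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
    }
    def newThread(r: Runnable): Thread = {
      val t = new Thread(r)
      t.setDaemon(true)
      t.setUncaughtExceptionHandler(handler)
      t
    }
  }

  def main(args: Array[String]): Unit = {
    val pool = Executors.newFixedThreadPool(1, threadFactory(t => println("reported: " + t)))
    pool.execute(new Runnable { def run() = throw new RuntimeException("boom") })
    Thread.sleep(200) // give the worker thread time to die and be reported
    pool.shutdown()
  }
}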
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index e9da45a079..52f1075137 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -34,7 +34,7 @@ private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete
value = v
// Note that we cannot prepare the ExecutionContext at this point, since we might
// already be running on a different thread!
- executor.execute(this)
+ try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t }
}
}
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index d3f8df9110..84f6f0be9c 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -95,10 +95,7 @@ package object scala {
val Equiv = scala.math.Equiv
type Fractional[T] = scala.math.Fractional[T]
- val Fractional = scala.math.Fractional
-
type Integral[T] = scala.math.Integral[T]
- val Integral = scala.math.Integral
type Numeric[T] = scala.math.Numeric[T]
val Numeric = scala.math.Numeric
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest/scala/tools/partest/ASMConverters.scala
new file mode 100644
index 0000000000..d618e086f4
--- /dev/null
+++ b/src/partest/scala/tools/partest/ASMConverters.scala
@@ -0,0 +1,71 @@
+package scala.tools.partest
+
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.tree.{ClassNode, MethodNode, InsnList}
+
+/** Makes using ASM from BytecodeTests more convenient.
+ *
+ * Wraps ASM instructions in case classes so that equals and toString work
+ * for the purpose of bytecode diffing and pretty printing.
+ */
+trait ASMConverters {
+ // wrap ASM's instructions so we get case class-style `equals` and `toString`
+ object instructions {
+ def fromMethod(meth: MethodNode): List[Instruction] = {
+ val insns = meth.instructions
+ val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) }
+
+ asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]]
+ }
+
+ sealed abstract class Instruction { def opcode: String }
+ case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction
+ case class Op (opcode: String) extends Instruction
+ case class IntOp (opcode: String, operand: Int) extends Instruction
+ case class Jump (opcode: String, label: Label) extends Instruction
+ case class Ldc (opcode: String, cst: Any) extends Instruction
+ case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction
+ case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction
+ case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction
+ case class TypeOp (opcode: String, desc: String) extends Instruction
+ case class VarOp (opcode: String, `var`: Int) extends Instruction
+ case class Label (offset: Int) extends Instruction { def opcode: String = "" }
+ case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" }
+ case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" }
+ }
+
+ abstract class AsmToScala {
+ import instructions._
+
+ def labelIndex(l: asm.tree.AbstractInsnNode): Int
+
+ def mapOver(is: List[Any]): List[Any] = is map {
+ case i: asm.tree.AbstractInsnNode => apply(i)
+ case x => x
+ }
+
+ def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?"
+ def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
+ def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label]
+ def apply(x: asm.tree.AbstractInsnNode): Instruction = x match {
+ case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int)
+ case i: asm.tree.InsnNode => Op (op(i))
+ case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int)
+ case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label))
+ case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any)
+ case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int)
+ case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String)
+ case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int)
+ case i: asm.tree.LabelNode => Label (labelIndex(x))
+ case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack)))
+ case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label)
+ }
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala
new file mode 100644
index 0000000000..41329a8264
--- /dev/null
+++ b/src/partest/scala/tools/partest/BytecodeTest.scala
@@ -0,0 +1,102 @@
+package scala.tools.partest
+
+import scala.tools.nsc.util.JavaClassPath
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, MethodNode, InsnList}
+import java.io.InputStream
+
+/**
+ * Provides utilities for inspecting bytecode using the ASM library.
+ *
+ * HOW TO USE
+ * 1. Create a subdirectory in test/files/jvm for your test. Let's name it $TESTDIR.
+ * 2. Create $TESTDIR/BytecodeSrc_1.scala containing the Scala source whose bytecode
+ *    you want to inspect. The '_1' suffix signals to partest that it
+ * should compile this file first.
+ * 3. Create $TESTDIR/Test.scala:
+ * import scala.tools.partest.BytecodeTest
+ * object Test extends BytecodeTest {
+ * def show {
+ *        // your code that inspects ASM trees and prints values
+ * }
+ * }
+ * 4. Create corresponding check file.
+ *
+ * EXAMPLE
+ * See test/files/jvm/bytecode-test-example for an example of a bytecode test.
+ *
+ */
+abstract class BytecodeTest extends ASMConverters {
+
+ /** produce the output to be compared against a checkfile */
+ protected def show(): Unit
+
+ def main(args: Array[String]): Unit = show
+
+// asserts
+ def sameBytecode(methA: MethodNode, methB: MethodNode) = {
+ val isa = instructions.fromMethod(methA)
+ val isb = instructions.fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else diffInstructions(isa, isb)
+ }
+
+ import instructions._
+ // bytecode is equal modulo local variable numbering
+ def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
+ case _ if a == b => true
+ case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
+ case _ => false
+ }
+
+ def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
+ val isa = fromMethod(methA)
+ val isb = fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
+ else diffInstructions(isa, isb)
+ }
+
+ def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+ val len = Math.max(isa.length, isb.length)
+ if (len > 0 ) {
+ val width = isa.map(_.toString.length).max
+ val lineWidth = len.toString.length
+ (1 to len) foreach { line =>
+ val isaPadded = isa.map(_.toString) orElse Stream.continually("")
+ val isbPadded = isb.map(_.toString) orElse Stream.continually("")
+ val a = isaPadded(line-1)
+ val b = isbPadded(line-1)
+
+ println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""")
+ }
+ }
+ }
+
+// loading
+ protected def getMethod(classNode: ClassNode, name: String): MethodNode =
+ classNode.methods.asScala.find(_.name == name) getOrElse
+ sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
+
+ protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
+ val classBytes: InputStream = (for {
+ classRep <- classpath.findClass(name)
+ binary <- classRep.binary
+ } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+
+ val cr = new ClassReader(classBytes)
+ val cn = new ClassNode()
+ cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
+ cn
+ }
+
+ protected lazy val classpath: JavaClassPath = {
+ import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+ import scala.tools.util.PathResolver.Defaults
+ // logic inspired by scala.tools.util.PathResolver implementation
+ val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
+ new JavaClassPath(containers, DefaultJavaContext)
+ }
+}
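A hedged usage sketch of the helper above (class and method names hypothetical): a partest Test object that loads a compiled class from the test classpath and compares the bytecode of two of its methods.

import scala.tools.partest.BytecodeTest

object Test extends BytecodeTest {
  def show {
    val classNode = loadClassNode("SampleClass")  // compiled from a *_1.scala file in $TESTDIR
    val methA     = getMethod(classNode, "methodA")
    val methB     = getMethod(classNode, "methodB")
    sameBytecode(methA, methB)                    // prints "bytecode identical" or a line diff
  }
}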
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 562b1da8e3..2ba18a8207 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -90,6 +90,7 @@ trait Exprs { self: Universe =>
* }}}
* because expr of type Expr[T] itself does not have a method foo.
*/
+ // @compileTimeOnly("Cannot use splice outside reify")
def splice: T
/**
@@ -106,6 +107,7 @@ trait Exprs { self: Universe =>
* object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
* }}}
*/
+ // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
val value: T
override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 5318d3e540..1ab975b233 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -16,7 +16,15 @@ trait AnnotationCheckers {
/** An additional checker for annotations on types.
* Typically these are registered by compiler plugins
* with the addAnnotationChecker method. */
- abstract class AnnotationChecker {
+ trait AnnotationChecker {
+
+ /**
+ * Selectively activate this annotation checker. When using both an annotation checker
+ * and an analyzer plugin, it is common to run both of them only during selected
+ * compiler phases. See documentation in AnalyzerPlugin.isActive.
+ */
+ def isActive(): Boolean = true
+
/** Check the annotations on two types conform. */
def annotationsConform(tpe1: Type, tpe2: Type): Boolean
@@ -29,39 +37,51 @@ trait AnnotationCheckers {
def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
/** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = bounds
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] = bounds
- /** Modify the type that has thus far been inferred
- * for a tree. All this should do is add annotations. */
+ /**
+ * Modify the type that has thus far been inferred for a tree. All this should
+ * do is add annotations.
+ */
+ @deprecated("Create an AnalyzerPlugin and use pluginsTyped", "2.10.1")
def addAnnotations(tree: Tree, tpe: Type): Type = tpe
- /** Decide whether this annotation checker can adapt a tree
- * that has an annotated type to the given type tp, taking
- * into account the given mode (see method adapt in trait Typers).*/
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
- /** Adapt a tree that has an annotated type to the given type tp,
- * taking into account the given mode (see method adapt in trait Typers).
- * An implementation cannot rely on canAdaptAnnotations being called
- * before. If the implementing class cannot do the adaptiong, it
- * should return the tree unchanged.*/
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+   * class cannot do the adaptation, it should return the tree unchanged.
+ */
+ @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
- /** Adapt the type of a return expression. The decision of an annotation checker
- * whether the type should be adapted is based on the type of the expression
- * which is returned, as well as the result type of the method (pt).
- * By default, this method simply returns the passed `default` type.
+ /**
+ * Adapt the type of a return expression. The decision of a typer plugin whether the type
+ * should be adapted is based on the type of the expression which is returned, as well as the
+ * result type of the method (pt).
+ *
+ * By default, this method simply returns the passed `default` type.
*/
+ @deprecated("Create an AnalyzerPlugin and use pluginsTypedReturn. Note: the 'tree' argument here is\n"+
+ "the 'expr' of a Return tree; 'pluginsTypedReturn' takes the Return tree itself as argument", "2.10.1")
def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = default
}
// Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary.
+
/** The list of annotation checkers that have been registered */
private var annotationCheckers: List[AnnotationChecker] = Nil
- /** Register an annotation checker. Typically these
- * are added by compiler plugins. */
+ /** Register an annotation checker. Typically these are added by compiler plugins. */
def addAnnotationChecker(checker: AnnotationChecker) {
if (!(annotationCheckers contains checker))
annotationCheckers = checker :: annotationCheckers
@@ -72,76 +92,53 @@ trait AnnotationCheckers {
annotationCheckers = Nil
}
- /** Check that the annotations on two types conform. To do
- * so, consult all registered annotation checkers. */
- def annotationsConform(tp1: Type, tp2: Type): Boolean = {
- /* Finish quickly if there are no annotations */
- if (tp1.annotations.isEmpty && tp2.annotations.isEmpty)
- true
- else
- annotationCheckers.forall(
- _.annotationsConform(tp1,tp2))
- }
-
- /** Refine the computed least upper bound of a list of types.
- * All this should do is add annotations. */
- def annotationsLub(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsLub(tpe, ts))
- }
-
- /** Refine the computed greatest lower bound of a list of types.
- * All this should do is add annotations. */
- def annotationsGlb(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsGlb(tpe, ts))
- }
-
- /** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
- annotationCheckers.foldLeft(bounds)((bounds, checker) =>
- checker.adaptBoundsToAnnotations(bounds, tparams, targs))
- }
-
- /** Let all annotations checkers add extra annotations
- * to this tree's type. */
- def addAnnotations(tree: Tree, tpe: Type): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.addAnnotations(tree, tpe))
- }
-
- /** Find out whether any annotation checker can adapt a tree
- * to a given type. Called by Typers.adapt. */
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
- annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt))
- }
-
- /** Let registered annotation checkers adapt a tree
- * to a given type (called by Typers.adapt). Annotation checkers
- * that cannot do the adaption should pass the tree through
- * unchanged. */
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
- annotationCheckers.foldLeft(tree)((tree, checker) =>
- checker.adaptAnnotations(tree, mode, pt))
- }
-
- /** Let a registered annotation checker adapt the type of a return expression.
- * Annotation checkers that cannot do the adaptation should simply return
- * the `default` argument.
- *
- * Note that the result is undefined if more than one annotation checker
- * returns an adapted type which is not a subtype of `default`.
- */
- def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = {
- val adaptedTypes = annotationCheckers flatMap { checker =>
- val adapted = checker.adaptTypeOfReturn(tree, pt, default)
- if (!(adapted <:< default)) List(adapted)
- else List()
- }
- adaptedTypes match {
- case fst :: _ => fst
- case List() => default
- }
- }
+ /** @see AnnotationChecker.annotationsConform */
+ def annotationsConform(tp1: Type, tp2: Type): Boolean =
+ if (annotationCheckers.isEmpty || (tp1.annotations.isEmpty && tp2.annotations.isEmpty)) true
+ else annotationCheckers.forall(checker => {
+ !checker.isActive() || checker.annotationsConform(tp1,tp2)
+ })
+
+ /** @see AnnotationChecker.annotationsLub */
+ def annotationsLub(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsLub(tpe, ts))
+
+ /** @see AnnotationChecker.annotationsGlb */
+ def annotationsGlb(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsGlb(tpe, ts))
+
+ /** @see AnnotationChecker.adaptBoundsToAnnotations */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] =
+ if (annotationCheckers.isEmpty) bounds
+ else annotationCheckers.foldLeft(bounds)((bounds, checker) =>
+ if (!checker.isActive()) bounds else checker.adaptBoundsToAnnotations(bounds, tparams, targs))
+
+
+  /* The following methods will be removed together with the deprecated methods in AnnotationChecker. */
+
+ def addAnnotations(tree: Tree, tpe: Type): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
+
+ def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+ if (annotationCheckers.isEmpty) false
+ else annotationCheckers.exists(checker => {
+ checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
+ })
+
+ def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+ if (annotationCheckers.isEmpty) tree
+ else annotationCheckers.foldLeft(tree)((tree, checker) =>
+ if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
+
+ def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type =
+ if (annotationCheckers.isEmpty) default
+ else annotationCheckers.foldLeft(default)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.adaptTypeOfReturn(tree, pt, tpe))
}
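A minimal hedged sketch of the new isActive hook (the phase test is an assumption about how a plugin might scope itself, not something the patch prescribes):

abstract class PhaseScopedCheckerSketch {
  val global: scala.tools.nsc.Global
  import global._

  object checker extends AnnotationChecker {
    // assumed scoping: only consult this checker up to and including the typer phase
    override def isActive(): Boolean = phase.id <= currentRun.typerPhase.id
    // conservative default: annotations never cause a conformance failure
    def annotationsConform(tpe1: Type, tpe2: Type): Boolean = true
  }

  def register(): Unit = global.addAnnotationChecker(checker)
}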
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 4269b65297..6e4ca76382 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -671,6 +671,11 @@ trait Definitions extends api.StandardDefinitions {
case _ => Nil
}
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
+
def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
case RefinedType(p :: _, _) => p.normalize
case tp => tp
@@ -678,9 +683,10 @@ trait Definitions extends api.StandardDefinitions {
def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
- def abstractFunctionForFunctionType(tp: Type) =
- if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
- else NoType
+ def abstractFunctionForFunctionType(tp: Type) = {
+ assert(isFunctionType(tp), tp)
+ abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
+ }
def isFunctionType(tp: Type): Boolean = tp.normalize match {
case TypeRef(_, sym, args) if args.nonEmpty =>
@@ -864,6 +870,12 @@ trait Definitions extends api.StandardDefinitions {
removeRedundantObjects(parents)
}
+ /** Flatten curried parameter lists of a method type. */
+ def allParameters(tpe: Type): List[Symbol] = tpe match {
+ case MethodType(params, res) => params ::: allParameters(res)
+ case _ => Nil
+ }
+
def typeStringNoPackage(tp: Type) =
"" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "."
@@ -949,7 +961,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
- lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.macros.compileTimeOnly")
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 59c027868e..8b24678fd6 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -32,19 +32,4 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
-
- /** Convert to corresponding type parameters all skolems of method
- * parameters which appear in `tparams`.
- */
- def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
- class DeSkolemizeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
- mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
- case _ =>
- mapOver(tp)
- }
- }
- new DeSkolemizeMap mapOver tp
- }
}
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index a4287fb181..72ad84edec 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -86,7 +86,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
}
- def knownDirectSubclasses = children
+ def knownDirectSubclasses = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ children
+ }
+
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
@@ -1188,6 +1192,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to generate a type of kind *
* for a term symbol, its usual type.
* See the tpe/tpeHK overrides in TypeSymbol for more.
+ *
+   * For type symbols, `tpe` is different from `info`. `tpe` returns a typeRef
+ * to the type symbol, `info` returns the type information of the type symbol,
+ * e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
*/
def tpe: Type = info
def tpeHK: Type = tpe
@@ -1583,8 +1591,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setAnnotations(annot :: annotations)
// Convenience for the overwhelmingly common case
- def addAnnotation(sym: Symbol, args: Tree*): this.type =
+ def addAnnotation(sym: Symbol, args: Tree*): this.type = {
+    // The assertion below is meant to prevent issues like SI-7009, but it's disabled
+    // due to problems with cycles while compiling the Scala library. It's rather shocking that
+    // just checking whether sym is a monomorphic type introduces nasty cycles. We are definitely
+    // forcing too much, because monomorphism is a local property of a type that can be checked
+    // syntactically.
+ // assert(sym.initialize.isMonomorphicType, sym)
addAnnotation(AnnotationInfo(sym.tpe, args.toList, Nil))
+ }
+
+  /** Use this variant if you want to pass (for example) an applied type. */
+ def addAnnotation(tp: Type, args: Tree*): this.type = {
+ assert(tp.typeParams.isEmpty, tp)
+ addAnnotation(AnnotationInfo(tp, args.toList, Nil))
+ }
// ------ comparisons ----------------------------------------------------------------
@@ -1651,6 +1672,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
// ------ cloneing -------------------------------------------------------------------
/** A clone of this symbol. */
@@ -1728,8 +1751,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** For a case class, the symbols of the accessor methods, one for each
* argument in the first parameter list of the primary constructor.
* The empty list for all other classes.
- */
- final def caseFieldAccessors: List[Symbol] =
+ *
+ * This list will be sorted to correspond to the declaration order
+   * in the constructor parameter list.
+ */
+ final def caseFieldAccessors: List[Symbol] = {
+ // We can't rely on the ordering of the case field accessors within decls --
+ // handling of non-public parameters seems to change the order (see SI-7035.)
+ //
+ // Luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
+ // (need to undo name-mangling, including the sneaky trailing whitespace)
+ //
+ // The slightly more principled approach of using the paramss of the
+ // primary constructor leads to cycles in, for example, pos/t5084.scala.
+ val primaryNames = constrParamAccessors.map(acc => nme.dropLocalSuffix(acc.name))
+ caseFieldAccessorsUnsorted.sortBy { acc =>
+ primaryNames indexWhere { orig =>
+ (acc.name == orig) || (acc.name startsWith (orig append "$"))
+ }
+ }
+ }
+ private final def caseFieldAccessorsUnsorted: List[Symbol] =
(info.decls filter (_.isCaseAccessorMethod)).toList
final def constrParamAccessors: List[Symbol] =
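A simplified, hypothetical illustration of the reordering idiom used in caseFieldAccessors above, with plain strings standing in for Names: accessors are sorted by the index of the matching constructor parameter, tolerating a mangled "$" suffix.

object AccessorOrderSketch {
  def main(args: Array[String]): Unit = {
    val primaryNames = List("x", "y", "z")        // constructor declaration order
    val accessors    = List("z", "x$access", "y") // order as found in decls (scrambled)
    val sorted = accessors.sortBy { acc =>
      primaryNames indexWhere { orig => acc == orig || acc.startsWith(orig + "$") }
    }
    println(sorted) // List(x$access, y, z)
  }
}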
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index ebf0998573..c1753fc5a1 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -172,10 +172,29 @@ abstract class TreeGen extends macros.TreeBuilder {
if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
mkAttributedIdent(sym)
else {
+ // Have to recognize anytime a selection is made on a package
+ // so it can be rewritten to foo.bar.`package`.name rather than
+ // foo.bar.name if name is in the package object.
+ // TODO - factor out the common logic between this and
+ // the Typers method "isInPackageObject", used in typedIdent.
+ val qualsym = (
+ if (qual.tpe ne null) qual.tpe.typeSymbol
+ else if (qual.symbol ne null) qual.symbol
+ else NoSymbol
+ )
+ val needsPackageQualifier = (
+ (sym ne null)
+ && qualsym.isPackage
+ && !sym.isDefinedInPackage
+ )
val pkgQualifier =
- if (sym != null && sym.owner.isPackageObjectClass && sym.effectiveOwner == qual.tpe.typeSymbol) {
- val obj = sym.owner.sourceModule
- Select(qual, nme.PACKAGE) setSymbol obj setType singleType(qual.tpe, obj)
+ if (needsPackageQualifier) {
+ // The owner of a symbol which requires package qualification may be the
+            // package object itself, but it also could be any superclass of the package
+ // object. In the latter case, we must go through the qualifier's info
+ // to obtain the right symbol.
+ val packageObject = if (sym.owner.isModuleClass) sym.owner.sourceModule else qual.tpe member nme.PACKAGE
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
}
else qual
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index e1a18570b2..98c1afb323 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -261,22 +261,24 @@ abstract class TreeInfo {
* in the position `for { <tree> <- expr }` based only
* on information at the `parser` phase? To qualify, there
* may be no subtree that will be interpreted as a
- * Stable Identifier Pattern.
+ * Stable Identifier Pattern, nor any type tests, even
+ * on TupleN. See SI-6968.
*
* For instance:
*
* {{{
- * foo @ (bar, (baz, quux))
+ * (foo @ (bar @ _)) = 0
* }}}
*
- * is a variable pattern; if the structure matches,
- * then the remainder is inevitable.
+   * is not a variable pattern; it only binds names.
*
* The following are not variable patterns.
*
* {{{
- * foo @ (bar, (`baz`, quux)) // back quoted ident, not at top level
- * foo @ (bar, Quux) // UpperCase ident, not at top level
+ * `bar`
+ * Bar
+ * (a, b)
+ * _: T
* }}}
*
* If the pattern is a simple identifier, it is always
@@ -305,10 +307,6 @@ abstract class TreeInfo {
tree match {
case Bind(name, pat) => isVarPatternDeep0(pat)
case Ident(name) => isVarPattern(tree)
- case Apply(sel, args) =>
- ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName)
- && (args forall isVarPatternDeep0)
- )
case _ => false
}
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 431afd286d..a528a9ced8 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1440,6 +1440,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree.hasSymbol) {
subst(from, to)
tree match {
+ case _: DefTree =>
+ val newInfo = symSubst(tree.symbol.info)
+ if (!(newInfo =:= tree.symbol.info)) {
+ debuglog(sm"""
+ |TreeSymSubstituter: updated info of symbol ${tree.symbol}
+ | Old: ${showRaw(tree.symbol.info, printTypes = true, printIds = true)}
+ | New: ${showRaw(newInfo, printTypes = true, printIds = true)}""")
+ tree.symbol updateInfo newInfo
+ }
+ case _ =>
+            // No special handling is required for Function or Import nodes here,
+            // as they don't have interesting infos attached to their symbols.
+            // Substitution of the referenced symbol of Return nodes is handled
+            // in ChangeOwnerTraverser.
+ }
+ tree match {
case Ident(name0) if tree.symbol != NoSymbol =>
treeCopy.Ident(tree, tree.symbol.name)
case Select(qual, name0) if tree.symbol != NoSymbol =>
@@ -1488,6 +1504,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ trait TreeStackTraverser extends Traverser {
+ import collection.mutable
+ val path: mutable.Stack[Tree] = mutable.Stack()
+ abstract override def traverse(t: Tree) = {
+ path push t
+ try super.traverse(t) finally path.pop()
+ }
+ }
+
private lazy val duplicator = new Transformer {
override val treeCopy = newStrictTreeCopier
override def transform(t: Tree) = {
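A hedged sketch of how the new TreeStackTraverser mixin might be used (the reporting logic is illustrative): the `path` stack exposes the ancestors of the node currently being visited, here to find the enclosing DefDef of each identifier.

abstract class EnclosingDefSketch {
  val global: scala.tools.nsc.Global
  import global._

  def printEnclosingDefs(tree: Tree): Unit = {
    val traverser = new Traverser with TreeStackTraverser {
      override def traverse(t: Tree): Unit = {
        t match {
          case Ident(name) =>
            // `path` holds the enclosing trees, innermost first
            val encl = path collectFirst { case dd: DefDef => dd.name }
            println(s"$name is used inside ${encl.getOrElse("<no enclosing def>")}")
          case _ =>
        }
        super.traverse(t)
      }
    }
    traverser traverse tree
  }
}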
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index dbc00edb1a..9d4bdab837 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -22,6 +22,8 @@ import util.ThreeValues._
// internal: error
case WildcardType =>
// internal: unknown
+ case BoundedWildcardType(bounds) =>
+ // internal: unknown
case NoType =>
case NoPrefix =>
case ThisType(sym) =>
@@ -744,7 +746,7 @@ trait Types extends api.Types { self: SymbolTable =>
val trivial = (
this.isTrivial
|| phase.erasedTypes && pre.typeSymbol != ArrayClass
- || pre.normalize.isTrivial && !isPossiblePrefix(clazz)
+ || skipPrefixOf(pre, clazz)
)
if (trivial) this
else {
@@ -3590,12 +3592,6 @@ trait Types extends api.Types { self: SymbolTable =>
val pre1 = pre match {
case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred =>
x.thistpe
- case _: CompoundType if sym1.isClass =>
- // sharpen prefix so that it is maximal and still contains the class.
- pre.parents.reverse dropWhile (_.member(sym1.name) != sym1) match {
- case Nil => pre
- case parent :: _ => parent
- }
case _ => pre
}
if (pre eq pre1) TypeRef(pre, sym1, args)
@@ -3852,12 +3848,16 @@ trait Types extends api.Types { self: SymbolTable =>
// This is the specified behavior.
protected def etaExpandKeepsStar = false
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
object dropRepeatedParamType extends TypeMap {
def apply(tp: Type): Type = tp match {
case MethodType(params, restpe) =>
- MethodType(params, apply(restpe))
- case PolyType(tparams, restpe) =>
- PolyType(tparams, apply(restpe))
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
seqType(arg)
case _ =>
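
For reference, the surface behaviour this map mirrors (a standalone snippet, not the TypeMap itself): a repeated parameter T* is seen as a Seq[T] inside the method body, while the parameter position itself keeps its T* type, which is why the map now leaves the params untouched and only rewrites the result type.

{{{
object RepeatedParamSketch {
  def sum(xs: Int*): Int = xs.sum   // inside the body, xs is a Seq[Int]
  def main(args: Array[String]): Unit =
    println(sum(1, 2, 3))           // prints 6
}
}}}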
@@ -4469,14 +4469,15 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+ private def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
/** A map to compute the asSeenFrom method */
class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
- private def skipPrefixOf(pre: Type, clazz: Symbol) = (
- (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
- )
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object annotationArgRewriter extends TypeMapTransformer {
private def canRewriteThis(sym: Symbol) = (
@@ -4509,8 +4510,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
def apply(tp: Type): Type =
- if (skipPrefixOf(pre, clazz)) tp
- else tp match {
+ tp match {
case ThisType(sym) =>
def toPrefix(pre: Type, clazz: Symbol): Type =
if (skipPrefixOf(pre, clazz)) tp
@@ -4672,6 +4672,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** A map to implement the `substSym` method. */
class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
case SingleType(pre, _) => singleType(pre, sym)
@@ -4704,23 +4706,13 @@ trait Types extends api.Types { self: SymbolTable =>
case idx => Some(to(idx))
}
- override def transform(tree: Tree) =
- tree match {
- case tree@Ident(_) =>
- termMapsTo(tree.symbol) match {
- case Some(tosym) =>
- if (tosym.info.bounds.hi.typeSymbol isSubClass SingletonClass) {
- Ident(tosym.existentialToString)
- .setSymbol(tosym)
- .setPos(tosym.pos)
- .setType(dropSingletonType(tosym.info.bounds.hi))
- } else {
- giveup()
- }
- case none => super.transform(tree)
- }
- case tree => super.transform(tree)
+ override def transform(tree: Tree) = {
+ termMapsTo(tree.symbol) match {
+ case Some(tosym) => tree.symbol = tosym
+ case None => ()
}
+ super.transform(tree)
+ }
}
trans.transform(tree)
}
@@ -6073,7 +6065,7 @@ trait Types extends api.Types { self: SymbolTable =>
(sameLength(params1, params2) &&
mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- isSubType(res1, res2.substSym(params2, params1), depth))
+ isSubType(res1.substSym(params1, params2), res2, depth))
// TODO: if mt1.params.isEmpty, consider NullaryMethodType?
case _ =>
false
@@ -6616,7 +6608,7 @@ trait Types extends api.Types { self: SymbolTable =>
val ts0 = elimSub0(ts)
if (ts0.isEmpty || ts0.tail.isEmpty) ts0
else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.underlying))
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
if (ts1 eq ts0) ts0
else elimSub(ts1, depth)
}
@@ -6733,6 +6725,8 @@ trait Types extends api.Types { self: SymbolTable =>
NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
case ts @ TypeBounds(_, _) :: rest =>
TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
case ts =>
lubResults get (depth, ts) match {
case Some(lubType) =>
@@ -7112,6 +7106,14 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ def isJavaVarargsAncestor(clazz: Symbol) = (
+ clazz.isClass
+ && clazz.isJavaDefined
+ && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
+ )
+ def inheritsJavaVarArgsMethod(clazz: Symbol) =
+ clazz.thisType.baseClasses exists isJavaVarargsAncestor
+
/** All types in list must be polytypes with type parameter lists of
* same length as tparams.
* Returns list of list of bounds infos, where corresponding type
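
A rough analogue of the new helpers at the JVM-reflection level (a hypothetical snippet, not the compiler's symbol-table check): ask whether a Java class declares any varargs method.

{{{
object VarargsSketch {
  def declaresVarargsMethod(clazz: Class[_]): Boolean =
    clazz.getDeclaredMethods.exists(_.isVarArgs)

  def main(args: Array[String]): Unit =
    println(declaresVarargsMethod(classOf[java.util.Arrays]))  // asList(T...) => true
}
}}}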
@@ -7224,6 +7226,12 @@ trait Types extends api.Types { self: SymbolTable =>
else (ps :+ SerializableClass.tpe).toList
)
+ /** Members of the given class, other than those inherited
+ * from Any or AnyRef.
+ */
+ def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
+ clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
else tp
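
The same filtering idea expressed with plain Java reflection (a hypothetical helper, not the Symbol-based one above): keep only the methods that are not inherited from java.lang.Object.

{{{
object NonTrivialMembersSketch {
  def nonTrivial(clazz: Class[_]): Seq[java.lang.reflect.Method] =
    clazz.getMethods.toSeq.filterNot(_.getDeclaringClass == classOf[Object])

  def main(args: Array[String]): Unit =
    nonTrivial(classOf[String]).map(_.getName).distinct.take(5).foreach(println)
}
}}}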
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
new file mode 100644
index 0000000000..058ff61fbf
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
@@ -0,0 +1,31 @@
+package scala.reflect
+package internal
+package annotations
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates a member should not be referred to after
+ * type checking (which includes macro expansion); it must only be used in
+ * the arguments of some other macro that will eliminate it from the AST.
+ *
+ * Later on, this annotation should be removed and implemented with domain-specific macros.
+ * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
+ * then it should itself be declared as a macro.
+ *
+ * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
+ * reports an error if `outer` is not detected. In principle, we could use this approach right now,
+ * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
+ * track of the stack of the trees being typechecked.
+ *
+ * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
+ * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
+ * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
+ * domain-specific logic.
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.10.1
+ */
+@getter @setter @beanGetter @beanSetter
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
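
A hedged sketch of how such an annotation is meant to be used (onlyInsideMacro and splice are made-up stand-ins for the annotation above and a member it would guard): the member exists only so that a macro can recognise and eliminate it, and any reference surviving type checking should be reported with the given message.

{{{
import scala.annotation.StaticAnnotation

// stand-in with the same shape as the annotation added above
final class onlyInsideMacro(message: String) extends StaticAnnotation

object Splices {
  @onlyInsideMacro("`splice` must be eliminated by the enclosing macro")
  def splice[T]: T = sys.error("splice was not eliminated by macro expansion")
}
}}}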
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 3d10d4c9ce..8f287a1640 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -91,7 +91,7 @@ abstract class Position extends scala.reflect.api.Position { self =>
/** An optional value containing the source file referred to by this position, or
* None if not defined.
*/
- def source: SourceFile = throw new UnsupportedOperationException("Position.source")
+ def source: SourceFile = throw new UnsupportedOperationException(s"Position.source on ${this.getClass}")
/** Is this position neither a NoPosition nor a FakePosition?
* If isDefined is true, offset and source are both defined.
@@ -111,19 +111,19 @@ abstract class Position extends scala.reflect.api.Position { self =>
def makeTransparent: Position = this
/** The start of the position's range, error if not a range position */
- def start: Int = throw new UnsupportedOperationException("Position.start")
+ def start: Int = throw new UnsupportedOperationException(s"Position.start on ${this.getClass}")
/** The start of the position's range, or point if not a range position */
def startOrPoint: Int = point
/** The point (where the ^ is) of the position */
- def point: Int = throw new UnsupportedOperationException("Position.point")
+ def point: Int = throw new UnsupportedOperationException(s"Position.point on ${this.getClass}")
/** The point (where the ^ is) of the position, or else `default` if undefined */
def pointOrElse(default: Int): Int = default
/** The end of the position's range, error if not a range position */
- def end: Int = throw new UnsupportedOperationException("Position.end")
+ def end: Int = throw new UnsupportedOperationException(s"Position.end on ${this.getClass}")
/** The end of the position's range, or point if not a range position */
def endOrPoint: Int = point
diff --git a/src/reflect/scala/reflect/macros/compileTimeOnly.scala b/src/reflect/scala/reflect/macros/compileTimeOnly.scala
deleted file mode 100644
index 5a3a352a53..0000000000
--- a/src/reflect/scala/reflect/macros/compileTimeOnly.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.reflect
-package macros
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * @param message the error message to print during compilation if a reference remains
- * after type checking
- * @since 2.10.1
- */
-@getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 7c84279699..90f8cb8d71 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -184,7 +184,7 @@ object Main extends Main {
val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
val path = cparg match {
case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString("")
+ case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
}
// print the classpath if output is verbose
if (verbose)
diff --git a/test/benchmarking/t6726-patmat-analysis.scala b/test/benchmarking/t6726-patmat-analysis.scala
new file mode 100644
index 0000000000..bcb7f6c6b2
--- /dev/null
+++ b/test/benchmarking/t6726-patmat-analysis.scala
@@ -0,0 +1,4005 @@
+trait Foo{
+abstract class Base
+case class Dummy0(x: Int) extends Base
+case class Dummy1(x: Int) extends Base
+case class Dummy2(x: Int) extends Base
+case class Dummy3(x: Int) extends Base
+case class Dummy4(x: Int) extends Base
+case class Dummy5(x: Int) extends Base
+case class Dummy6(x: Int) extends Base
+case class Dummy7(x: Int) extends Base
+case class Dummy8(x: Int) extends Base
+case class Dummy9(x: Int) extends Base
+case class Dummy10(x: Int) extends Base
+case class Dummy11(x: Int) extends Base
+case class Dummy12(x: Int) extends Base
+case class Dummy13(x: Int) extends Base
+case class Dummy14(x: Int) extends Base
+case class Dummy15(x: Int) extends Base
+case class Dummy16(x: Int) extends Base
+case class Dummy17(x: Int) extends Base
+case class Dummy18(x: Int) extends Base
+case class Dummy19(x: Int) extends Base
+case class Dummy20(x: Int) extends Base
+case class Dummy21(x: Int) extends Base
+case class Dummy22(x: Int) extends Base
+case class Dummy23(x: Int) extends Base
+case class Dummy24(x: Int) extends Base
+case class Dummy25(x: Int) extends Base
+case class Dummy26(x: Int) extends Base
+case class Dummy27(x: Int) extends Base
+case class Dummy28(x: Int) extends Base
+case class Dummy29(x: Int) extends Base
+case class Dummy30(x: Int) extends Base
+case class Dummy31(x: Int) extends Base
+case class Dummy32(x: Int) extends Base
+case class Dummy33(x: Int) extends Base
+case class Dummy34(x: Int) extends Base
+case class Dummy35(x: Int) extends Base
+case class Dummy36(x: Int) extends Base
+case class Dummy37(x: Int) extends Base
+case class Dummy38(x: Int) extends Base
+case class Dummy39(x: Int) extends Base
+case class Dummy40(x: Int) extends Base
+case class Dummy41(x: Int) extends Base
+case class Dummy42(x: Int) extends Base
+case class Dummy43(x: Int) extends Base
+case class Dummy44(x: Int) extends Base
+case class Dummy45(x: Int) extends Base
+case class Dummy46(x: Int) extends Base
+case class Dummy47(x: Int) extends Base
+case class Dummy48(x: Int) extends Base
+case class Dummy49(x: Int) extends Base
+case class Dummy50(x: Int) extends Base
+case class Dummy51(x: Int) extends Base
+case class Dummy52(x: Int) extends Base
+case class Dummy53(x: Int) extends Base
+case class Dummy54(x: Int) extends Base
+case class Dummy55(x: Int) extends Base
+case class Dummy56(x: Int) extends Base
+case class Dummy57(x: Int) extends Base
+case class Dummy58(x: Int) extends Base
+case class Dummy59(x: Int) extends Base
+case class Dummy60(x: Int) extends Base
+case class Dummy61(x: Int) extends Base
+case class Dummy62(x: Int) extends Base
+case class Dummy63(x: Int) extends Base
+case class Dummy64(x: Int) extends Base
+case class Dummy65(x: Int) extends Base
+case class Dummy66(x: Int) extends Base
+case class Dummy67(x: Int) extends Base
+case class Dummy68(x: Int) extends Base
+case class Dummy69(x: Int) extends Base
+case class Dummy70(x: Int) extends Base
+case class Dummy71(x: Int) extends Base
+case class Dummy72(x: Int) extends Base
+case class Dummy73(x: Int) extends Base
+case class Dummy74(x: Int) extends Base
+case class Dummy75(x: Int) extends Base
+case class Dummy76(x: Int) extends Base
+case class Dummy77(x: Int) extends Base
+case class Dummy78(x: Int) extends Base
+case class Dummy79(x: Int) extends Base
+case class Dummy80(x: Int) extends Base
+case class Dummy81(x: Int) extends Base
+case class Dummy82(x: Int) extends Base
+case class Dummy83(x: Int) extends Base
+case class Dummy84(x: Int) extends Base
+case class Dummy85(x: Int) extends Base
+case class Dummy86(x: Int) extends Base
+case class Dummy87(x: Int) extends Base
+case class Dummy88(x: Int) extends Base
+case class Dummy89(x: Int) extends Base
+case class Dummy90(x: Int) extends Base
+case class Dummy91(x: Int) extends Base
+case class Dummy92(x: Int) extends Base
+case class Dummy93(x: Int) extends Base
+case class Dummy94(x: Int) extends Base
+case class Dummy95(x: Int) extends Base
+case class Dummy96(x: Int) extends Base
+case class Dummy97(x: Int) extends Base
+case class Dummy98(x: Int) extends Base
+case class Dummy99(x: Int) extends Base
+case class Dummy100(x: Int) extends Base
+case class Dummy101(x: Int) extends Base
+case class Dummy102(x: Int) extends Base
+case class Dummy103(x: Int) extends Base
+case class Dummy104(x: Int) extends Base
+case class Dummy105(x: Int) extends Base
+case class Dummy106(x: Int) extends Base
+case class Dummy107(x: Int) extends Base
+case class Dummy108(x: Int) extends Base
+case class Dummy109(x: Int) extends Base
+case class Dummy110(x: Int) extends Base
+case class Dummy111(x: Int) extends Base
+case class Dummy112(x: Int) extends Base
+case class Dummy113(x: Int) extends Base
+case class Dummy114(x: Int) extends Base
+case class Dummy115(x: Int) extends Base
+case class Dummy116(x: Int) extends Base
+case class Dummy117(x: Int) extends Base
+case class Dummy118(x: Int) extends Base
+case class Dummy119(x: Int) extends Base
+case class Dummy120(x: Int) extends Base
+case class Dummy121(x: Int) extends Base
+case class Dummy122(x: Int) extends Base
+case class Dummy123(x: Int) extends Base
+case class Dummy124(x: Int) extends Base
+case class Dummy125(x: Int) extends Base
+case class Dummy126(x: Int) extends Base
+case class Dummy127(x: Int) extends Base
+case class Dummy128(x: Int) extends Base
+case class Dummy129(x: Int) extends Base
+case class Dummy130(x: Int) extends Base
+case class Dummy131(x: Int) extends Base
+case class Dummy132(x: Int) extends Base
+case class Dummy133(x: Int) extends Base
+case class Dummy134(x: Int) extends Base
+case class Dummy135(x: Int) extends Base
+case class Dummy136(x: Int) extends Base
+case class Dummy137(x: Int) extends Base
+case class Dummy138(x: Int) extends Base
+case class Dummy139(x: Int) extends Base
+case class Dummy140(x: Int) extends Base
+case class Dummy141(x: Int) extends Base
+case class Dummy142(x: Int) extends Base
+case class Dummy143(x: Int) extends Base
+case class Dummy144(x: Int) extends Base
+case class Dummy145(x: Int) extends Base
+case class Dummy146(x: Int) extends Base
+case class Dummy147(x: Int) extends Base
+case class Dummy148(x: Int) extends Base
+case class Dummy149(x: Int) extends Base
+case class Dummy150(x: Int) extends Base
+case class Dummy151(x: Int) extends Base
+case class Dummy152(x: Int) extends Base
+case class Dummy153(x: Int) extends Base
+case class Dummy154(x: Int) extends Base
+case class Dummy155(x: Int) extends Base
+case class Dummy156(x: Int) extends Base
+case class Dummy157(x: Int) extends Base
+case class Dummy158(x: Int) extends Base
+case class Dummy159(x: Int) extends Base
+case class Dummy160(x: Int) extends Base
+case class Dummy161(x: Int) extends Base
+case class Dummy162(x: Int) extends Base
+case class Dummy163(x: Int) extends Base
+case class Dummy164(x: Int) extends Base
+case class Dummy165(x: Int) extends Base
+case class Dummy166(x: Int) extends Base
+case class Dummy167(x: Int) extends Base
+case class Dummy168(x: Int) extends Base
+case class Dummy169(x: Int) extends Base
+case class Dummy170(x: Int) extends Base
+case class Dummy171(x: Int) extends Base
+case class Dummy172(x: Int) extends Base
+case class Dummy173(x: Int) extends Base
+case class Dummy174(x: Int) extends Base
+case class Dummy175(x: Int) extends Base
+case class Dummy176(x: Int) extends Base
+case class Dummy177(x: Int) extends Base
+case class Dummy178(x: Int) extends Base
+case class Dummy179(x: Int) extends Base
+case class Dummy180(x: Int) extends Base
+case class Dummy181(x: Int) extends Base
+case class Dummy182(x: Int) extends Base
+case class Dummy183(x: Int) extends Base
+case class Dummy184(x: Int) extends Base
+case class Dummy185(x: Int) extends Base
+case class Dummy186(x: Int) extends Base
+case class Dummy187(x: Int) extends Base
+case class Dummy188(x: Int) extends Base
+case class Dummy189(x: Int) extends Base
+case class Dummy190(x: Int) extends Base
+case class Dummy191(x: Int) extends Base
+case class Dummy192(x: Int) extends Base
+case class Dummy193(x: Int) extends Base
+case class Dummy194(x: Int) extends Base
+case class Dummy195(x: Int) extends Base
+case class Dummy196(x: Int) extends Base
+case class Dummy197(x: Int) extends Base
+case class Dummy198(x: Int) extends Base
+case class Dummy199(x: Int) extends Base
+case class Dummy200(x: Int) extends Base
+case class Dummy201(x: Int) extends Base
+case class Dummy202(x: Int) extends Base
+case class Dummy203(x: Int) extends Base
+case class Dummy204(x: Int) extends Base
+case class Dummy205(x: Int) extends Base
+case class Dummy206(x: Int) extends Base
+case class Dummy207(x: Int) extends Base
+case class Dummy208(x: Int) extends Base
+case class Dummy209(x: Int) extends Base
+case class Dummy210(x: Int) extends Base
+case class Dummy211(x: Int) extends Base
+case class Dummy212(x: Int) extends Base
+case class Dummy213(x: Int) extends Base
+case class Dummy214(x: Int) extends Base
+case class Dummy215(x: Int) extends Base
+case class Dummy216(x: Int) extends Base
+case class Dummy217(x: Int) extends Base
+case class Dummy218(x: Int) extends Base
+case class Dummy219(x: Int) extends Base
+case class Dummy220(x: Int) extends Base
+case class Dummy221(x: Int) extends Base
+case class Dummy222(x: Int) extends Base
+case class Dummy223(x: Int) extends Base
+case class Dummy224(x: Int) extends Base
+case class Dummy225(x: Int) extends Base
+case class Dummy226(x: Int) extends Base
+case class Dummy227(x: Int) extends Base
+case class Dummy228(x: Int) extends Base
+case class Dummy229(x: Int) extends Base
+case class Dummy230(x: Int) extends Base
+case class Dummy231(x: Int) extends Base
+case class Dummy232(x: Int) extends Base
+case class Dummy233(x: Int) extends Base
+case class Dummy234(x: Int) extends Base
+case class Dummy235(x: Int) extends Base
+case class Dummy236(x: Int) extends Base
+case class Dummy237(x: Int) extends Base
+case class Dummy238(x: Int) extends Base
+case class Dummy239(x: Int) extends Base
+case class Dummy240(x: Int) extends Base
+case class Dummy241(x: Int) extends Base
+case class Dummy242(x: Int) extends Base
+case class Dummy243(x: Int) extends Base
+case class Dummy244(x: Int) extends Base
+case class Dummy245(x: Int) extends Base
+case class Dummy246(x: Int) extends Base
+case class Dummy247(x: Int) extends Base
+case class Dummy248(x: Int) extends Base
+case class Dummy249(x: Int) extends Base
+case class Dummy250(x: Int) extends Base
+case class Dummy251(x: Int) extends Base
+case class Dummy252(x: Int) extends Base
+case class Dummy253(x: Int) extends Base
+case class Dummy254(x: Int) extends Base
+case class Dummy255(x: Int) extends Base
+case class Dummy256(x: Int) extends Base
+case class Dummy257(x: Int) extends Base
+case class Dummy258(x: Int) extends Base
+case class Dummy259(x: Int) extends Base
+case class Dummy260(x: Int) extends Base
+case class Dummy261(x: Int) extends Base
+case class Dummy262(x: Int) extends Base
+case class Dummy263(x: Int) extends Base
+case class Dummy264(x: Int) extends Base
+case class Dummy265(x: Int) extends Base
+case class Dummy266(x: Int) extends Base
+case class Dummy267(x: Int) extends Base
+case class Dummy268(x: Int) extends Base
+case class Dummy269(x: Int) extends Base
+case class Dummy270(x: Int) extends Base
+case class Dummy271(x: Int) extends Base
+case class Dummy272(x: Int) extends Base
+case class Dummy273(x: Int) extends Base
+case class Dummy274(x: Int) extends Base
+case class Dummy275(x: Int) extends Base
+case class Dummy276(x: Int) extends Base
+case class Dummy277(x: Int) extends Base
+case class Dummy278(x: Int) extends Base
+case class Dummy279(x: Int) extends Base
+case class Dummy280(x: Int) extends Base
+case class Dummy281(x: Int) extends Base
+case class Dummy282(x: Int) extends Base
+case class Dummy283(x: Int) extends Base
+case class Dummy284(x: Int) extends Base
+case class Dummy285(x: Int) extends Base
+case class Dummy286(x: Int) extends Base
+case class Dummy287(x: Int) extends Base
+case class Dummy288(x: Int) extends Base
+case class Dummy289(x: Int) extends Base
+case class Dummy290(x: Int) extends Base
+case class Dummy291(x: Int) extends Base
+case class Dummy292(x: Int) extends Base
+case class Dummy293(x: Int) extends Base
+case class Dummy294(x: Int) extends Base
+case class Dummy295(x: Int) extends Base
+case class Dummy296(x: Int) extends Base
+case class Dummy297(x: Int) extends Base
+case class Dummy298(x: Int) extends Base
+case class Dummy299(x: Int) extends Base
+case class Dummy300(x: Int) extends Base
+case class Dummy301(x: Int) extends Base
+case class Dummy302(x: Int) extends Base
+case class Dummy303(x: Int) extends Base
+case class Dummy304(x: Int) extends Base
+case class Dummy305(x: Int) extends Base
+case class Dummy306(x: Int) extends Base
+case class Dummy307(x: Int) extends Base
+case class Dummy308(x: Int) extends Base
+case class Dummy309(x: Int) extends Base
+case class Dummy310(x: Int) extends Base
+case class Dummy311(x: Int) extends Base
+case class Dummy312(x: Int) extends Base
+case class Dummy313(x: Int) extends Base
+case class Dummy314(x: Int) extends Base
+case class Dummy315(x: Int) extends Base
+case class Dummy316(x: Int) extends Base
+case class Dummy317(x: Int) extends Base
+case class Dummy318(x: Int) extends Base
+case class Dummy319(x: Int) extends Base
+case class Dummy320(x: Int) extends Base
+case class Dummy321(x: Int) extends Base
+case class Dummy322(x: Int) extends Base
+case class Dummy323(x: Int) extends Base
+case class Dummy324(x: Int) extends Base
+case class Dummy325(x: Int) extends Base
+case class Dummy326(x: Int) extends Base
+case class Dummy327(x: Int) extends Base
+case class Dummy328(x: Int) extends Base
+case class Dummy329(x: Int) extends Base
+case class Dummy330(x: Int) extends Base
+case class Dummy331(x: Int) extends Base
+case class Dummy332(x: Int) extends Base
+case class Dummy333(x: Int) extends Base
+case class Dummy334(x: Int) extends Base
+case class Dummy335(x: Int) extends Base
+case class Dummy336(x: Int) extends Base
+case class Dummy337(x: Int) extends Base
+case class Dummy338(x: Int) extends Base
+case class Dummy339(x: Int) extends Base
+case class Dummy340(x: Int) extends Base
+case class Dummy341(x: Int) extends Base
+case class Dummy342(x: Int) extends Base
+case class Dummy343(x: Int) extends Base
+case class Dummy344(x: Int) extends Base
+case class Dummy345(x: Int) extends Base
+case class Dummy346(x: Int) extends Base
+case class Dummy347(x: Int) extends Base
+case class Dummy348(x: Int) extends Base
+case class Dummy349(x: Int) extends Base
+case class Dummy350(x: Int) extends Base
+case class Dummy351(x: Int) extends Base
+case class Dummy352(x: Int) extends Base
+case class Dummy353(x: Int) extends Base
+case class Dummy354(x: Int) extends Base
+case class Dummy355(x: Int) extends Base
+case class Dummy356(x: Int) extends Base
+case class Dummy357(x: Int) extends Base
+case class Dummy358(x: Int) extends Base
+case class Dummy359(x: Int) extends Base
+case class Dummy360(x: Int) extends Base
+case class Dummy361(x: Int) extends Base
+case class Dummy362(x: Int) extends Base
+case class Dummy363(x: Int) extends Base
+case class Dummy364(x: Int) extends Base
+case class Dummy365(x: Int) extends Base
+case class Dummy366(x: Int) extends Base
+case class Dummy367(x: Int) extends Base
+case class Dummy368(x: Int) extends Base
+case class Dummy369(x: Int) extends Base
+case class Dummy370(x: Int) extends Base
+case class Dummy371(x: Int) extends Base
+case class Dummy372(x: Int) extends Base
+case class Dummy373(x: Int) extends Base
+case class Dummy374(x: Int) extends Base
+case class Dummy375(x: Int) extends Base
+case class Dummy376(x: Int) extends Base
+case class Dummy377(x: Int) extends Base
+case class Dummy378(x: Int) extends Base
+case class Dummy379(x: Int) extends Base
+case class Dummy380(x: Int) extends Base
+case class Dummy381(x: Int) extends Base
+case class Dummy382(x: Int) extends Base
+case class Dummy383(x: Int) extends Base
+case class Dummy384(x: Int) extends Base
+case class Dummy385(x: Int) extends Base
+case class Dummy386(x: Int) extends Base
+case class Dummy387(x: Int) extends Base
+case class Dummy388(x: Int) extends Base
+case class Dummy389(x: Int) extends Base
+case class Dummy390(x: Int) extends Base
+case class Dummy391(x: Int) extends Base
+case class Dummy392(x: Int) extends Base
+case class Dummy393(x: Int) extends Base
+case class Dummy394(x: Int) extends Base
+case class Dummy395(x: Int) extends Base
+case class Dummy396(x: Int) extends Base
+case class Dummy397(x: Int) extends Base
+case class Dummy398(x: Int) extends Base
+case class Dummy399(x: Int) extends Base
+case class Dummy400(x: Int) extends Base
+case class Dummy401(x: Int) extends Base
+case class Dummy402(x: Int) extends Base
+case class Dummy403(x: Int) extends Base
+case class Dummy404(x: Int) extends Base
+case class Dummy405(x: Int) extends Base
+case class Dummy406(x: Int) extends Base
+case class Dummy407(x: Int) extends Base
+case class Dummy408(x: Int) extends Base
+case class Dummy409(x: Int) extends Base
+case class Dummy410(x: Int) extends Base
+case class Dummy411(x: Int) extends Base
+case class Dummy412(x: Int) extends Base
+case class Dummy413(x: Int) extends Base
+case class Dummy414(x: Int) extends Base
+case class Dummy415(x: Int) extends Base
+case class Dummy416(x: Int) extends Base
+case class Dummy417(x: Int) extends Base
+case class Dummy418(x: Int) extends Base
+case class Dummy419(x: Int) extends Base
+case class Dummy420(x: Int) extends Base
+case class Dummy421(x: Int) extends Base
+case class Dummy422(x: Int) extends Base
+case class Dummy423(x: Int) extends Base
+case class Dummy424(x: Int) extends Base
+case class Dummy425(x: Int) extends Base
+case class Dummy426(x: Int) extends Base
+case class Dummy427(x: Int) extends Base
+case class Dummy428(x: Int) extends Base
+case class Dummy429(x: Int) extends Base
+case class Dummy430(x: Int) extends Base
+case class Dummy431(x: Int) extends Base
+case class Dummy432(x: Int) extends Base
+case class Dummy433(x: Int) extends Base
+case class Dummy434(x: Int) extends Base
+case class Dummy435(x: Int) extends Base
+case class Dummy436(x: Int) extends Base
+case class Dummy437(x: Int) extends Base
+case class Dummy438(x: Int) extends Base
+case class Dummy439(x: Int) extends Base
+case class Dummy440(x: Int) extends Base
+case class Dummy441(x: Int) extends Base
+case class Dummy442(x: Int) extends Base
+case class Dummy443(x: Int) extends Base
+case class Dummy444(x: Int) extends Base
+case class Dummy445(x: Int) extends Base
+case class Dummy446(x: Int) extends Base
+case class Dummy447(x: Int) extends Base
+case class Dummy448(x: Int) extends Base
+case class Dummy449(x: Int) extends Base
+case class Dummy450(x: Int) extends Base
+case class Dummy451(x: Int) extends Base
+case class Dummy452(x: Int) extends Base
+case class Dummy453(x: Int) extends Base
+case class Dummy454(x: Int) extends Base
+case class Dummy455(x: Int) extends Base
+case class Dummy456(x: Int) extends Base
+case class Dummy457(x: Int) extends Base
+case class Dummy458(x: Int) extends Base
+case class Dummy459(x: Int) extends Base
+case class Dummy460(x: Int) extends Base
+case class Dummy461(x: Int) extends Base
+case class Dummy462(x: Int) extends Base
+case class Dummy463(x: Int) extends Base
+case class Dummy464(x: Int) extends Base
+case class Dummy465(x: Int) extends Base
+case class Dummy466(x: Int) extends Base
+case class Dummy467(x: Int) extends Base
+case class Dummy468(x: Int) extends Base
+case class Dummy469(x: Int) extends Base
+case class Dummy470(x: Int) extends Base
+case class Dummy471(x: Int) extends Base
+case class Dummy472(x: Int) extends Base
+case class Dummy473(x: Int) extends Base
+case class Dummy474(x: Int) extends Base
+case class Dummy475(x: Int) extends Base
+case class Dummy476(x: Int) extends Base
+case class Dummy477(x: Int) extends Base
+case class Dummy478(x: Int) extends Base
+case class Dummy479(x: Int) extends Base
+case class Dummy480(x: Int) extends Base
+case class Dummy481(x: Int) extends Base
+case class Dummy482(x: Int) extends Base
+case class Dummy483(x: Int) extends Base
+case class Dummy484(x: Int) extends Base
+case class Dummy485(x: Int) extends Base
+case class Dummy486(x: Int) extends Base
+case class Dummy487(x: Int) extends Base
+case class Dummy488(x: Int) extends Base
+case class Dummy489(x: Int) extends Base
+case class Dummy490(x: Int) extends Base
+case class Dummy491(x: Int) extends Base
+case class Dummy492(x: Int) extends Base
+case class Dummy493(x: Int) extends Base
+case class Dummy494(x: Int) extends Base
+case class Dummy495(x: Int) extends Base
+case class Dummy496(x: Int) extends Base
+case class Dummy497(x: Int) extends Base
+case class Dummy498(x: Int) extends Base
+case class Dummy499(x: Int) extends Base
+case class Dummy500(x: Int) extends Base
+case class Dummy501(x: Int) extends Base
+case class Dummy502(x: Int) extends Base
+case class Dummy503(x: Int) extends Base
+case class Dummy504(x: Int) extends Base
+case class Dummy505(x: Int) extends Base
+case class Dummy506(x: Int) extends Base
+case class Dummy507(x: Int) extends Base
+case class Dummy508(x: Int) extends Base
+case class Dummy509(x: Int) extends Base
+case class Dummy510(x: Int) extends Base
+case class Dummy511(x: Int) extends Base
+case class Dummy512(x: Int) extends Base
+case class Dummy513(x: Int) extends Base
+case class Dummy514(x: Int) extends Base
+case class Dummy515(x: Int) extends Base
+case class Dummy516(x: Int) extends Base
+case class Dummy517(x: Int) extends Base
+case class Dummy518(x: Int) extends Base
+case class Dummy519(x: Int) extends Base
+case class Dummy520(x: Int) extends Base
+case class Dummy521(x: Int) extends Base
+case class Dummy522(x: Int) extends Base
+case class Dummy523(x: Int) extends Base
+case class Dummy524(x: Int) extends Base
+case class Dummy525(x: Int) extends Base
+case class Dummy526(x: Int) extends Base
+case class Dummy527(x: Int) extends Base
+case class Dummy528(x: Int) extends Base
+case class Dummy529(x: Int) extends Base
+case class Dummy530(x: Int) extends Base
+case class Dummy531(x: Int) extends Base
+case class Dummy532(x: Int) extends Base
+case class Dummy533(x: Int) extends Base
+case class Dummy534(x: Int) extends Base
+case class Dummy535(x: Int) extends Base
+case class Dummy536(x: Int) extends Base
+case class Dummy537(x: Int) extends Base
+case class Dummy538(x: Int) extends Base
+case class Dummy539(x: Int) extends Base
+case class Dummy540(x: Int) extends Base
+case class Dummy541(x: Int) extends Base
+case class Dummy542(x: Int) extends Base
+case class Dummy543(x: Int) extends Base
+case class Dummy544(x: Int) extends Base
+case class Dummy545(x: Int) extends Base
+case class Dummy546(x: Int) extends Base
+case class Dummy547(x: Int) extends Base
+case class Dummy548(x: Int) extends Base
+case class Dummy549(x: Int) extends Base
+case class Dummy550(x: Int) extends Base
+case class Dummy551(x: Int) extends Base
+case class Dummy552(x: Int) extends Base
+case class Dummy553(x: Int) extends Base
+case class Dummy554(x: Int) extends Base
+case class Dummy555(x: Int) extends Base
+case class Dummy556(x: Int) extends Base
+case class Dummy557(x: Int) extends Base
+case class Dummy558(x: Int) extends Base
+case class Dummy559(x: Int) extends Base
+case class Dummy560(x: Int) extends Base
+case class Dummy561(x: Int) extends Base
+case class Dummy562(x: Int) extends Base
+case class Dummy563(x: Int) extends Base
+case class Dummy564(x: Int) extends Base
+case class Dummy565(x: Int) extends Base
+case class Dummy566(x: Int) extends Base
+case class Dummy567(x: Int) extends Base
+case class Dummy568(x: Int) extends Base
+case class Dummy569(x: Int) extends Base
+case class Dummy570(x: Int) extends Base
+case class Dummy571(x: Int) extends Base
+case class Dummy572(x: Int) extends Base
+case class Dummy573(x: Int) extends Base
+case class Dummy574(x: Int) extends Base
+case class Dummy575(x: Int) extends Base
+case class Dummy576(x: Int) extends Base
+case class Dummy577(x: Int) extends Base
+case class Dummy578(x: Int) extends Base
+case class Dummy579(x: Int) extends Base
+case class Dummy580(x: Int) extends Base
+case class Dummy581(x: Int) extends Base
+case class Dummy582(x: Int) extends Base
+case class Dummy583(x: Int) extends Base
+case class Dummy584(x: Int) extends Base
+case class Dummy585(x: Int) extends Base
+case class Dummy586(x: Int) extends Base
+case class Dummy587(x: Int) extends Base
+case class Dummy588(x: Int) extends Base
+case class Dummy589(x: Int) extends Base
+case class Dummy590(x: Int) extends Base
+case class Dummy591(x: Int) extends Base
+case class Dummy592(x: Int) extends Base
+case class Dummy593(x: Int) extends Base
+case class Dummy594(x: Int) extends Base
+case class Dummy595(x: Int) extends Base
+case class Dummy596(x: Int) extends Base
+case class Dummy597(x: Int) extends Base
+case class Dummy598(x: Int) extends Base
+case class Dummy599(x: Int) extends Base
+case class Dummy600(x: Int) extends Base
+case class Dummy601(x: Int) extends Base
+case class Dummy602(x: Int) extends Base
+case class Dummy603(x: Int) extends Base
+case class Dummy604(x: Int) extends Base
+case class Dummy605(x: Int) extends Base
+case class Dummy606(x: Int) extends Base
+case class Dummy607(x: Int) extends Base
+case class Dummy608(x: Int) extends Base
+case class Dummy609(x: Int) extends Base
+case class Dummy610(x: Int) extends Base
+case class Dummy611(x: Int) extends Base
+case class Dummy612(x: Int) extends Base
+case class Dummy613(x: Int) extends Base
+case class Dummy614(x: Int) extends Base
+case class Dummy615(x: Int) extends Base
+case class Dummy616(x: Int) extends Base
+case class Dummy617(x: Int) extends Base
+case class Dummy618(x: Int) extends Base
+case class Dummy619(x: Int) extends Base
+case class Dummy620(x: Int) extends Base
+case class Dummy621(x: Int) extends Base
+case class Dummy622(x: Int) extends Base
+case class Dummy623(x: Int) extends Base
+case class Dummy624(x: Int) extends Base
+case class Dummy625(x: Int) extends Base
+case class Dummy626(x: Int) extends Base
+case class Dummy627(x: Int) extends Base
+case class Dummy628(x: Int) extends Base
+case class Dummy629(x: Int) extends Base
+case class Dummy630(x: Int) extends Base
+case class Dummy631(x: Int) extends Base
+case class Dummy632(x: Int) extends Base
+case class Dummy633(x: Int) extends Base
+case class Dummy634(x: Int) extends Base
+case class Dummy635(x: Int) extends Base
+case class Dummy636(x: Int) extends Base
+case class Dummy637(x: Int) extends Base
+case class Dummy638(x: Int) extends Base
+case class Dummy639(x: Int) extends Base
+case class Dummy640(x: Int) extends Base
+case class Dummy641(x: Int) extends Base
+case class Dummy642(x: Int) extends Base
+case class Dummy643(x: Int) extends Base
+case class Dummy644(x: Int) extends Base
+case class Dummy645(x: Int) extends Base
+case class Dummy646(x: Int) extends Base
+case class Dummy647(x: Int) extends Base
+case class Dummy648(x: Int) extends Base
+case class Dummy649(x: Int) extends Base
+case class Dummy650(x: Int) extends Base
+case class Dummy651(x: Int) extends Base
+case class Dummy652(x: Int) extends Base
+case class Dummy653(x: Int) extends Base
+case class Dummy654(x: Int) extends Base
+case class Dummy655(x: Int) extends Base
+case class Dummy656(x: Int) extends Base
+case class Dummy657(x: Int) extends Base
+case class Dummy658(x: Int) extends Base
+case class Dummy659(x: Int) extends Base
+case class Dummy660(x: Int) extends Base
+case class Dummy661(x: Int) extends Base
+case class Dummy662(x: Int) extends Base
+case class Dummy663(x: Int) extends Base
+case class Dummy664(x: Int) extends Base
+case class Dummy665(x: Int) extends Base
+case class Dummy666(x: Int) extends Base
+case class Dummy667(x: Int) extends Base
+case class Dummy668(x: Int) extends Base
+case class Dummy669(x: Int) extends Base
+case class Dummy670(x: Int) extends Base
+case class Dummy671(x: Int) extends Base
+case class Dummy672(x: Int) extends Base
+case class Dummy673(x: Int) extends Base
+case class Dummy674(x: Int) extends Base
+case class Dummy675(x: Int) extends Base
+case class Dummy676(x: Int) extends Base
+case class Dummy677(x: Int) extends Base
+case class Dummy678(x: Int) extends Base
+case class Dummy679(x: Int) extends Base
+case class Dummy680(x: Int) extends Base
+case class Dummy681(x: Int) extends Base
+case class Dummy682(x: Int) extends Base
+case class Dummy683(x: Int) extends Base
+case class Dummy684(x: Int) extends Base
+case class Dummy685(x: Int) extends Base
+case class Dummy686(x: Int) extends Base
+case class Dummy687(x: Int) extends Base
+case class Dummy688(x: Int) extends Base
+case class Dummy689(x: Int) extends Base
+case class Dummy690(x: Int) extends Base
+case class Dummy691(x: Int) extends Base
+case class Dummy692(x: Int) extends Base
+case class Dummy693(x: Int) extends Base
+case class Dummy694(x: Int) extends Base
+case class Dummy695(x: Int) extends Base
+case class Dummy696(x: Int) extends Base
+case class Dummy697(x: Int) extends Base
+case class Dummy698(x: Int) extends Base
+case class Dummy699(x: Int) extends Base
+case class Dummy700(x: Int) extends Base
+case class Dummy701(x: Int) extends Base
+case class Dummy702(x: Int) extends Base
+case class Dummy703(x: Int) extends Base
+case class Dummy704(x: Int) extends Base
+case class Dummy705(x: Int) extends Base
+case class Dummy706(x: Int) extends Base
+case class Dummy707(x: Int) extends Base
+case class Dummy708(x: Int) extends Base
+case class Dummy709(x: Int) extends Base
+case class Dummy710(x: Int) extends Base
+case class Dummy711(x: Int) extends Base
+case class Dummy712(x: Int) extends Base
+case class Dummy713(x: Int) extends Base
+case class Dummy714(x: Int) extends Base
+case class Dummy715(x: Int) extends Base
+case class Dummy716(x: Int) extends Base
+case class Dummy717(x: Int) extends Base
+case class Dummy718(x: Int) extends Base
+case class Dummy719(x: Int) extends Base
+case class Dummy720(x: Int) extends Base
+case class Dummy721(x: Int) extends Base
+case class Dummy722(x: Int) extends Base
+case class Dummy723(x: Int) extends Base
+case class Dummy724(x: Int) extends Base
+case class Dummy725(x: Int) extends Base
+case class Dummy726(x: Int) extends Base
+case class Dummy727(x: Int) extends Base
+case class Dummy728(x: Int) extends Base
+case class Dummy729(x: Int) extends Base
+case class Dummy730(x: Int) extends Base
+case class Dummy731(x: Int) extends Base
+case class Dummy732(x: Int) extends Base
+case class Dummy733(x: Int) extends Base
+case class Dummy734(x: Int) extends Base
+case class Dummy735(x: Int) extends Base
+case class Dummy736(x: Int) extends Base
+case class Dummy737(x: Int) extends Base
+case class Dummy738(x: Int) extends Base
+case class Dummy739(x: Int) extends Base
+case class Dummy740(x: Int) extends Base
+case class Dummy741(x: Int) extends Base
+case class Dummy742(x: Int) extends Base
+case class Dummy743(x: Int) extends Base
+case class Dummy744(x: Int) extends Base
+case class Dummy745(x: Int) extends Base
+case class Dummy746(x: Int) extends Base
+case class Dummy747(x: Int) extends Base
+case class Dummy748(x: Int) extends Base
+case class Dummy749(x: Int) extends Base
+case class Dummy750(x: Int) extends Base
+case class Dummy751(x: Int) extends Base
+case class Dummy752(x: Int) extends Base
+case class Dummy753(x: Int) extends Base
+case class Dummy754(x: Int) extends Base
+case class Dummy755(x: Int) extends Base
+case class Dummy756(x: Int) extends Base
+case class Dummy757(x: Int) extends Base
+case class Dummy758(x: Int) extends Base
+case class Dummy759(x: Int) extends Base
+case class Dummy760(x: Int) extends Base
+case class Dummy761(x: Int) extends Base
+case class Dummy762(x: Int) extends Base
+case class Dummy763(x: Int) extends Base
+case class Dummy764(x: Int) extends Base
+case class Dummy765(x: Int) extends Base
+case class Dummy766(x: Int) extends Base
+case class Dummy767(x: Int) extends Base
+case class Dummy768(x: Int) extends Base
+case class Dummy769(x: Int) extends Base
+case class Dummy770(x: Int) extends Base
+case class Dummy771(x: Int) extends Base
+case class Dummy772(x: Int) extends Base
+case class Dummy773(x: Int) extends Base
+case class Dummy774(x: Int) extends Base
+case class Dummy775(x: Int) extends Base
+case class Dummy776(x: Int) extends Base
+case class Dummy777(x: Int) extends Base
+case class Dummy778(x: Int) extends Base
+case class Dummy779(x: Int) extends Base
+case class Dummy780(x: Int) extends Base
+case class Dummy781(x: Int) extends Base
+case class Dummy782(x: Int) extends Base
+case class Dummy783(x: Int) extends Base
+case class Dummy784(x: Int) extends Base
+case class Dummy785(x: Int) extends Base
+case class Dummy786(x: Int) extends Base
+case class Dummy787(x: Int) extends Base
+case class Dummy788(x: Int) extends Base
+case class Dummy789(x: Int) extends Base
+case class Dummy790(x: Int) extends Base
+case class Dummy791(x: Int) extends Base
+case class Dummy792(x: Int) extends Base
+case class Dummy793(x: Int) extends Base
+case class Dummy794(x: Int) extends Base
+case class Dummy795(x: Int) extends Base
+case class Dummy796(x: Int) extends Base
+case class Dummy797(x: Int) extends Base
+case class Dummy798(x: Int) extends Base
+case class Dummy799(x: Int) extends Base
+case class Dummy800(x: Int) extends Base
+case class Dummy801(x: Int) extends Base
+case class Dummy802(x: Int) extends Base
+case class Dummy803(x: Int) extends Base
+case class Dummy804(x: Int) extends Base
+case class Dummy805(x: Int) extends Base
+case class Dummy806(x: Int) extends Base
+case class Dummy807(x: Int) extends Base
+case class Dummy808(x: Int) extends Base
+case class Dummy809(x: Int) extends Base
+case class Dummy810(x: Int) extends Base
+case class Dummy811(x: Int) extends Base
+case class Dummy812(x: Int) extends Base
+case class Dummy813(x: Int) extends Base
+case class Dummy814(x: Int) extends Base
+case class Dummy815(x: Int) extends Base
+case class Dummy816(x: Int) extends Base
+case class Dummy817(x: Int) extends Base
+case class Dummy818(x: Int) extends Base
+case class Dummy819(x: Int) extends Base
+case class Dummy820(x: Int) extends Base
+case class Dummy821(x: Int) extends Base
+case class Dummy822(x: Int) extends Base
+case class Dummy823(x: Int) extends Base
+case class Dummy824(x: Int) extends Base
+case class Dummy825(x: Int) extends Base
+case class Dummy826(x: Int) extends Base
+case class Dummy827(x: Int) extends Base
+case class Dummy828(x: Int) extends Base
+case class Dummy829(x: Int) extends Base
+case class Dummy830(x: Int) extends Base
+case class Dummy831(x: Int) extends Base
+case class Dummy832(x: Int) extends Base
+case class Dummy833(x: Int) extends Base
+case class Dummy834(x: Int) extends Base
+case class Dummy835(x: Int) extends Base
+case class Dummy836(x: Int) extends Base
+case class Dummy837(x: Int) extends Base
+case class Dummy838(x: Int) extends Base
+case class Dummy839(x: Int) extends Base
+case class Dummy840(x: Int) extends Base
+case class Dummy841(x: Int) extends Base
+case class Dummy842(x: Int) extends Base
+case class Dummy843(x: Int) extends Base
+case class Dummy844(x: Int) extends Base
+case class Dummy845(x: Int) extends Base
+case class Dummy846(x: Int) extends Base
+case class Dummy847(x: Int) extends Base
+case class Dummy848(x: Int) extends Base
+case class Dummy849(x: Int) extends Base
+case class Dummy850(x: Int) extends Base
+case class Dummy851(x: Int) extends Base
+case class Dummy852(x: Int) extends Base
+case class Dummy853(x: Int) extends Base
+case class Dummy854(x: Int) extends Base
+case class Dummy855(x: Int) extends Base
+case class Dummy856(x: Int) extends Base
+case class Dummy857(x: Int) extends Base
+case class Dummy858(x: Int) extends Base
+case class Dummy859(x: Int) extends Base
+case class Dummy860(x: Int) extends Base
+case class Dummy861(x: Int) extends Base
+case class Dummy862(x: Int) extends Base
+case class Dummy863(x: Int) extends Base
+case class Dummy864(x: Int) extends Base
+case class Dummy865(x: Int) extends Base
+case class Dummy866(x: Int) extends Base
+case class Dummy867(x: Int) extends Base
+case class Dummy868(x: Int) extends Base
+case class Dummy869(x: Int) extends Base
+case class Dummy870(x: Int) extends Base
+case class Dummy871(x: Int) extends Base
+case class Dummy872(x: Int) extends Base
+case class Dummy873(x: Int) extends Base
+case class Dummy874(x: Int) extends Base
+case class Dummy875(x: Int) extends Base
+case class Dummy876(x: Int) extends Base
+case class Dummy877(x: Int) extends Base
+case class Dummy878(x: Int) extends Base
+case class Dummy879(x: Int) extends Base
+case class Dummy880(x: Int) extends Base
+case class Dummy881(x: Int) extends Base
+case class Dummy882(x: Int) extends Base
+case class Dummy883(x: Int) extends Base
+case class Dummy884(x: Int) extends Base
+case class Dummy885(x: Int) extends Base
+case class Dummy886(x: Int) extends Base
+case class Dummy887(x: Int) extends Base
+case class Dummy888(x: Int) extends Base
+case class Dummy889(x: Int) extends Base
+case class Dummy890(x: Int) extends Base
+case class Dummy891(x: Int) extends Base
+case class Dummy892(x: Int) extends Base
+case class Dummy893(x: Int) extends Base
+case class Dummy894(x: Int) extends Base
+case class Dummy895(x: Int) extends Base
+case class Dummy896(x: Int) extends Base
+case class Dummy897(x: Int) extends Base
+case class Dummy898(x: Int) extends Base
+case class Dummy899(x: Int) extends Base
+case class Dummy900(x: Int) extends Base
+case class Dummy901(x: Int) extends Base
+case class Dummy902(x: Int) extends Base
+case class Dummy903(x: Int) extends Base
+case class Dummy904(x: Int) extends Base
+case class Dummy905(x: Int) extends Base
+case class Dummy906(x: Int) extends Base
+case class Dummy907(x: Int) extends Base
+case class Dummy908(x: Int) extends Base
+case class Dummy909(x: Int) extends Base
+case class Dummy910(x: Int) extends Base
+case class Dummy911(x: Int) extends Base
+case class Dummy912(x: Int) extends Base
+case class Dummy913(x: Int) extends Base
+case class Dummy914(x: Int) extends Base
+case class Dummy915(x: Int) extends Base
+case class Dummy916(x: Int) extends Base
+case class Dummy917(x: Int) extends Base
+case class Dummy918(x: Int) extends Base
+case class Dummy919(x: Int) extends Base
+case class Dummy920(x: Int) extends Base
+case class Dummy921(x: Int) extends Base
+case class Dummy922(x: Int) extends Base
+case class Dummy923(x: Int) extends Base
+case class Dummy924(x: Int) extends Base
+case class Dummy925(x: Int) extends Base
+case class Dummy926(x: Int) extends Base
+case class Dummy927(x: Int) extends Base
+case class Dummy928(x: Int) extends Base
+case class Dummy929(x: Int) extends Base
+case class Dummy930(x: Int) extends Base
+case class Dummy931(x: Int) extends Base
+case class Dummy932(x: Int) extends Base
+case class Dummy933(x: Int) extends Base
+case class Dummy934(x: Int) extends Base
+case class Dummy935(x: Int) extends Base
+case class Dummy936(x: Int) extends Base
+case class Dummy937(x: Int) extends Base
+case class Dummy938(x: Int) extends Base
+case class Dummy939(x: Int) extends Base
+case class Dummy940(x: Int) extends Base
+case class Dummy941(x: Int) extends Base
+case class Dummy942(x: Int) extends Base
+case class Dummy943(x: Int) extends Base
+case class Dummy944(x: Int) extends Base
+case class Dummy945(x: Int) extends Base
+case class Dummy946(x: Int) extends Base
+case class Dummy947(x: Int) extends Base
+case class Dummy948(x: Int) extends Base
+case class Dummy949(x: Int) extends Base
+case class Dummy950(x: Int) extends Base
+case class Dummy951(x: Int) extends Base
+case class Dummy952(x: Int) extends Base
+case class Dummy953(x: Int) extends Base
+case class Dummy954(x: Int) extends Base
+case class Dummy955(x: Int) extends Base
+case class Dummy956(x: Int) extends Base
+case class Dummy957(x: Int) extends Base
+case class Dummy958(x: Int) extends Base
+case class Dummy959(x: Int) extends Base
+case class Dummy960(x: Int) extends Base
+case class Dummy961(x: Int) extends Base
+case class Dummy962(x: Int) extends Base
+case class Dummy963(x: Int) extends Base
+case class Dummy964(x: Int) extends Base
+case class Dummy965(x: Int) extends Base
+case class Dummy966(x: Int) extends Base
+case class Dummy967(x: Int) extends Base
+case class Dummy968(x: Int) extends Base
+case class Dummy969(x: Int) extends Base
+case class Dummy970(x: Int) extends Base
+case class Dummy971(x: Int) extends Base
+case class Dummy972(x: Int) extends Base
+case class Dummy973(x: Int) extends Base
+case class Dummy974(x: Int) extends Base
+case class Dummy975(x: Int) extends Base
+case class Dummy976(x: Int) extends Base
+case class Dummy977(x: Int) extends Base
+case class Dummy978(x: Int) extends Base
+case class Dummy979(x: Int) extends Base
+case class Dummy980(x: Int) extends Base
+case class Dummy981(x: Int) extends Base
+case class Dummy982(x: Int) extends Base
+case class Dummy983(x: Int) extends Base
+case class Dummy984(x: Int) extends Base
+case class Dummy985(x: Int) extends Base
+case class Dummy986(x: Int) extends Base
+case class Dummy987(x: Int) extends Base
+case class Dummy988(x: Int) extends Base
+case class Dummy989(x: Int) extends Base
+case class Dummy990(x: Int) extends Base
+case class Dummy991(x: Int) extends Base
+case class Dummy992(x: Int) extends Base
+case class Dummy993(x: Int) extends Base
+case class Dummy994(x: Int) extends Base
+case class Dummy995(x: Int) extends Base
+case class Dummy996(x: Int) extends Base
+case class Dummy997(x: Int) extends Base
+case class Dummy998(x: Int) extends Base
+case class Dummy999(x: Int) extends Base
+case class Dummy1000(x: Int) extends Base
+case class Dummy1001(x: Int) extends Base
+case class Dummy1002(x: Int) extends Base
+case class Dummy1003(x: Int) extends Base
+case class Dummy1004(x: Int) extends Base
+case class Dummy1005(x: Int) extends Base
+case class Dummy1006(x: Int) extends Base
+case class Dummy1007(x: Int) extends Base
+case class Dummy1008(x: Int) extends Base
+case class Dummy1009(x: Int) extends Base
+case class Dummy1010(x: Int) extends Base
+case class Dummy1011(x: Int) extends Base
+case class Dummy1012(x: Int) extends Base
+case class Dummy1013(x: Int) extends Base
+case class Dummy1014(x: Int) extends Base
+case class Dummy1015(x: Int) extends Base
+case class Dummy1016(x: Int) extends Base
+case class Dummy1017(x: Int) extends Base
+case class Dummy1018(x: Int) extends Base
+case class Dummy1019(x: Int) extends Base
+case class Dummy1020(x: Int) extends Base
+case class Dummy1021(x: Int) extends Base
+case class Dummy1022(x: Int) extends Base
+case class Dummy1023(x: Int) extends Base
+case class Dummy1024(x: Int) extends Base
+case class Dummy1025(x: Int) extends Base
+case class Dummy1026(x: Int) extends Base
+case class Dummy1027(x: Int) extends Base
+case class Dummy1028(x: Int) extends Base
+case class Dummy1029(x: Int) extends Base
+case class Dummy1030(x: Int) extends Base
+case class Dummy1031(x: Int) extends Base
+case class Dummy1032(x: Int) extends Base
+case class Dummy1033(x: Int) extends Base
+case class Dummy1034(x: Int) extends Base
+case class Dummy1035(x: Int) extends Base
+case class Dummy1036(x: Int) extends Base
+case class Dummy1037(x: Int) extends Base
+case class Dummy1038(x: Int) extends Base
+case class Dummy1039(x: Int) extends Base
+case class Dummy1040(x: Int) extends Base
+case class Dummy1041(x: Int) extends Base
+case class Dummy1042(x: Int) extends Base
+case class Dummy1043(x: Int) extends Base
+case class Dummy1044(x: Int) extends Base
+case class Dummy1045(x: Int) extends Base
+case class Dummy1046(x: Int) extends Base
+case class Dummy1047(x: Int) extends Base
+case class Dummy1048(x: Int) extends Base
+case class Dummy1049(x: Int) extends Base
+case class Dummy1050(x: Int) extends Base
+case class Dummy1051(x: Int) extends Base
+case class Dummy1052(x: Int) extends Base
+case class Dummy1053(x: Int) extends Base
+case class Dummy1054(x: Int) extends Base
+case class Dummy1055(x: Int) extends Base
+case class Dummy1056(x: Int) extends Base
+case class Dummy1057(x: Int) extends Base
+case class Dummy1058(x: Int) extends Base
+case class Dummy1059(x: Int) extends Base
+case class Dummy1060(x: Int) extends Base
+case class Dummy1061(x: Int) extends Base
+case class Dummy1062(x: Int) extends Base
+case class Dummy1063(x: Int) extends Base
+case class Dummy1064(x: Int) extends Base
+case class Dummy1065(x: Int) extends Base
+case class Dummy1066(x: Int) extends Base
+case class Dummy1067(x: Int) extends Base
+case class Dummy1068(x: Int) extends Base
+case class Dummy1069(x: Int) extends Base
+case class Dummy1070(x: Int) extends Base
+case class Dummy1071(x: Int) extends Base
+case class Dummy1072(x: Int) extends Base
+case class Dummy1073(x: Int) extends Base
+case class Dummy1074(x: Int) extends Base
+case class Dummy1075(x: Int) extends Base
+case class Dummy1076(x: Int) extends Base
+case class Dummy1077(x: Int) extends Base
+case class Dummy1078(x: Int) extends Base
+case class Dummy1079(x: Int) extends Base
+case class Dummy1080(x: Int) extends Base
+case class Dummy1081(x: Int) extends Base
+case class Dummy1082(x: Int) extends Base
+case class Dummy1083(x: Int) extends Base
+case class Dummy1084(x: Int) extends Base
+case class Dummy1085(x: Int) extends Base
+case class Dummy1086(x: Int) extends Base
+case class Dummy1087(x: Int) extends Base
+case class Dummy1088(x: Int) extends Base
+case class Dummy1089(x: Int) extends Base
+case class Dummy1090(x: Int) extends Base
+case class Dummy1091(x: Int) extends Base
+case class Dummy1092(x: Int) extends Base
+case class Dummy1093(x: Int) extends Base
+case class Dummy1094(x: Int) extends Base
+case class Dummy1095(x: Int) extends Base
+case class Dummy1096(x: Int) extends Base
+case class Dummy1097(x: Int) extends Base
+case class Dummy1098(x: Int) extends Base
+case class Dummy1099(x: Int) extends Base
+case class Dummy1100(x: Int) extends Base
+case class Dummy1101(x: Int) extends Base
+case class Dummy1102(x: Int) extends Base
+case class Dummy1103(x: Int) extends Base
+case class Dummy1104(x: Int) extends Base
+case class Dummy1105(x: Int) extends Base
+case class Dummy1106(x: Int) extends Base
+case class Dummy1107(x: Int) extends Base
+case class Dummy1108(x: Int) extends Base
+case class Dummy1109(x: Int) extends Base
+case class Dummy1110(x: Int) extends Base
+case class Dummy1111(x: Int) extends Base
+case class Dummy1112(x: Int) extends Base
+case class Dummy1113(x: Int) extends Base
+case class Dummy1114(x: Int) extends Base
+case class Dummy1115(x: Int) extends Base
+case class Dummy1116(x: Int) extends Base
+case class Dummy1117(x: Int) extends Base
+case class Dummy1118(x: Int) extends Base
+case class Dummy1119(x: Int) extends Base
+case class Dummy1120(x: Int) extends Base
+case class Dummy1121(x: Int) extends Base
+case class Dummy1122(x: Int) extends Base
+case class Dummy1123(x: Int) extends Base
+case class Dummy1124(x: Int) extends Base
+case class Dummy1125(x: Int) extends Base
+case class Dummy1126(x: Int) extends Base
+case class Dummy1127(x: Int) extends Base
+case class Dummy1128(x: Int) extends Base
+case class Dummy1129(x: Int) extends Base
+case class Dummy1130(x: Int) extends Base
+case class Dummy1131(x: Int) extends Base
+case class Dummy1132(x: Int) extends Base
+case class Dummy1133(x: Int) extends Base
+case class Dummy1134(x: Int) extends Base
+case class Dummy1135(x: Int) extends Base
+case class Dummy1136(x: Int) extends Base
+case class Dummy1137(x: Int) extends Base
+case class Dummy1138(x: Int) extends Base
+case class Dummy1139(x: Int) extends Base
+case class Dummy1140(x: Int) extends Base
+case class Dummy1141(x: Int) extends Base
+case class Dummy1142(x: Int) extends Base
+case class Dummy1143(x: Int) extends Base
+case class Dummy1144(x: Int) extends Base
+case class Dummy1145(x: Int) extends Base
+case class Dummy1146(x: Int) extends Base
+case class Dummy1147(x: Int) extends Base
+case class Dummy1148(x: Int) extends Base
+case class Dummy1149(x: Int) extends Base
+case class Dummy1150(x: Int) extends Base
+case class Dummy1151(x: Int) extends Base
+case class Dummy1152(x: Int) extends Base
+case class Dummy1153(x: Int) extends Base
+case class Dummy1154(x: Int) extends Base
+case class Dummy1155(x: Int) extends Base
+case class Dummy1156(x: Int) extends Base
+case class Dummy1157(x: Int) extends Base
+case class Dummy1158(x: Int) extends Base
+case class Dummy1159(x: Int) extends Base
+case class Dummy1160(x: Int) extends Base
+case class Dummy1161(x: Int) extends Base
+case class Dummy1162(x: Int) extends Base
+case class Dummy1163(x: Int) extends Base
+case class Dummy1164(x: Int) extends Base
+case class Dummy1165(x: Int) extends Base
+case class Dummy1166(x: Int) extends Base
+case class Dummy1167(x: Int) extends Base
+case class Dummy1168(x: Int) extends Base
+case class Dummy1169(x: Int) extends Base
+case class Dummy1170(x: Int) extends Base
+case class Dummy1171(x: Int) extends Base
+case class Dummy1172(x: Int) extends Base
+case class Dummy1173(x: Int) extends Base
+case class Dummy1174(x: Int) extends Base
+case class Dummy1175(x: Int) extends Base
+case class Dummy1176(x: Int) extends Base
+case class Dummy1177(x: Int) extends Base
+case class Dummy1178(x: Int) extends Base
+case class Dummy1179(x: Int) extends Base
+case class Dummy1180(x: Int) extends Base
+case class Dummy1181(x: Int) extends Base
+case class Dummy1182(x: Int) extends Base
+case class Dummy1183(x: Int) extends Base
+case class Dummy1184(x: Int) extends Base
+case class Dummy1185(x: Int) extends Base
+case class Dummy1186(x: Int) extends Base
+case class Dummy1187(x: Int) extends Base
+case class Dummy1188(x: Int) extends Base
+case class Dummy1189(x: Int) extends Base
+case class Dummy1190(x: Int) extends Base
+case class Dummy1191(x: Int) extends Base
+case class Dummy1192(x: Int) extends Base
+case class Dummy1193(x: Int) extends Base
+case class Dummy1194(x: Int) extends Base
+case class Dummy1195(x: Int) extends Base
+case class Dummy1196(x: Int) extends Base
+case class Dummy1197(x: Int) extends Base
+case class Dummy1198(x: Int) extends Base
+case class Dummy1199(x: Int) extends Base
+case class Dummy1200(x: Int) extends Base
+case class Dummy1201(x: Int) extends Base
+case class Dummy1202(x: Int) extends Base
+case class Dummy1203(x: Int) extends Base
+case class Dummy1204(x: Int) extends Base
+case class Dummy1205(x: Int) extends Base
+case class Dummy1206(x: Int) extends Base
+case class Dummy1207(x: Int) extends Base
+case class Dummy1208(x: Int) extends Base
+case class Dummy1209(x: Int) extends Base
+case class Dummy1210(x: Int) extends Base
+case class Dummy1211(x: Int) extends Base
+case class Dummy1212(x: Int) extends Base
+case class Dummy1213(x: Int) extends Base
+case class Dummy1214(x: Int) extends Base
+case class Dummy1215(x: Int) extends Base
+case class Dummy1216(x: Int) extends Base
+case class Dummy1217(x: Int) extends Base
+case class Dummy1218(x: Int) extends Base
+case class Dummy1219(x: Int) extends Base
+case class Dummy1220(x: Int) extends Base
+case class Dummy1221(x: Int) extends Base
+case class Dummy1222(x: Int) extends Base
+case class Dummy1223(x: Int) extends Base
+case class Dummy1224(x: Int) extends Base
+case class Dummy1225(x: Int) extends Base
+case class Dummy1226(x: Int) extends Base
+case class Dummy1227(x: Int) extends Base
+case class Dummy1228(x: Int) extends Base
+case class Dummy1229(x: Int) extends Base
+case class Dummy1230(x: Int) extends Base
+case class Dummy1231(x: Int) extends Base
+case class Dummy1232(x: Int) extends Base
+case class Dummy1233(x: Int) extends Base
+case class Dummy1234(x: Int) extends Base
+case class Dummy1235(x: Int) extends Base
+case class Dummy1236(x: Int) extends Base
+case class Dummy1237(x: Int) extends Base
+case class Dummy1238(x: Int) extends Base
+case class Dummy1239(x: Int) extends Base
+case class Dummy1240(x: Int) extends Base
+case class Dummy1241(x: Int) extends Base
+case class Dummy1242(x: Int) extends Base
+case class Dummy1243(x: Int) extends Base
+case class Dummy1244(x: Int) extends Base
+case class Dummy1245(x: Int) extends Base
+case class Dummy1246(x: Int) extends Base
+case class Dummy1247(x: Int) extends Base
+case class Dummy1248(x: Int) extends Base
+case class Dummy1249(x: Int) extends Base
+case class Dummy1250(x: Int) extends Base
+case class Dummy1251(x: Int) extends Base
+case class Dummy1252(x: Int) extends Base
+case class Dummy1253(x: Int) extends Base
+case class Dummy1254(x: Int) extends Base
+case class Dummy1255(x: Int) extends Base
+case class Dummy1256(x: Int) extends Base
+case class Dummy1257(x: Int) extends Base
+case class Dummy1258(x: Int) extends Base
+case class Dummy1259(x: Int) extends Base
+case class Dummy1260(x: Int) extends Base
+case class Dummy1261(x: Int) extends Base
+case class Dummy1262(x: Int) extends Base
+case class Dummy1263(x: Int) extends Base
+case class Dummy1264(x: Int) extends Base
+case class Dummy1265(x: Int) extends Base
+case class Dummy1266(x: Int) extends Base
+case class Dummy1267(x: Int) extends Base
+case class Dummy1268(x: Int) extends Base
+case class Dummy1269(x: Int) extends Base
+case class Dummy1270(x: Int) extends Base
+case class Dummy1271(x: Int) extends Base
+case class Dummy1272(x: Int) extends Base
+case class Dummy1273(x: Int) extends Base
+case class Dummy1274(x: Int) extends Base
+case class Dummy1275(x: Int) extends Base
+case class Dummy1276(x: Int) extends Base
+case class Dummy1277(x: Int) extends Base
+case class Dummy1278(x: Int) extends Base
+case class Dummy1279(x: Int) extends Base
+case class Dummy1280(x: Int) extends Base
+case class Dummy1281(x: Int) extends Base
+case class Dummy1282(x: Int) extends Base
+case class Dummy1283(x: Int) extends Base
+case class Dummy1284(x: Int) extends Base
+case class Dummy1285(x: Int) extends Base
+case class Dummy1286(x: Int) extends Base
+case class Dummy1287(x: Int) extends Base
+case class Dummy1288(x: Int) extends Base
+case class Dummy1289(x: Int) extends Base
+case class Dummy1290(x: Int) extends Base
+case class Dummy1291(x: Int) extends Base
+case class Dummy1292(x: Int) extends Base
+case class Dummy1293(x: Int) extends Base
+case class Dummy1294(x: Int) extends Base
+case class Dummy1295(x: Int) extends Base
+case class Dummy1296(x: Int) extends Base
+case class Dummy1297(x: Int) extends Base
+case class Dummy1298(x: Int) extends Base
+case class Dummy1299(x: Int) extends Base
+case class Dummy1300(x: Int) extends Base
+case class Dummy1301(x: Int) extends Base
+case class Dummy1302(x: Int) extends Base
+case class Dummy1303(x: Int) extends Base
+case class Dummy1304(x: Int) extends Base
+case class Dummy1305(x: Int) extends Base
+case class Dummy1306(x: Int) extends Base
+case class Dummy1307(x: Int) extends Base
+case class Dummy1308(x: Int) extends Base
+case class Dummy1309(x: Int) extends Base
+case class Dummy1310(x: Int) extends Base
+case class Dummy1311(x: Int) extends Base
+case class Dummy1312(x: Int) extends Base
+case class Dummy1313(x: Int) extends Base
+case class Dummy1314(x: Int) extends Base
+case class Dummy1315(x: Int) extends Base
+case class Dummy1316(x: Int) extends Base
+case class Dummy1317(x: Int) extends Base
+case class Dummy1318(x: Int) extends Base
+case class Dummy1319(x: Int) extends Base
+case class Dummy1320(x: Int) extends Base
+case class Dummy1321(x: Int) extends Base
+case class Dummy1322(x: Int) extends Base
+case class Dummy1323(x: Int) extends Base
+case class Dummy1324(x: Int) extends Base
+case class Dummy1325(x: Int) extends Base
+case class Dummy1326(x: Int) extends Base
+case class Dummy1327(x: Int) extends Base
+case class Dummy1328(x: Int) extends Base
+case class Dummy1329(x: Int) extends Base
+case class Dummy1330(x: Int) extends Base
+case class Dummy1331(x: Int) extends Base
+case class Dummy1332(x: Int) extends Base
+case class Dummy1333(x: Int) extends Base
+case class Dummy1334(x: Int) extends Base
+case class Dummy1335(x: Int) extends Base
+case class Dummy1336(x: Int) extends Base
+case class Dummy1337(x: Int) extends Base
+case class Dummy1338(x: Int) extends Base
+case class Dummy1339(x: Int) extends Base
+case class Dummy1340(x: Int) extends Base
+case class Dummy1341(x: Int) extends Base
+case class Dummy1342(x: Int) extends Base
+case class Dummy1343(x: Int) extends Base
+case class Dummy1344(x: Int) extends Base
+case class Dummy1345(x: Int) extends Base
+case class Dummy1346(x: Int) extends Base
+case class Dummy1347(x: Int) extends Base
+case class Dummy1348(x: Int) extends Base
+case class Dummy1349(x: Int) extends Base
+case class Dummy1350(x: Int) extends Base
+case class Dummy1351(x: Int) extends Base
+case class Dummy1352(x: Int) extends Base
+case class Dummy1353(x: Int) extends Base
+case class Dummy1354(x: Int) extends Base
+case class Dummy1355(x: Int) extends Base
+case class Dummy1356(x: Int) extends Base
+case class Dummy1357(x: Int) extends Base
+case class Dummy1358(x: Int) extends Base
+case class Dummy1359(x: Int) extends Base
+case class Dummy1360(x: Int) extends Base
+case class Dummy1361(x: Int) extends Base
+case class Dummy1362(x: Int) extends Base
+case class Dummy1363(x: Int) extends Base
+case class Dummy1364(x: Int) extends Base
+case class Dummy1365(x: Int) extends Base
+case class Dummy1366(x: Int) extends Base
+case class Dummy1367(x: Int) extends Base
+case class Dummy1368(x: Int) extends Base
+case class Dummy1369(x: Int) extends Base
+case class Dummy1370(x: Int) extends Base
+case class Dummy1371(x: Int) extends Base
+case class Dummy1372(x: Int) extends Base
+case class Dummy1373(x: Int) extends Base
+case class Dummy1374(x: Int) extends Base
+case class Dummy1375(x: Int) extends Base
+case class Dummy1376(x: Int) extends Base
+case class Dummy1377(x: Int) extends Base
+case class Dummy1378(x: Int) extends Base
+case class Dummy1379(x: Int) extends Base
+case class Dummy1380(x: Int) extends Base
+case class Dummy1381(x: Int) extends Base
+case class Dummy1382(x: Int) extends Base
+case class Dummy1383(x: Int) extends Base
+case class Dummy1384(x: Int) extends Base
+case class Dummy1385(x: Int) extends Base
+case class Dummy1386(x: Int) extends Base
+case class Dummy1387(x: Int) extends Base
+case class Dummy1388(x: Int) extends Base
+case class Dummy1389(x: Int) extends Base
+case class Dummy1390(x: Int) extends Base
+case class Dummy1391(x: Int) extends Base
+case class Dummy1392(x: Int) extends Base
+case class Dummy1393(x: Int) extends Base
+case class Dummy1394(x: Int) extends Base
+case class Dummy1395(x: Int) extends Base
+case class Dummy1396(x: Int) extends Base
+case class Dummy1397(x: Int) extends Base
+case class Dummy1398(x: Int) extends Base
+case class Dummy1399(x: Int) extends Base
+case class Dummy1400(x: Int) extends Base
+case class Dummy1401(x: Int) extends Base
+case class Dummy1402(x: Int) extends Base
+case class Dummy1403(x: Int) extends Base
+case class Dummy1404(x: Int) extends Base
+case class Dummy1405(x: Int) extends Base
+case class Dummy1406(x: Int) extends Base
+case class Dummy1407(x: Int) extends Base
+case class Dummy1408(x: Int) extends Base
+case class Dummy1409(x: Int) extends Base
+case class Dummy1410(x: Int) extends Base
+case class Dummy1411(x: Int) extends Base
+case class Dummy1412(x: Int) extends Base
+case class Dummy1413(x: Int) extends Base
+case class Dummy1414(x: Int) extends Base
+case class Dummy1415(x: Int) extends Base
+case class Dummy1416(x: Int) extends Base
+case class Dummy1417(x: Int) extends Base
+case class Dummy1418(x: Int) extends Base
+case class Dummy1419(x: Int) extends Base
+case class Dummy1420(x: Int) extends Base
+case class Dummy1421(x: Int) extends Base
+case class Dummy1422(x: Int) extends Base
+case class Dummy1423(x: Int) extends Base
+case class Dummy1424(x: Int) extends Base
+case class Dummy1425(x: Int) extends Base
+case class Dummy1426(x: Int) extends Base
+case class Dummy1427(x: Int) extends Base
+case class Dummy1428(x: Int) extends Base
+case class Dummy1429(x: Int) extends Base
+case class Dummy1430(x: Int) extends Base
+case class Dummy1431(x: Int) extends Base
+case class Dummy1432(x: Int) extends Base
+case class Dummy1433(x: Int) extends Base
+case class Dummy1434(x: Int) extends Base
+case class Dummy1435(x: Int) extends Base
+case class Dummy1436(x: Int) extends Base
+case class Dummy1437(x: Int) extends Base
+case class Dummy1438(x: Int) extends Base
+case class Dummy1439(x: Int) extends Base
+case class Dummy1440(x: Int) extends Base
+case class Dummy1441(x: Int) extends Base
+case class Dummy1442(x: Int) extends Base
+case class Dummy1443(x: Int) extends Base
+case class Dummy1444(x: Int) extends Base
+case class Dummy1445(x: Int) extends Base
+case class Dummy1446(x: Int) extends Base
+case class Dummy1447(x: Int) extends Base
+case class Dummy1448(x: Int) extends Base
+case class Dummy1449(x: Int) extends Base
+case class Dummy1450(x: Int) extends Base
+case class Dummy1451(x: Int) extends Base
+case class Dummy1452(x: Int) extends Base
+case class Dummy1453(x: Int) extends Base
+case class Dummy1454(x: Int) extends Base
+case class Dummy1455(x: Int) extends Base
+case class Dummy1456(x: Int) extends Base
+case class Dummy1457(x: Int) extends Base
+case class Dummy1458(x: Int) extends Base
+case class Dummy1459(x: Int) extends Base
+case class Dummy1460(x: Int) extends Base
+case class Dummy1461(x: Int) extends Base
+case class Dummy1462(x: Int) extends Base
+case class Dummy1463(x: Int) extends Base
+case class Dummy1464(x: Int) extends Base
+case class Dummy1465(x: Int) extends Base
+case class Dummy1466(x: Int) extends Base
+case class Dummy1467(x: Int) extends Base
+case class Dummy1468(x: Int) extends Base
+case class Dummy1469(x: Int) extends Base
+case class Dummy1470(x: Int) extends Base
+case class Dummy1471(x: Int) extends Base
+case class Dummy1472(x: Int) extends Base
+case class Dummy1473(x: Int) extends Base
+case class Dummy1474(x: Int) extends Base
+case class Dummy1475(x: Int) extends Base
+case class Dummy1476(x: Int) extends Base
+case class Dummy1477(x: Int) extends Base
+case class Dummy1478(x: Int) extends Base
+case class Dummy1479(x: Int) extends Base
+case class Dummy1480(x: Int) extends Base
+case class Dummy1481(x: Int) extends Base
+case class Dummy1482(x: Int) extends Base
+case class Dummy1483(x: Int) extends Base
+case class Dummy1484(x: Int) extends Base
+case class Dummy1485(x: Int) extends Base
+case class Dummy1486(x: Int) extends Base
+case class Dummy1487(x: Int) extends Base
+case class Dummy1488(x: Int) extends Base
+case class Dummy1489(x: Int) extends Base
+case class Dummy1490(x: Int) extends Base
+case class Dummy1491(x: Int) extends Base
+case class Dummy1492(x: Int) extends Base
+case class Dummy1493(x: Int) extends Base
+case class Dummy1494(x: Int) extends Base
+case class Dummy1495(x: Int) extends Base
+case class Dummy1496(x: Int) extends Base
+case class Dummy1497(x: Int) extends Base
+case class Dummy1498(x: Int) extends Base
+case class Dummy1499(x: Int) extends Base
+case class Dummy1500(x: Int) extends Base
+case class Dummy1501(x: Int) extends Base
+case class Dummy1502(x: Int) extends Base
+case class Dummy1503(x: Int) extends Base
+case class Dummy1504(x: Int) extends Base
+case class Dummy1505(x: Int) extends Base
+case class Dummy1506(x: Int) extends Base
+case class Dummy1507(x: Int) extends Base
+case class Dummy1508(x: Int) extends Base
+case class Dummy1509(x: Int) extends Base
+case class Dummy1510(x: Int) extends Base
+case class Dummy1511(x: Int) extends Base
+case class Dummy1512(x: Int) extends Base
+case class Dummy1513(x: Int) extends Base
+case class Dummy1514(x: Int) extends Base
+case class Dummy1515(x: Int) extends Base
+case class Dummy1516(x: Int) extends Base
+case class Dummy1517(x: Int) extends Base
+case class Dummy1518(x: Int) extends Base
+case class Dummy1519(x: Int) extends Base
+case class Dummy1520(x: Int) extends Base
+case class Dummy1521(x: Int) extends Base
+case class Dummy1522(x: Int) extends Base
+case class Dummy1523(x: Int) extends Base
+case class Dummy1524(x: Int) extends Base
+case class Dummy1525(x: Int) extends Base
+case class Dummy1526(x: Int) extends Base
+case class Dummy1527(x: Int) extends Base
+case class Dummy1528(x: Int) extends Base
+case class Dummy1529(x: Int) extends Base
+case class Dummy1530(x: Int) extends Base
+case class Dummy1531(x: Int) extends Base
+case class Dummy1532(x: Int) extends Base
+case class Dummy1533(x: Int) extends Base
+case class Dummy1534(x: Int) extends Base
+case class Dummy1535(x: Int) extends Base
+case class Dummy1536(x: Int) extends Base
+case class Dummy1537(x: Int) extends Base
+case class Dummy1538(x: Int) extends Base
+case class Dummy1539(x: Int) extends Base
+case class Dummy1540(x: Int) extends Base
+case class Dummy1541(x: Int) extends Base
+case class Dummy1542(x: Int) extends Base
+case class Dummy1543(x: Int) extends Base
+case class Dummy1544(x: Int) extends Base
+case class Dummy1545(x: Int) extends Base
+case class Dummy1546(x: Int) extends Base
+case class Dummy1547(x: Int) extends Base
+case class Dummy1548(x: Int) extends Base
+case class Dummy1549(x: Int) extends Base
+case class Dummy1550(x: Int) extends Base
+case class Dummy1551(x: Int) extends Base
+case class Dummy1552(x: Int) extends Base
+case class Dummy1553(x: Int) extends Base
+case class Dummy1554(x: Int) extends Base
+case class Dummy1555(x: Int) extends Base
+case class Dummy1556(x: Int) extends Base
+case class Dummy1557(x: Int) extends Base
+case class Dummy1558(x: Int) extends Base
+case class Dummy1559(x: Int) extends Base
+case class Dummy1560(x: Int) extends Base
+case class Dummy1561(x: Int) extends Base
+case class Dummy1562(x: Int) extends Base
+case class Dummy1563(x: Int) extends Base
+case class Dummy1564(x: Int) extends Base
+case class Dummy1565(x: Int) extends Base
+case class Dummy1566(x: Int) extends Base
+case class Dummy1567(x: Int) extends Base
+case class Dummy1568(x: Int) extends Base
+case class Dummy1569(x: Int) extends Base
+case class Dummy1570(x: Int) extends Base
+case class Dummy1571(x: Int) extends Base
+case class Dummy1572(x: Int) extends Base
+case class Dummy1573(x: Int) extends Base
+case class Dummy1574(x: Int) extends Base
+case class Dummy1575(x: Int) extends Base
+case class Dummy1576(x: Int) extends Base
+case class Dummy1577(x: Int) extends Base
+case class Dummy1578(x: Int) extends Base
+case class Dummy1579(x: Int) extends Base
+case class Dummy1580(x: Int) extends Base
+case class Dummy1581(x: Int) extends Base
+case class Dummy1582(x: Int) extends Base
+case class Dummy1583(x: Int) extends Base
+case class Dummy1584(x: Int) extends Base
+case class Dummy1585(x: Int) extends Base
+case class Dummy1586(x: Int) extends Base
+case class Dummy1587(x: Int) extends Base
+case class Dummy1588(x: Int) extends Base
+case class Dummy1589(x: Int) extends Base
+case class Dummy1590(x: Int) extends Base
+case class Dummy1591(x: Int) extends Base
+case class Dummy1592(x: Int) extends Base
+case class Dummy1593(x: Int) extends Base
+case class Dummy1594(x: Int) extends Base
+case class Dummy1595(x: Int) extends Base
+case class Dummy1596(x: Int) extends Base
+case class Dummy1597(x: Int) extends Base
+case class Dummy1598(x: Int) extends Base
+case class Dummy1599(x: Int) extends Base
+case class Dummy1600(x: Int) extends Base
+case class Dummy1601(x: Int) extends Base
+case class Dummy1602(x: Int) extends Base
+case class Dummy1603(x: Int) extends Base
+case class Dummy1604(x: Int) extends Base
+case class Dummy1605(x: Int) extends Base
+case class Dummy1606(x: Int) extends Base
+case class Dummy1607(x: Int) extends Base
+case class Dummy1608(x: Int) extends Base
+case class Dummy1609(x: Int) extends Base
+case class Dummy1610(x: Int) extends Base
+case class Dummy1611(x: Int) extends Base
+case class Dummy1612(x: Int) extends Base
+case class Dummy1613(x: Int) extends Base
+case class Dummy1614(x: Int) extends Base
+case class Dummy1615(x: Int) extends Base
+case class Dummy1616(x: Int) extends Base
+case class Dummy1617(x: Int) extends Base
+case class Dummy1618(x: Int) extends Base
+case class Dummy1619(x: Int) extends Base
+case class Dummy1620(x: Int) extends Base
+case class Dummy1621(x: Int) extends Base
+case class Dummy1622(x: Int) extends Base
+case class Dummy1623(x: Int) extends Base
+case class Dummy1624(x: Int) extends Base
+case class Dummy1625(x: Int) extends Base
+case class Dummy1626(x: Int) extends Base
+case class Dummy1627(x: Int) extends Base
+case class Dummy1628(x: Int) extends Base
+case class Dummy1629(x: Int) extends Base
+case class Dummy1630(x: Int) extends Base
+case class Dummy1631(x: Int) extends Base
+case class Dummy1632(x: Int) extends Base
+case class Dummy1633(x: Int) extends Base
+case class Dummy1634(x: Int) extends Base
+case class Dummy1635(x: Int) extends Base
+case class Dummy1636(x: Int) extends Base
+case class Dummy1637(x: Int) extends Base
+case class Dummy1638(x: Int) extends Base
+case class Dummy1639(x: Int) extends Base
+case class Dummy1640(x: Int) extends Base
+case class Dummy1641(x: Int) extends Base
+case class Dummy1642(x: Int) extends Base
+case class Dummy1643(x: Int) extends Base
+case class Dummy1644(x: Int) extends Base
+case class Dummy1645(x: Int) extends Base
+case class Dummy1646(x: Int) extends Base
+case class Dummy1647(x: Int) extends Base
+case class Dummy1648(x: Int) extends Base
+case class Dummy1649(x: Int) extends Base
+case class Dummy1650(x: Int) extends Base
+case class Dummy1651(x: Int) extends Base
+case class Dummy1652(x: Int) extends Base
+case class Dummy1653(x: Int) extends Base
+case class Dummy1654(x: Int) extends Base
+case class Dummy1655(x: Int) extends Base
+case class Dummy1656(x: Int) extends Base
+case class Dummy1657(x: Int) extends Base
+case class Dummy1658(x: Int) extends Base
+case class Dummy1659(x: Int) extends Base
+case class Dummy1660(x: Int) extends Base
+case class Dummy1661(x: Int) extends Base
+case class Dummy1662(x: Int) extends Base
+case class Dummy1663(x: Int) extends Base
+case class Dummy1664(x: Int) extends Base
+case class Dummy1665(x: Int) extends Base
+case class Dummy1666(x: Int) extends Base
+case class Dummy1667(x: Int) extends Base
+case class Dummy1668(x: Int) extends Base
+case class Dummy1669(x: Int) extends Base
+case class Dummy1670(x: Int) extends Base
+case class Dummy1671(x: Int) extends Base
+case class Dummy1672(x: Int) extends Base
+case class Dummy1673(x: Int) extends Base
+case class Dummy1674(x: Int) extends Base
+case class Dummy1675(x: Int) extends Base
+case class Dummy1676(x: Int) extends Base
+case class Dummy1677(x: Int) extends Base
+case class Dummy1678(x: Int) extends Base
+case class Dummy1679(x: Int) extends Base
+case class Dummy1680(x: Int) extends Base
+case class Dummy1681(x: Int) extends Base
+case class Dummy1682(x: Int) extends Base
+case class Dummy1683(x: Int) extends Base
+case class Dummy1684(x: Int) extends Base
+case class Dummy1685(x: Int) extends Base
+case class Dummy1686(x: Int) extends Base
+case class Dummy1687(x: Int) extends Base
+case class Dummy1688(x: Int) extends Base
+case class Dummy1689(x: Int) extends Base
+case class Dummy1690(x: Int) extends Base
+case class Dummy1691(x: Int) extends Base
+case class Dummy1692(x: Int) extends Base
+case class Dummy1693(x: Int) extends Base
+case class Dummy1694(x: Int) extends Base
+case class Dummy1695(x: Int) extends Base
+case class Dummy1696(x: Int) extends Base
+case class Dummy1697(x: Int) extends Base
+case class Dummy1698(x: Int) extends Base
+case class Dummy1699(x: Int) extends Base
+case class Dummy1700(x: Int) extends Base
+case class Dummy1701(x: Int) extends Base
+case class Dummy1702(x: Int) extends Base
+case class Dummy1703(x: Int) extends Base
+case class Dummy1704(x: Int) extends Base
+case class Dummy1705(x: Int) extends Base
+case class Dummy1706(x: Int) extends Base
+case class Dummy1707(x: Int) extends Base
+case class Dummy1708(x: Int) extends Base
+case class Dummy1709(x: Int) extends Base
+case class Dummy1710(x: Int) extends Base
+case class Dummy1711(x: Int) extends Base
+case class Dummy1712(x: Int) extends Base
+case class Dummy1713(x: Int) extends Base
+case class Dummy1714(x: Int) extends Base
+case class Dummy1715(x: Int) extends Base
+case class Dummy1716(x: Int) extends Base
+case class Dummy1717(x: Int) extends Base
+case class Dummy1718(x: Int) extends Base
+case class Dummy1719(x: Int) extends Base
+case class Dummy1720(x: Int) extends Base
+case class Dummy1721(x: Int) extends Base
+case class Dummy1722(x: Int) extends Base
+case class Dummy1723(x: Int) extends Base
+case class Dummy1724(x: Int) extends Base
+case class Dummy1725(x: Int) extends Base
+case class Dummy1726(x: Int) extends Base
+case class Dummy1727(x: Int) extends Base
+case class Dummy1728(x: Int) extends Base
+case class Dummy1729(x: Int) extends Base
+case class Dummy1730(x: Int) extends Base
+case class Dummy1731(x: Int) extends Base
+case class Dummy1732(x: Int) extends Base
+case class Dummy1733(x: Int) extends Base
+case class Dummy1734(x: Int) extends Base
+case class Dummy1735(x: Int) extends Base
+case class Dummy1736(x: Int) extends Base
+case class Dummy1737(x: Int) extends Base
+case class Dummy1738(x: Int) extends Base
+case class Dummy1739(x: Int) extends Base
+case class Dummy1740(x: Int) extends Base
+case class Dummy1741(x: Int) extends Base
+case class Dummy1742(x: Int) extends Base
+case class Dummy1743(x: Int) extends Base
+case class Dummy1744(x: Int) extends Base
+case class Dummy1745(x: Int) extends Base
+case class Dummy1746(x: Int) extends Base
+case class Dummy1747(x: Int) extends Base
+case class Dummy1748(x: Int) extends Base
+case class Dummy1749(x: Int) extends Base
+case class Dummy1750(x: Int) extends Base
+case class Dummy1751(x: Int) extends Base
+case class Dummy1752(x: Int) extends Base
+case class Dummy1753(x: Int) extends Base
+case class Dummy1754(x: Int) extends Base
+case class Dummy1755(x: Int) extends Base
+case class Dummy1756(x: Int) extends Base
+case class Dummy1757(x: Int) extends Base
+case class Dummy1758(x: Int) extends Base
+case class Dummy1759(x: Int) extends Base
+case class Dummy1760(x: Int) extends Base
+case class Dummy1761(x: Int) extends Base
+case class Dummy1762(x: Int) extends Base
+case class Dummy1763(x: Int) extends Base
+case class Dummy1764(x: Int) extends Base
+case class Dummy1765(x: Int) extends Base
+case class Dummy1766(x: Int) extends Base
+case class Dummy1767(x: Int) extends Base
+case class Dummy1768(x: Int) extends Base
+case class Dummy1769(x: Int) extends Base
+case class Dummy1770(x: Int) extends Base
+case class Dummy1771(x: Int) extends Base
+case class Dummy1772(x: Int) extends Base
+case class Dummy1773(x: Int) extends Base
+case class Dummy1774(x: Int) extends Base
+case class Dummy1775(x: Int) extends Base
+case class Dummy1776(x: Int) extends Base
+case class Dummy1777(x: Int) extends Base
+case class Dummy1778(x: Int) extends Base
+case class Dummy1779(x: Int) extends Base
+case class Dummy1780(x: Int) extends Base
+case class Dummy1781(x: Int) extends Base
+case class Dummy1782(x: Int) extends Base
+case class Dummy1783(x: Int) extends Base
+case class Dummy1784(x: Int) extends Base
+case class Dummy1785(x: Int) extends Base
+case class Dummy1786(x: Int) extends Base
+case class Dummy1787(x: Int) extends Base
+case class Dummy1788(x: Int) extends Base
+case class Dummy1789(x: Int) extends Base
+case class Dummy1790(x: Int) extends Base
+case class Dummy1791(x: Int) extends Base
+case class Dummy1792(x: Int) extends Base
+case class Dummy1793(x: Int) extends Base
+case class Dummy1794(x: Int) extends Base
+case class Dummy1795(x: Int) extends Base
+case class Dummy1796(x: Int) extends Base
+case class Dummy1797(x: Int) extends Base
+case class Dummy1798(x: Int) extends Base
+case class Dummy1799(x: Int) extends Base
+case class Dummy1800(x: Int) extends Base
+case class Dummy1801(x: Int) extends Base
+case class Dummy1802(x: Int) extends Base
+case class Dummy1803(x: Int) extends Base
+case class Dummy1804(x: Int) extends Base
+case class Dummy1805(x: Int) extends Base
+case class Dummy1806(x: Int) extends Base
+case class Dummy1807(x: Int) extends Base
+case class Dummy1808(x: Int) extends Base
+case class Dummy1809(x: Int) extends Base
+case class Dummy1810(x: Int) extends Base
+case class Dummy1811(x: Int) extends Base
+case class Dummy1812(x: Int) extends Base
+case class Dummy1813(x: Int) extends Base
+case class Dummy1814(x: Int) extends Base
+case class Dummy1815(x: Int) extends Base
+case class Dummy1816(x: Int) extends Base
+case class Dummy1817(x: Int) extends Base
+case class Dummy1818(x: Int) extends Base
+case class Dummy1819(x: Int) extends Base
+case class Dummy1820(x: Int) extends Base
+case class Dummy1821(x: Int) extends Base
+case class Dummy1822(x: Int) extends Base
+case class Dummy1823(x: Int) extends Base
+case class Dummy1824(x: Int) extends Base
+case class Dummy1825(x: Int) extends Base
+case class Dummy1826(x: Int) extends Base
+case class Dummy1827(x: Int) extends Base
+case class Dummy1828(x: Int) extends Base
+case class Dummy1829(x: Int) extends Base
+case class Dummy1830(x: Int) extends Base
+case class Dummy1831(x: Int) extends Base
+case class Dummy1832(x: Int) extends Base
+case class Dummy1833(x: Int) extends Base
+case class Dummy1834(x: Int) extends Base
+case class Dummy1835(x: Int) extends Base
+case class Dummy1836(x: Int) extends Base
+case class Dummy1837(x: Int) extends Base
+case class Dummy1838(x: Int) extends Base
+case class Dummy1839(x: Int) extends Base
+case class Dummy1840(x: Int) extends Base
+case class Dummy1841(x: Int) extends Base
+case class Dummy1842(x: Int) extends Base
+case class Dummy1843(x: Int) extends Base
+case class Dummy1844(x: Int) extends Base
+case class Dummy1845(x: Int) extends Base
+case class Dummy1846(x: Int) extends Base
+case class Dummy1847(x: Int) extends Base
+case class Dummy1848(x: Int) extends Base
+case class Dummy1849(x: Int) extends Base
+case class Dummy1850(x: Int) extends Base
+case class Dummy1851(x: Int) extends Base
+case class Dummy1852(x: Int) extends Base
+case class Dummy1853(x: Int) extends Base
+case class Dummy1854(x: Int) extends Base
+case class Dummy1855(x: Int) extends Base
+case class Dummy1856(x: Int) extends Base
+case class Dummy1857(x: Int) extends Base
+case class Dummy1858(x: Int) extends Base
+case class Dummy1859(x: Int) extends Base
+case class Dummy1860(x: Int) extends Base
+case class Dummy1861(x: Int) extends Base
+case class Dummy1862(x: Int) extends Base
+case class Dummy1863(x: Int) extends Base
+case class Dummy1864(x: Int) extends Base
+case class Dummy1865(x: Int) extends Base
+case class Dummy1866(x: Int) extends Base
+case class Dummy1867(x: Int) extends Base
+case class Dummy1868(x: Int) extends Base
+case class Dummy1869(x: Int) extends Base
+case class Dummy1870(x: Int) extends Base
+case class Dummy1871(x: Int) extends Base
+case class Dummy1872(x: Int) extends Base
+case class Dummy1873(x: Int) extends Base
+case class Dummy1874(x: Int) extends Base
+case class Dummy1875(x: Int) extends Base
+case class Dummy1876(x: Int) extends Base
+case class Dummy1877(x: Int) extends Base
+case class Dummy1878(x: Int) extends Base
+case class Dummy1879(x: Int) extends Base
+case class Dummy1880(x: Int) extends Base
+case class Dummy1881(x: Int) extends Base
+case class Dummy1882(x: Int) extends Base
+case class Dummy1883(x: Int) extends Base
+case class Dummy1884(x: Int) extends Base
+case class Dummy1885(x: Int) extends Base
+case class Dummy1886(x: Int) extends Base
+case class Dummy1887(x: Int) extends Base
+case class Dummy1888(x: Int) extends Base
+case class Dummy1889(x: Int) extends Base
+case class Dummy1890(x: Int) extends Base
+case class Dummy1891(x: Int) extends Base
+case class Dummy1892(x: Int) extends Base
+case class Dummy1893(x: Int) extends Base
+case class Dummy1894(x: Int) extends Base
+case class Dummy1895(x: Int) extends Base
+case class Dummy1896(x: Int) extends Base
+case class Dummy1897(x: Int) extends Base
+case class Dummy1898(x: Int) extends Base
+case class Dummy1899(x: Int) extends Base
+case class Dummy1900(x: Int) extends Base
+case class Dummy1901(x: Int) extends Base
+case class Dummy1902(x: Int) extends Base
+case class Dummy1903(x: Int) extends Base
+case class Dummy1904(x: Int) extends Base
+case class Dummy1905(x: Int) extends Base
+case class Dummy1906(x: Int) extends Base
+case class Dummy1907(x: Int) extends Base
+case class Dummy1908(x: Int) extends Base
+case class Dummy1909(x: Int) extends Base
+case class Dummy1910(x: Int) extends Base
+case class Dummy1911(x: Int) extends Base
+case class Dummy1912(x: Int) extends Base
+case class Dummy1913(x: Int) extends Base
+case class Dummy1914(x: Int) extends Base
+case class Dummy1915(x: Int) extends Base
+case class Dummy1916(x: Int) extends Base
+case class Dummy1917(x: Int) extends Base
+case class Dummy1918(x: Int) extends Base
+case class Dummy1919(x: Int) extends Base
+case class Dummy1920(x: Int) extends Base
+case class Dummy1921(x: Int) extends Base
+case class Dummy1922(x: Int) extends Base
+case class Dummy1923(x: Int) extends Base
+case class Dummy1924(x: Int) extends Base
+case class Dummy1925(x: Int) extends Base
+case class Dummy1926(x: Int) extends Base
+case class Dummy1927(x: Int) extends Base
+case class Dummy1928(x: Int) extends Base
+case class Dummy1929(x: Int) extends Base
+case class Dummy1930(x: Int) extends Base
+case class Dummy1931(x: Int) extends Base
+case class Dummy1932(x: Int) extends Base
+case class Dummy1933(x: Int) extends Base
+case class Dummy1934(x: Int) extends Base
+case class Dummy1935(x: Int) extends Base
+case class Dummy1936(x: Int) extends Base
+case class Dummy1937(x: Int) extends Base
+case class Dummy1938(x: Int) extends Base
+case class Dummy1939(x: Int) extends Base
+case class Dummy1940(x: Int) extends Base
+case class Dummy1941(x: Int) extends Base
+case class Dummy1942(x: Int) extends Base
+case class Dummy1943(x: Int) extends Base
+case class Dummy1944(x: Int) extends Base
+case class Dummy1945(x: Int) extends Base
+case class Dummy1946(x: Int) extends Base
+case class Dummy1947(x: Int) extends Base
+case class Dummy1948(x: Int) extends Base
+case class Dummy1949(x: Int) extends Base
+case class Dummy1950(x: Int) extends Base
+case class Dummy1951(x: Int) extends Base
+case class Dummy1952(x: Int) extends Base
+case class Dummy1953(x: Int) extends Base
+case class Dummy1954(x: Int) extends Base
+case class Dummy1955(x: Int) extends Base
+case class Dummy1956(x: Int) extends Base
+case class Dummy1957(x: Int) extends Base
+case class Dummy1958(x: Int) extends Base
+case class Dummy1959(x: Int) extends Base
+case class Dummy1960(x: Int) extends Base
+case class Dummy1961(x: Int) extends Base
+case class Dummy1962(x: Int) extends Base
+case class Dummy1963(x: Int) extends Base
+case class Dummy1964(x: Int) extends Base
+case class Dummy1965(x: Int) extends Base
+case class Dummy1966(x: Int) extends Base
+case class Dummy1967(x: Int) extends Base
+case class Dummy1968(x: Int) extends Base
+case class Dummy1969(x: Int) extends Base
+case class Dummy1970(x: Int) extends Base
+case class Dummy1971(x: Int) extends Base
+case class Dummy1972(x: Int) extends Base
+case class Dummy1973(x: Int) extends Base
+case class Dummy1974(x: Int) extends Base
+case class Dummy1975(x: Int) extends Base
+case class Dummy1976(x: Int) extends Base
+case class Dummy1977(x: Int) extends Base
+case class Dummy1978(x: Int) extends Base
+case class Dummy1979(x: Int) extends Base
+case class Dummy1980(x: Int) extends Base
+case class Dummy1981(x: Int) extends Base
+case class Dummy1982(x: Int) extends Base
+case class Dummy1983(x: Int) extends Base
+case class Dummy1984(x: Int) extends Base
+case class Dummy1985(x: Int) extends Base
+case class Dummy1986(x: Int) extends Base
+case class Dummy1987(x: Int) extends Base
+case class Dummy1988(x: Int) extends Base
+case class Dummy1989(x: Int) extends Base
+case class Dummy1990(x: Int) extends Base
+case class Dummy1991(x: Int) extends Base
+case class Dummy1992(x: Int) extends Base
+case class Dummy1993(x: Int) extends Base
+case class Dummy1994(x: Int) extends Base
+case class Dummy1995(x: Int) extends Base
+case class Dummy1996(x: Int) extends Base
+case class Dummy1997(x: Int) extends Base
+case class Dummy1998(x: Int) extends Base
+case class Dummy1999(x: Int) extends Base
+def test(y: Base) = y match {
+ case Dummy0(p) => p
+ case Dummy1(p) => p
+ case Dummy2(p) => p
+ case Dummy3(p) => p
+ case Dummy4(p) => p
+ case Dummy5(p) => p
+ case Dummy6(p) => p
+ case Dummy7(p) => p
+ case Dummy8(p) => p
+ case Dummy9(p) => p
+ case Dummy10(p) => p
+ case Dummy11(p) => p
+ case Dummy12(p) => p
+ case Dummy13(p) => p
+ case Dummy14(p) => p
+ case Dummy15(p) => p
+ case Dummy16(p) => p
+ case Dummy17(p) => p
+ case Dummy18(p) => p
+ case Dummy19(p) => p
+ case Dummy20(p) => p
+ case Dummy21(p) => p
+ case Dummy22(p) => p
+ case Dummy23(p) => p
+ case Dummy24(p) => p
+ case Dummy25(p) => p
+ case Dummy26(p) => p
+ case Dummy27(p) => p
+ case Dummy28(p) => p
+ case Dummy29(p) => p
+ case Dummy30(p) => p
+ case Dummy31(p) => p
+ case Dummy32(p) => p
+ case Dummy33(p) => p
+ case Dummy34(p) => p
+ case Dummy35(p) => p
+ case Dummy36(p) => p
+ case Dummy37(p) => p
+ case Dummy38(p) => p
+ case Dummy39(p) => p
+ case Dummy40(p) => p
+ case Dummy41(p) => p
+ case Dummy42(p) => p
+ case Dummy43(p) => p
+ case Dummy44(p) => p
+ case Dummy45(p) => p
+ case Dummy46(p) => p
+ case Dummy47(p) => p
+ case Dummy48(p) => p
+ case Dummy49(p) => p
+ case Dummy50(p) => p
+ case Dummy51(p) => p
+ case Dummy52(p) => p
+ case Dummy53(p) => p
+ case Dummy54(p) => p
+ case Dummy55(p) => p
+ case Dummy56(p) => p
+ case Dummy57(p) => p
+ case Dummy58(p) => p
+ case Dummy59(p) => p
+ case Dummy60(p) => p
+ case Dummy61(p) => p
+ case Dummy62(p) => p
+ case Dummy63(p) => p
+ case Dummy64(p) => p
+ case Dummy65(p) => p
+ case Dummy66(p) => p
+ case Dummy67(p) => p
+ case Dummy68(p) => p
+ case Dummy69(p) => p
+ case Dummy70(p) => p
+ case Dummy71(p) => p
+ case Dummy72(p) => p
+ case Dummy73(p) => p
+ case Dummy74(p) => p
+ case Dummy75(p) => p
+ case Dummy76(p) => p
+ case Dummy77(p) => p
+ case Dummy78(p) => p
+ case Dummy79(p) => p
+ case Dummy80(p) => p
+ case Dummy81(p) => p
+ case Dummy82(p) => p
+ case Dummy83(p) => p
+ case Dummy84(p) => p
+ case Dummy85(p) => p
+ case Dummy86(p) => p
+ case Dummy87(p) => p
+ case Dummy88(p) => p
+ case Dummy89(p) => p
+ case Dummy90(p) => p
+ case Dummy91(p) => p
+ case Dummy92(p) => p
+ case Dummy93(p) => p
+ case Dummy94(p) => p
+ case Dummy95(p) => p
+ case Dummy96(p) => p
+ case Dummy97(p) => p
+ case Dummy98(p) => p
+ case Dummy99(p) => p
+ case Dummy100(p) => p
+ case Dummy101(p) => p
+ case Dummy102(p) => p
+ case Dummy103(p) => p
+ case Dummy104(p) => p
+ case Dummy105(p) => p
+ case Dummy106(p) => p
+ case Dummy107(p) => p
+ case Dummy108(p) => p
+ case Dummy109(p) => p
+ case Dummy110(p) => p
+ case Dummy111(p) => p
+ case Dummy112(p) => p
+ case Dummy113(p) => p
+ case Dummy114(p) => p
+ case Dummy115(p) => p
+ case Dummy116(p) => p
+ case Dummy117(p) => p
+ case Dummy118(p) => p
+ case Dummy119(p) => p
+ case Dummy120(p) => p
+ case Dummy121(p) => p
+ case Dummy122(p) => p
+ case Dummy123(p) => p
+ case Dummy124(p) => p
+ case Dummy125(p) => p
+ case Dummy126(p) => p
+ case Dummy127(p) => p
+ case Dummy128(p) => p
+ case Dummy129(p) => p
+ case Dummy130(p) => p
+ case Dummy131(p) => p
+ case Dummy132(p) => p
+ case Dummy133(p) => p
+ case Dummy134(p) => p
+ case Dummy135(p) => p
+ case Dummy136(p) => p
+ case Dummy137(p) => p
+ case Dummy138(p) => p
+ case Dummy139(p) => p
+ case Dummy140(p) => p
+ case Dummy141(p) => p
+ case Dummy142(p) => p
+ case Dummy143(p) => p
+ case Dummy144(p) => p
+ case Dummy145(p) => p
+ case Dummy146(p) => p
+ case Dummy147(p) => p
+ case Dummy148(p) => p
+ case Dummy149(p) => p
+ case Dummy150(p) => p
+ case Dummy151(p) => p
+ case Dummy152(p) => p
+ case Dummy153(p) => p
+ case Dummy154(p) => p
+ case Dummy155(p) => p
+ case Dummy156(p) => p
+ case Dummy157(p) => p
+ case Dummy158(p) => p
+ case Dummy159(p) => p
+ case Dummy160(p) => p
+ case Dummy161(p) => p
+ case Dummy162(p) => p
+ case Dummy163(p) => p
+ case Dummy164(p) => p
+ case Dummy165(p) => p
+ case Dummy166(p) => p
+ case Dummy167(p) => p
+ case Dummy168(p) => p
+ case Dummy169(p) => p
+ case Dummy170(p) => p
+ case Dummy171(p) => p
+ case Dummy172(p) => p
+ case Dummy173(p) => p
+ case Dummy174(p) => p
+ case Dummy175(p) => p
+ case Dummy176(p) => p
+ case Dummy177(p) => p
+ case Dummy178(p) => p
+ case Dummy179(p) => p
+ case Dummy180(p) => p
+ case Dummy181(p) => p
+ case Dummy182(p) => p
+ case Dummy183(p) => p
+ case Dummy184(p) => p
+ case Dummy185(p) => p
+ case Dummy186(p) => p
+ case Dummy187(p) => p
+ case Dummy188(p) => p
+ case Dummy189(p) => p
+ case Dummy190(p) => p
+ case Dummy191(p) => p
+ case Dummy192(p) => p
+ case Dummy193(p) => p
+ case Dummy194(p) => p
+ case Dummy195(p) => p
+ case Dummy196(p) => p
+ case Dummy197(p) => p
+ case Dummy198(p) => p
+ case Dummy199(p) => p
+ case Dummy200(p) => p
+ case Dummy201(p) => p
+ case Dummy202(p) => p
+ case Dummy203(p) => p
+ case Dummy204(p) => p
+ case Dummy205(p) => p
+ case Dummy206(p) => p
+ case Dummy207(p) => p
+ case Dummy208(p) => p
+ case Dummy209(p) => p
+ case Dummy210(p) => p
+ case Dummy211(p) => p
+ case Dummy212(p) => p
+ case Dummy213(p) => p
+ case Dummy214(p) => p
+ case Dummy215(p) => p
+ case Dummy216(p) => p
+ case Dummy217(p) => p
+ case Dummy218(p) => p
+ case Dummy219(p) => p
+ case Dummy220(p) => p
+ case Dummy221(p) => p
+ case Dummy222(p) => p
+ case Dummy223(p) => p
+ case Dummy224(p) => p
+ case Dummy225(p) => p
+ case Dummy226(p) => p
+ case Dummy227(p) => p
+ case Dummy228(p) => p
+ case Dummy229(p) => p
+ case Dummy230(p) => p
+ case Dummy231(p) => p
+ case Dummy232(p) => p
+ case Dummy233(p) => p
+ case Dummy234(p) => p
+ case Dummy235(p) => p
+ case Dummy236(p) => p
+ case Dummy237(p) => p
+ case Dummy238(p) => p
+ case Dummy239(p) => p
+ case Dummy240(p) => p
+ case Dummy241(p) => p
+ case Dummy242(p) => p
+ case Dummy243(p) => p
+ case Dummy244(p) => p
+ case Dummy245(p) => p
+ case Dummy246(p) => p
+ case Dummy247(p) => p
+ case Dummy248(p) => p
+ case Dummy249(p) => p
+ case Dummy250(p) => p
+ case Dummy251(p) => p
+ case Dummy252(p) => p
+ case Dummy253(p) => p
+ case Dummy254(p) => p
+ case Dummy255(p) => p
+ case Dummy256(p) => p
+ case Dummy257(p) => p
+ case Dummy258(p) => p
+ case Dummy259(p) => p
+ case Dummy260(p) => p
+ case Dummy261(p) => p
+ case Dummy262(p) => p
+ case Dummy263(p) => p
+ case Dummy264(p) => p
+ case Dummy265(p) => p
+ case Dummy266(p) => p
+ case Dummy267(p) => p
+ case Dummy268(p) => p
+ case Dummy269(p) => p
+ case Dummy270(p) => p
+ case Dummy271(p) => p
+ case Dummy272(p) => p
+ case Dummy273(p) => p
+ case Dummy274(p) => p
+ case Dummy275(p) => p
+ case Dummy276(p) => p
+ case Dummy277(p) => p
+ case Dummy278(p) => p
+ case Dummy279(p) => p
+ case Dummy280(p) => p
+ case Dummy281(p) => p
+ case Dummy282(p) => p
+ case Dummy283(p) => p
+ case Dummy284(p) => p
+ case Dummy285(p) => p
+ case Dummy286(p) => p
+ case Dummy287(p) => p
+ case Dummy288(p) => p
+ case Dummy289(p) => p
+ case Dummy290(p) => p
+ case Dummy291(p) => p
+ case Dummy292(p) => p
+ case Dummy293(p) => p
+ case Dummy294(p) => p
+ case Dummy295(p) => p
+ case Dummy296(p) => p
+ case Dummy297(p) => p
+ case Dummy298(p) => p
+ case Dummy299(p) => p
+ case Dummy300(p) => p
+ case Dummy301(p) => p
+ case Dummy302(p) => p
+ case Dummy303(p) => p
+ case Dummy304(p) => p
+ case Dummy305(p) => p
+ case Dummy306(p) => p
+ case Dummy307(p) => p
+ case Dummy308(p) => p
+ case Dummy309(p) => p
+ case Dummy310(p) => p
+ case Dummy311(p) => p
+ case Dummy312(p) => p
+ case Dummy313(p) => p
+ case Dummy314(p) => p
+ case Dummy315(p) => p
+ case Dummy316(p) => p
+ case Dummy317(p) => p
+ case Dummy318(p) => p
+ case Dummy319(p) => p
+ case Dummy320(p) => p
+ case Dummy321(p) => p
+ case Dummy322(p) => p
+ case Dummy323(p) => p
+ case Dummy324(p) => p
+ case Dummy325(p) => p
+ case Dummy326(p) => p
+ case Dummy327(p) => p
+ case Dummy328(p) => p
+ case Dummy329(p) => p
+ case Dummy330(p) => p
+ case Dummy331(p) => p
+ case Dummy332(p) => p
+ case Dummy333(p) => p
+ case Dummy334(p) => p
+ case Dummy335(p) => p
+ case Dummy336(p) => p
+ case Dummy337(p) => p
+ case Dummy338(p) => p
+ case Dummy339(p) => p
+ case Dummy340(p) => p
+ case Dummy341(p) => p
+ case Dummy342(p) => p
+ case Dummy343(p) => p
+ case Dummy344(p) => p
+ case Dummy345(p) => p
+ case Dummy346(p) => p
+ case Dummy347(p) => p
+ case Dummy348(p) => p
+ case Dummy349(p) => p
+ case Dummy350(p) => p
+ case Dummy351(p) => p
+ case Dummy352(p) => p
+ case Dummy353(p) => p
+ case Dummy354(p) => p
+ case Dummy355(p) => p
+ case Dummy356(p) => p
+ case Dummy357(p) => p
+ case Dummy358(p) => p
+ case Dummy359(p) => p
+ case Dummy360(p) => p
+ case Dummy361(p) => p
+ case Dummy362(p) => p
+ case Dummy363(p) => p
+ case Dummy364(p) => p
+ case Dummy365(p) => p
+ case Dummy366(p) => p
+ case Dummy367(p) => p
+ case Dummy368(p) => p
+ case Dummy369(p) => p
+ case Dummy370(p) => p
+ case Dummy371(p) => p
+ case Dummy372(p) => p
+ case Dummy373(p) => p
+ case Dummy374(p) => p
+ case Dummy375(p) => p
+ case Dummy376(p) => p
+ case Dummy377(p) => p
+ case Dummy378(p) => p
+ case Dummy379(p) => p
+ case Dummy380(p) => p
+ case Dummy381(p) => p
+ case Dummy382(p) => p
+ case Dummy383(p) => p
+ case Dummy384(p) => p
+ case Dummy385(p) => p
+ case Dummy386(p) => p
+ case Dummy387(p) => p
+ case Dummy388(p) => p
+ case Dummy389(p) => p
+ case Dummy390(p) => p
+ case Dummy391(p) => p
+ case Dummy392(p) => p
+ case Dummy393(p) => p
+ case Dummy394(p) => p
+ case Dummy395(p) => p
+ case Dummy396(p) => p
+ case Dummy397(p) => p
+ case Dummy398(p) => p
+ case Dummy399(p) => p
+ case Dummy400(p) => p
+ case Dummy401(p) => p
+ case Dummy402(p) => p
+ case Dummy403(p) => p
+ case Dummy404(p) => p
+ case Dummy405(p) => p
+ case Dummy406(p) => p
+ case Dummy407(p) => p
+ case Dummy408(p) => p
+ case Dummy409(p) => p
+ case Dummy410(p) => p
+ case Dummy411(p) => p
+ case Dummy412(p) => p
+ case Dummy413(p) => p
+ case Dummy414(p) => p
+ case Dummy415(p) => p
+ case Dummy416(p) => p
+ case Dummy417(p) => p
+ case Dummy418(p) => p
+ case Dummy419(p) => p
+ case Dummy420(p) => p
+ case Dummy421(p) => p
+ case Dummy422(p) => p
+ case Dummy423(p) => p
+ case Dummy424(p) => p
+ case Dummy425(p) => p
+ case Dummy426(p) => p
+ case Dummy427(p) => p
+ case Dummy428(p) => p
+ case Dummy429(p) => p
+ case Dummy430(p) => p
+ case Dummy431(p) => p
+ case Dummy432(p) => p
+ case Dummy433(p) => p
+ case Dummy434(p) => p
+ case Dummy435(p) => p
+ case Dummy436(p) => p
+ case Dummy437(p) => p
+ case Dummy438(p) => p
+ case Dummy439(p) => p
+ case Dummy440(p) => p
+ case Dummy441(p) => p
+ case Dummy442(p) => p
+ case Dummy443(p) => p
+ case Dummy444(p) => p
+ case Dummy445(p) => p
+ case Dummy446(p) => p
+ case Dummy447(p) => p
+ case Dummy448(p) => p
+ case Dummy449(p) => p
+ case Dummy450(p) => p
+ case Dummy451(p) => p
+ case Dummy452(p) => p
+ case Dummy453(p) => p
+ case Dummy454(p) => p
+ case Dummy455(p) => p
+ case Dummy456(p) => p
+ case Dummy457(p) => p
+ case Dummy458(p) => p
+ case Dummy459(p) => p
+ case Dummy460(p) => p
+ case Dummy461(p) => p
+ case Dummy462(p) => p
+ case Dummy463(p) => p
+ case Dummy464(p) => p
+ case Dummy465(p) => p
+ case Dummy466(p) => p
+ case Dummy467(p) => p
+ case Dummy468(p) => p
+ case Dummy469(p) => p
+ case Dummy470(p) => p
+ case Dummy471(p) => p
+ case Dummy472(p) => p
+ case Dummy473(p) => p
+ case Dummy474(p) => p
+ case Dummy475(p) => p
+ case Dummy476(p) => p
+ case Dummy477(p) => p
+ case Dummy478(p) => p
+ case Dummy479(p) => p
+ case Dummy480(p) => p
+ case Dummy481(p) => p
+ case Dummy482(p) => p
+ case Dummy483(p) => p
+ case Dummy484(p) => p
+ case Dummy485(p) => p
+ case Dummy486(p) => p
+ case Dummy487(p) => p
+ case Dummy488(p) => p
+ case Dummy489(p) => p
+ case Dummy490(p) => p
+ case Dummy491(p) => p
+ case Dummy492(p) => p
+ case Dummy493(p) => p
+ case Dummy494(p) => p
+ case Dummy495(p) => p
+ case Dummy496(p) => p
+ case Dummy497(p) => p
+ case Dummy498(p) => p
+ case Dummy499(p) => p
+ case Dummy500(p) => p
+ case Dummy501(p) => p
+ case Dummy502(p) => p
+ case Dummy503(p) => p
+ case Dummy504(p) => p
+ case Dummy505(p) => p
+ case Dummy506(p) => p
+ case Dummy507(p) => p
+ case Dummy508(p) => p
+ case Dummy509(p) => p
+ case Dummy510(p) => p
+ case Dummy511(p) => p
+ case Dummy512(p) => p
+ case Dummy513(p) => p
+ case Dummy514(p) => p
+ case Dummy515(p) => p
+ case Dummy516(p) => p
+ case Dummy517(p) => p
+ case Dummy518(p) => p
+ case Dummy519(p) => p
+ case Dummy520(p) => p
+ case Dummy521(p) => p
+ case Dummy522(p) => p
+ case Dummy523(p) => p
+ case Dummy524(p) => p
+ case Dummy525(p) => p
+ case Dummy526(p) => p
+ case Dummy527(p) => p
+ case Dummy528(p) => p
+ case Dummy529(p) => p
+ case Dummy530(p) => p
+ case Dummy531(p) => p
+ case Dummy532(p) => p
+ case Dummy533(p) => p
+ case Dummy534(p) => p
+ case Dummy535(p) => p
+ case Dummy536(p) => p
+ case Dummy537(p) => p
+ case Dummy538(p) => p
+ case Dummy539(p) => p
+ case Dummy540(p) => p
+ case Dummy541(p) => p
+ case Dummy542(p) => p
+ case Dummy543(p) => p
+ case Dummy544(p) => p
+ case Dummy545(p) => p
+ case Dummy546(p) => p
+ case Dummy547(p) => p
+ case Dummy548(p) => p
+ case Dummy549(p) => p
+ case Dummy550(p) => p
+ case Dummy551(p) => p
+ case Dummy552(p) => p
+ case Dummy553(p) => p
+ case Dummy554(p) => p
+ case Dummy555(p) => p
+ case Dummy556(p) => p
+ case Dummy557(p) => p
+ case Dummy558(p) => p
+ case Dummy559(p) => p
+ case Dummy560(p) => p
+ case Dummy561(p) => p
+ case Dummy562(p) => p
+ case Dummy563(p) => p
+ case Dummy564(p) => p
+ case Dummy565(p) => p
+ case Dummy566(p) => p
+ case Dummy567(p) => p
+ case Dummy568(p) => p
+ case Dummy569(p) => p
+ case Dummy570(p) => p
+ case Dummy571(p) => p
+ case Dummy572(p) => p
+ case Dummy573(p) => p
+ case Dummy574(p) => p
+ case Dummy575(p) => p
+ case Dummy576(p) => p
+ case Dummy577(p) => p
+ case Dummy578(p) => p
+ case Dummy579(p) => p
+ case Dummy580(p) => p
+ case Dummy581(p) => p
+ case Dummy582(p) => p
+ case Dummy583(p) => p
+ case Dummy584(p) => p
+ case Dummy585(p) => p
+ case Dummy586(p) => p
+ case Dummy587(p) => p
+ case Dummy588(p) => p
+ case Dummy589(p) => p
+ case Dummy590(p) => p
+ case Dummy591(p) => p
+ case Dummy592(p) => p
+ case Dummy593(p) => p
+ case Dummy594(p) => p
+ case Dummy595(p) => p
+ case Dummy596(p) => p
+ case Dummy597(p) => p
+ case Dummy598(p) => p
+ case Dummy599(p) => p
+ case Dummy600(p) => p
+ case Dummy601(p) => p
+ case Dummy602(p) => p
+ case Dummy603(p) => p
+ case Dummy604(p) => p
+ case Dummy605(p) => p
+ case Dummy606(p) => p
+ case Dummy607(p) => p
+ case Dummy608(p) => p
+ case Dummy609(p) => p
+ case Dummy610(p) => p
+ case Dummy611(p) => p
+ case Dummy612(p) => p
+ case Dummy613(p) => p
+ case Dummy614(p) => p
+ case Dummy615(p) => p
+ case Dummy616(p) => p
+ case Dummy617(p) => p
+ case Dummy618(p) => p
+ case Dummy619(p) => p
+ case Dummy620(p) => p
+ case Dummy621(p) => p
+ case Dummy622(p) => p
+ case Dummy623(p) => p
+ case Dummy624(p) => p
+ case Dummy625(p) => p
+ case Dummy626(p) => p
+ case Dummy627(p) => p
+ case Dummy628(p) => p
+ case Dummy629(p) => p
+ case Dummy630(p) => p
+ case Dummy631(p) => p
+ case Dummy632(p) => p
+ case Dummy633(p) => p
+ case Dummy634(p) => p
+ case Dummy635(p) => p
+ case Dummy636(p) => p
+ case Dummy637(p) => p
+ case Dummy638(p) => p
+ case Dummy639(p) => p
+ case Dummy640(p) => p
+ case Dummy641(p) => p
+ case Dummy642(p) => p
+ case Dummy643(p) => p
+ case Dummy644(p) => p
+ case Dummy645(p) => p
+ case Dummy646(p) => p
+ case Dummy647(p) => p
+ case Dummy648(p) => p
+ case Dummy649(p) => p
+ case Dummy650(p) => p
+ case Dummy651(p) => p
+ case Dummy652(p) => p
+ case Dummy653(p) => p
+ case Dummy654(p) => p
+ case Dummy655(p) => p
+ case Dummy656(p) => p
+ case Dummy657(p) => p
+ case Dummy658(p) => p
+ case Dummy659(p) => p
+ case Dummy660(p) => p
+ case Dummy661(p) => p
+ case Dummy662(p) => p
+ case Dummy663(p) => p
+ case Dummy664(p) => p
+ case Dummy665(p) => p
+ case Dummy666(p) => p
+ case Dummy667(p) => p
+ case Dummy668(p) => p
+ case Dummy669(p) => p
+ case Dummy670(p) => p
+ case Dummy671(p) => p
+ case Dummy672(p) => p
+ case Dummy673(p) => p
+ case Dummy674(p) => p
+ case Dummy675(p) => p
+ case Dummy676(p) => p
+ case Dummy677(p) => p
+ case Dummy678(p) => p
+ case Dummy679(p) => p
+ case Dummy680(p) => p
+ case Dummy681(p) => p
+ case Dummy682(p) => p
+ case Dummy683(p) => p
+ case Dummy684(p) => p
+ case Dummy685(p) => p
+ case Dummy686(p) => p
+ case Dummy687(p) => p
+ case Dummy688(p) => p
+ case Dummy689(p) => p
+ case Dummy690(p) => p
+ case Dummy691(p) => p
+ case Dummy692(p) => p
+ case Dummy693(p) => p
+ case Dummy694(p) => p
+ case Dummy695(p) => p
+ case Dummy696(p) => p
+ case Dummy697(p) => p
+ case Dummy698(p) => p
+ case Dummy699(p) => p
+ case Dummy700(p) => p
+ case Dummy701(p) => p
+ case Dummy702(p) => p
+ case Dummy703(p) => p
+ case Dummy704(p) => p
+ case Dummy705(p) => p
+ case Dummy706(p) => p
+ case Dummy707(p) => p
+ case Dummy708(p) => p
+ case Dummy709(p) => p
+ case Dummy710(p) => p
+ case Dummy711(p) => p
+ case Dummy712(p) => p
+ case Dummy713(p) => p
+ case Dummy714(p) => p
+ case Dummy715(p) => p
+ case Dummy716(p) => p
+ case Dummy717(p) => p
+ case Dummy718(p) => p
+ case Dummy719(p) => p
+ case Dummy720(p) => p
+ case Dummy721(p) => p
+ case Dummy722(p) => p
+ case Dummy723(p) => p
+ case Dummy724(p) => p
+ case Dummy725(p) => p
+ case Dummy726(p) => p
+ case Dummy727(p) => p
+ case Dummy728(p) => p
+ case Dummy729(p) => p
+ case Dummy730(p) => p
+ case Dummy731(p) => p
+ case Dummy732(p) => p
+ case Dummy733(p) => p
+ case Dummy734(p) => p
+ case Dummy735(p) => p
+ case Dummy736(p) => p
+ case Dummy737(p) => p
+ case Dummy738(p) => p
+ case Dummy739(p) => p
+ case Dummy740(p) => p
+ case Dummy741(p) => p
+ case Dummy742(p) => p
+ case Dummy743(p) => p
+ case Dummy744(p) => p
+ case Dummy745(p) => p
+ case Dummy746(p) => p
+ case Dummy747(p) => p
+ case Dummy748(p) => p
+ case Dummy749(p) => p
+ case Dummy750(p) => p
+ case Dummy751(p) => p
+ case Dummy752(p) => p
+ case Dummy753(p) => p
+ case Dummy754(p) => p
+ case Dummy755(p) => p
+ case Dummy756(p) => p
+ case Dummy757(p) => p
+ case Dummy758(p) => p
+ case Dummy759(p) => p
+ case Dummy760(p) => p
+ case Dummy761(p) => p
+ case Dummy762(p) => p
+ case Dummy763(p) => p
+ case Dummy764(p) => p
+ case Dummy765(p) => p
+ case Dummy766(p) => p
+ case Dummy767(p) => p
+ case Dummy768(p) => p
+ case Dummy769(p) => p
+ case Dummy770(p) => p
+ case Dummy771(p) => p
+ case Dummy772(p) => p
+ case Dummy773(p) => p
+ case Dummy774(p) => p
+ case Dummy775(p) => p
+ case Dummy776(p) => p
+ case Dummy777(p) => p
+ case Dummy778(p) => p
+ case Dummy779(p) => p
+ case Dummy780(p) => p
+ case Dummy781(p) => p
+ case Dummy782(p) => p
+ case Dummy783(p) => p
+ case Dummy784(p) => p
+ case Dummy785(p) => p
+ case Dummy786(p) => p
+ case Dummy787(p) => p
+ case Dummy788(p) => p
+ case Dummy789(p) => p
+ case Dummy790(p) => p
+ case Dummy791(p) => p
+ case Dummy792(p) => p
+ case Dummy793(p) => p
+ case Dummy794(p) => p
+ case Dummy795(p) => p
+ case Dummy796(p) => p
+ case Dummy797(p) => p
+ case Dummy798(p) => p
+ case Dummy799(p) => p
+ case Dummy800(p) => p
+ case Dummy801(p) => p
+ case Dummy802(p) => p
+ case Dummy803(p) => p
+ case Dummy804(p) => p
+ case Dummy805(p) => p
+ case Dummy806(p) => p
+ case Dummy807(p) => p
+ case Dummy808(p) => p
+ case Dummy809(p) => p
+ case Dummy810(p) => p
+ case Dummy811(p) => p
+ case Dummy812(p) => p
+ case Dummy813(p) => p
+ case Dummy814(p) => p
+ case Dummy815(p) => p
+ case Dummy816(p) => p
+ case Dummy817(p) => p
+ case Dummy818(p) => p
+ case Dummy819(p) => p
+ case Dummy820(p) => p
+ case Dummy821(p) => p
+ case Dummy822(p) => p
+ case Dummy823(p) => p
+ case Dummy824(p) => p
+ case Dummy825(p) => p
+ case Dummy826(p) => p
+ case Dummy827(p) => p
+ case Dummy828(p) => p
+ case Dummy829(p) => p
+ case Dummy830(p) => p
+ case Dummy831(p) => p
+ case Dummy832(p) => p
+ case Dummy833(p) => p
+ case Dummy834(p) => p
+ case Dummy835(p) => p
+ case Dummy836(p) => p
+ case Dummy837(p) => p
+ case Dummy838(p) => p
+ case Dummy839(p) => p
+ case Dummy840(p) => p
+ case Dummy841(p) => p
+ case Dummy842(p) => p
+ case Dummy843(p) => p
+ case Dummy844(p) => p
+ case Dummy845(p) => p
+ case Dummy846(p) => p
+ case Dummy847(p) => p
+ case Dummy848(p) => p
+ case Dummy849(p) => p
+ case Dummy850(p) => p
+ case Dummy851(p) => p
+ case Dummy852(p) => p
+ case Dummy853(p) => p
+ case Dummy854(p) => p
+ case Dummy855(p) => p
+ case Dummy856(p) => p
+ case Dummy857(p) => p
+ case Dummy858(p) => p
+ case Dummy859(p) => p
+ case Dummy860(p) => p
+ case Dummy861(p) => p
+ case Dummy862(p) => p
+ case Dummy863(p) => p
+ case Dummy864(p) => p
+ case Dummy865(p) => p
+ case Dummy866(p) => p
+ case Dummy867(p) => p
+ case Dummy868(p) => p
+ case Dummy869(p) => p
+ case Dummy870(p) => p
+ case Dummy871(p) => p
+ case Dummy872(p) => p
+ case Dummy873(p) => p
+ case Dummy874(p) => p
+ case Dummy875(p) => p
+ case Dummy876(p) => p
+ case Dummy877(p) => p
+ case Dummy878(p) => p
+ case Dummy879(p) => p
+ case Dummy880(p) => p
+ case Dummy881(p) => p
+ case Dummy882(p) => p
+ case Dummy883(p) => p
+ case Dummy884(p) => p
+ case Dummy885(p) => p
+ case Dummy886(p) => p
+ case Dummy887(p) => p
+ case Dummy888(p) => p
+ case Dummy889(p) => p
+ case Dummy890(p) => p
+ case Dummy891(p) => p
+ case Dummy892(p) => p
+ case Dummy893(p) => p
+ case Dummy894(p) => p
+ case Dummy895(p) => p
+ case Dummy896(p) => p
+ case Dummy897(p) => p
+ case Dummy898(p) => p
+ case Dummy899(p) => p
+ case Dummy900(p) => p
+ case Dummy901(p) => p
+ case Dummy902(p) => p
+ case Dummy903(p) => p
+ case Dummy904(p) => p
+ case Dummy905(p) => p
+ case Dummy906(p) => p
+ case Dummy907(p) => p
+ case Dummy908(p) => p
+ case Dummy909(p) => p
+ case Dummy910(p) => p
+ case Dummy911(p) => p
+ case Dummy912(p) => p
+ case Dummy913(p) => p
+ case Dummy914(p) => p
+ case Dummy915(p) => p
+ case Dummy916(p) => p
+ case Dummy917(p) => p
+ case Dummy918(p) => p
+ case Dummy919(p) => p
+ case Dummy920(p) => p
+ case Dummy921(p) => p
+ case Dummy922(p) => p
+ case Dummy923(p) => p
+ case Dummy924(p) => p
+ case Dummy925(p) => p
+ case Dummy926(p) => p
+ case Dummy927(p) => p
+ case Dummy928(p) => p
+ case Dummy929(p) => p
+ case Dummy930(p) => p
+ case Dummy931(p) => p
+ case Dummy932(p) => p
+ case Dummy933(p) => p
+ case Dummy934(p) => p
+ case Dummy935(p) => p
+ case Dummy936(p) => p
+ case Dummy937(p) => p
+ case Dummy938(p) => p
+ case Dummy939(p) => p
+ case Dummy940(p) => p
+ case Dummy941(p) => p
+ case Dummy942(p) => p
+ case Dummy943(p) => p
+ case Dummy944(p) => p
+ case Dummy945(p) => p
+ case Dummy946(p) => p
+ case Dummy947(p) => p
+ case Dummy948(p) => p
+ case Dummy949(p) => p
+ case Dummy950(p) => p
+ case Dummy951(p) => p
+ case Dummy952(p) => p
+ case Dummy953(p) => p
+ case Dummy954(p) => p
+ case Dummy955(p) => p
+ case Dummy956(p) => p
+ case Dummy957(p) => p
+ case Dummy958(p) => p
+ case Dummy959(p) => p
+ case Dummy960(p) => p
+ case Dummy961(p) => p
+ case Dummy962(p) => p
+ case Dummy963(p) => p
+ case Dummy964(p) => p
+ case Dummy965(p) => p
+ case Dummy966(p) => p
+ case Dummy967(p) => p
+ case Dummy968(p) => p
+ case Dummy969(p) => p
+ case Dummy970(p) => p
+ case Dummy971(p) => p
+ case Dummy972(p) => p
+ case Dummy973(p) => p
+ case Dummy974(p) => p
+ case Dummy975(p) => p
+ case Dummy976(p) => p
+ case Dummy977(p) => p
+ case Dummy978(p) => p
+ case Dummy979(p) => p
+ case Dummy980(p) => p
+ case Dummy981(p) => p
+ case Dummy982(p) => p
+ case Dummy983(p) => p
+ case Dummy984(p) => p
+ case Dummy985(p) => p
+ case Dummy986(p) => p
+ case Dummy987(p) => p
+ case Dummy988(p) => p
+ case Dummy989(p) => p
+ case Dummy990(p) => p
+ case Dummy991(p) => p
+ case Dummy992(p) => p
+ case Dummy993(p) => p
+ case Dummy994(p) => p
+ case Dummy995(p) => p
+ case Dummy996(p) => p
+ case Dummy997(p) => p
+ case Dummy998(p) => p
+ case Dummy999(p) => p
+ case Dummy1000(p) => p
+ case Dummy1001(p) => p
+ case Dummy1002(p) => p
+ case Dummy1003(p) => p
+ case Dummy1004(p) => p
+ case Dummy1005(p) => p
+ case Dummy1006(p) => p
+ case Dummy1007(p) => p
+ case Dummy1008(p) => p
+ case Dummy1009(p) => p
+ case Dummy1010(p) => p
+ case Dummy1011(p) => p
+ case Dummy1012(p) => p
+ case Dummy1013(p) => p
+ case Dummy1014(p) => p
+ case Dummy1015(p) => p
+ case Dummy1016(p) => p
+ case Dummy1017(p) => p
+ case Dummy1018(p) => p
+ case Dummy1019(p) => p
+ case Dummy1020(p) => p
+ case Dummy1021(p) => p
+ case Dummy1022(p) => p
+ case Dummy1023(p) => p
+ case Dummy1024(p) => p
+ case Dummy1025(p) => p
+ case Dummy1026(p) => p
+ case Dummy1027(p) => p
+ case Dummy1028(p) => p
+ case Dummy1029(p) => p
+ case Dummy1030(p) => p
+ case Dummy1031(p) => p
+ case Dummy1032(p) => p
+ case Dummy1033(p) => p
+ case Dummy1034(p) => p
+ case Dummy1035(p) => p
+ case Dummy1036(p) => p
+ case Dummy1037(p) => p
+ case Dummy1038(p) => p
+ case Dummy1039(p) => p
+ case Dummy1040(p) => p
+ case Dummy1041(p) => p
+ case Dummy1042(p) => p
+ case Dummy1043(p) => p
+ case Dummy1044(p) => p
+ case Dummy1045(p) => p
+ case Dummy1046(p) => p
+ case Dummy1047(p) => p
+ case Dummy1048(p) => p
+ case Dummy1049(p) => p
+ case Dummy1050(p) => p
+ case Dummy1051(p) => p
+ case Dummy1052(p) => p
+ case Dummy1053(p) => p
+ case Dummy1054(p) => p
+ case Dummy1055(p) => p
+ case Dummy1056(p) => p
+ case Dummy1057(p) => p
+ case Dummy1058(p) => p
+ case Dummy1059(p) => p
+ case Dummy1060(p) => p
+ case Dummy1061(p) => p
+ case Dummy1062(p) => p
+ case Dummy1063(p) => p
+ case Dummy1064(p) => p
+ case Dummy1065(p) => p
+ case Dummy1066(p) => p
+ case Dummy1067(p) => p
+ case Dummy1068(p) => p
+ case Dummy1069(p) => p
+ case Dummy1070(p) => p
+ case Dummy1071(p) => p
+ case Dummy1072(p) => p
+ case Dummy1073(p) => p
+ case Dummy1074(p) => p
+ case Dummy1075(p) => p
+ case Dummy1076(p) => p
+ case Dummy1077(p) => p
+ case Dummy1078(p) => p
+ case Dummy1079(p) => p
+ case Dummy1080(p) => p
+ case Dummy1081(p) => p
+ case Dummy1082(p) => p
+ case Dummy1083(p) => p
+ case Dummy1084(p) => p
+ case Dummy1085(p) => p
+ case Dummy1086(p) => p
+ case Dummy1087(p) => p
+ case Dummy1088(p) => p
+ case Dummy1089(p) => p
+ case Dummy1090(p) => p
+ case Dummy1091(p) => p
+ case Dummy1092(p) => p
+ case Dummy1093(p) => p
+ case Dummy1094(p) => p
+ case Dummy1095(p) => p
+ case Dummy1096(p) => p
+ case Dummy1097(p) => p
+ case Dummy1098(p) => p
+ case Dummy1099(p) => p
+ case Dummy1100(p) => p
+ case Dummy1101(p) => p
+ case Dummy1102(p) => p
+ case Dummy1103(p) => p
+ case Dummy1104(p) => p
+ case Dummy1105(p) => p
+ case Dummy1106(p) => p
+ case Dummy1107(p) => p
+ case Dummy1108(p) => p
+ case Dummy1109(p) => p
+ case Dummy1110(p) => p
+ case Dummy1111(p) => p
+ case Dummy1112(p) => p
+ case Dummy1113(p) => p
+ case Dummy1114(p) => p
+ case Dummy1115(p) => p
+ case Dummy1116(p) => p
+ case Dummy1117(p) => p
+ case Dummy1118(p) => p
+ case Dummy1119(p) => p
+ case Dummy1120(p) => p
+ case Dummy1121(p) => p
+ case Dummy1122(p) => p
+ case Dummy1123(p) => p
+ case Dummy1124(p) => p
+ case Dummy1125(p) => p
+ case Dummy1126(p) => p
+ case Dummy1127(p) => p
+ case Dummy1128(p) => p
+ case Dummy1129(p) => p
+ case Dummy1130(p) => p
+ case Dummy1131(p) => p
+ case Dummy1132(p) => p
+ case Dummy1133(p) => p
+ case Dummy1134(p) => p
+ case Dummy1135(p) => p
+ case Dummy1136(p) => p
+ case Dummy1137(p) => p
+ case Dummy1138(p) => p
+ case Dummy1139(p) => p
+ case Dummy1140(p) => p
+ case Dummy1141(p) => p
+ case Dummy1142(p) => p
+ case Dummy1143(p) => p
+ case Dummy1144(p) => p
+ case Dummy1145(p) => p
+ case Dummy1146(p) => p
+ case Dummy1147(p) => p
+ case Dummy1148(p) => p
+ case Dummy1149(p) => p
+ case Dummy1150(p) => p
+ case Dummy1151(p) => p
+ case Dummy1152(p) => p
+ case Dummy1153(p) => p
+ case Dummy1154(p) => p
+ case Dummy1155(p) => p
+ case Dummy1156(p) => p
+ case Dummy1157(p) => p
+ case Dummy1158(p) => p
+ case Dummy1159(p) => p
+ case Dummy1160(p) => p
+ case Dummy1161(p) => p
+ case Dummy1162(p) => p
+ case Dummy1163(p) => p
+ case Dummy1164(p) => p
+ case Dummy1165(p) => p
+ case Dummy1166(p) => p
+ case Dummy1167(p) => p
+ case Dummy1168(p) => p
+ case Dummy1169(p) => p
+ case Dummy1170(p) => p
+ case Dummy1171(p) => p
+ case Dummy1172(p) => p
+ case Dummy1173(p) => p
+ case Dummy1174(p) => p
+ case Dummy1175(p) => p
+ case Dummy1176(p) => p
+ case Dummy1177(p) => p
+ case Dummy1178(p) => p
+ case Dummy1179(p) => p
+ case Dummy1180(p) => p
+ case Dummy1181(p) => p
+ case Dummy1182(p) => p
+ case Dummy1183(p) => p
+ case Dummy1184(p) => p
+ case Dummy1185(p) => p
+ case Dummy1186(p) => p
+ case Dummy1187(p) => p
+ case Dummy1188(p) => p
+ case Dummy1189(p) => p
+ case Dummy1190(p) => p
+ case Dummy1191(p) => p
+ case Dummy1192(p) => p
+ case Dummy1193(p) => p
+ case Dummy1194(p) => p
+ case Dummy1195(p) => p
+ case Dummy1196(p) => p
+ case Dummy1197(p) => p
+ case Dummy1198(p) => p
+ case Dummy1199(p) => p
+ case Dummy1200(p) => p
+ case Dummy1201(p) => p
+ case Dummy1202(p) => p
+ case Dummy1203(p) => p
+ case Dummy1204(p) => p
+ case Dummy1205(p) => p
+ case Dummy1206(p) => p
+ case Dummy1207(p) => p
+ case Dummy1208(p) => p
+ case Dummy1209(p) => p
+ case Dummy1210(p) => p
+ case Dummy1211(p) => p
+ case Dummy1212(p) => p
+ case Dummy1213(p) => p
+ case Dummy1214(p) => p
+ case Dummy1215(p) => p
+ case Dummy1216(p) => p
+ case Dummy1217(p) => p
+ case Dummy1218(p) => p
+ case Dummy1219(p) => p
+ case Dummy1220(p) => p
+ case Dummy1221(p) => p
+ case Dummy1222(p) => p
+ case Dummy1223(p) => p
+ case Dummy1224(p) => p
+ case Dummy1225(p) => p
+ case Dummy1226(p) => p
+ case Dummy1227(p) => p
+ case Dummy1228(p) => p
+ case Dummy1229(p) => p
+ case Dummy1230(p) => p
+ case Dummy1231(p) => p
+ case Dummy1232(p) => p
+ case Dummy1233(p) => p
+ case Dummy1234(p) => p
+ case Dummy1235(p) => p
+ case Dummy1236(p) => p
+ case Dummy1237(p) => p
+ case Dummy1238(p) => p
+ case Dummy1239(p) => p
+ case Dummy1240(p) => p
+ case Dummy1241(p) => p
+ case Dummy1242(p) => p
+ case Dummy1243(p) => p
+ case Dummy1244(p) => p
+ case Dummy1245(p) => p
+ case Dummy1246(p) => p
+ case Dummy1247(p) => p
+ case Dummy1248(p) => p
+ case Dummy1249(p) => p
+ case Dummy1250(p) => p
+ case Dummy1251(p) => p
+ case Dummy1252(p) => p
+ case Dummy1253(p) => p
+ case Dummy1254(p) => p
+ case Dummy1255(p) => p
+ case Dummy1256(p) => p
+ case Dummy1257(p) => p
+ case Dummy1258(p) => p
+ case Dummy1259(p) => p
+ case Dummy1260(p) => p
+ case Dummy1261(p) => p
+ case Dummy1262(p) => p
+ case Dummy1263(p) => p
+ case Dummy1264(p) => p
+ case Dummy1265(p) => p
+ case Dummy1266(p) => p
+ case Dummy1267(p) => p
+ case Dummy1268(p) => p
+ case Dummy1269(p) => p
+ case Dummy1270(p) => p
+ case Dummy1271(p) => p
+ case Dummy1272(p) => p
+ case Dummy1273(p) => p
+ case Dummy1274(p) => p
+ case Dummy1275(p) => p
+ case Dummy1276(p) => p
+ case Dummy1277(p) => p
+ case Dummy1278(p) => p
+ case Dummy1279(p) => p
+ case Dummy1280(p) => p
+ case Dummy1281(p) => p
+ case Dummy1282(p) => p
+ case Dummy1283(p) => p
+ case Dummy1284(p) => p
+ case Dummy1285(p) => p
+ case Dummy1286(p) => p
+ case Dummy1287(p) => p
+ case Dummy1288(p) => p
+ case Dummy1289(p) => p
+ case Dummy1290(p) => p
+ case Dummy1291(p) => p
+ case Dummy1292(p) => p
+ case Dummy1293(p) => p
+ case Dummy1294(p) => p
+ case Dummy1295(p) => p
+ case Dummy1296(p) => p
+ case Dummy1297(p) => p
+ case Dummy1298(p) => p
+ case Dummy1299(p) => p
+ case Dummy1300(p) => p
+ case Dummy1301(p) => p
+ case Dummy1302(p) => p
+ case Dummy1303(p) => p
+ case Dummy1304(p) => p
+ case Dummy1305(p) => p
+ case Dummy1306(p) => p
+ case Dummy1307(p) => p
+ case Dummy1308(p) => p
+ case Dummy1309(p) => p
+ case Dummy1310(p) => p
+ case Dummy1311(p) => p
+ case Dummy1312(p) => p
+ case Dummy1313(p) => p
+ case Dummy1314(p) => p
+ case Dummy1315(p) => p
+ case Dummy1316(p) => p
+ case Dummy1317(p) => p
+ case Dummy1318(p) => p
+ case Dummy1319(p) => p
+ case Dummy1320(p) => p
+ case Dummy1321(p) => p
+ case Dummy1322(p) => p
+ case Dummy1323(p) => p
+ case Dummy1324(p) => p
+ case Dummy1325(p) => p
+ case Dummy1326(p) => p
+ case Dummy1327(p) => p
+ case Dummy1328(p) => p
+ case Dummy1329(p) => p
+ case Dummy1330(p) => p
+ case Dummy1331(p) => p
+ case Dummy1332(p) => p
+ case Dummy1333(p) => p
+ case Dummy1334(p) => p
+ case Dummy1335(p) => p
+ case Dummy1336(p) => p
+ case Dummy1337(p) => p
+ case Dummy1338(p) => p
+ case Dummy1339(p) => p
+ case Dummy1340(p) => p
+ case Dummy1341(p) => p
+ case Dummy1342(p) => p
+ case Dummy1343(p) => p
+ case Dummy1344(p) => p
+ case Dummy1345(p) => p
+ case Dummy1346(p) => p
+ case Dummy1347(p) => p
+ case Dummy1348(p) => p
+ case Dummy1349(p) => p
+ case Dummy1350(p) => p
+ case Dummy1351(p) => p
+ case Dummy1352(p) => p
+ case Dummy1353(p) => p
+ case Dummy1354(p) => p
+ case Dummy1355(p) => p
+ case Dummy1356(p) => p
+ case Dummy1357(p) => p
+ case Dummy1358(p) => p
+ case Dummy1359(p) => p
+ case Dummy1360(p) => p
+ case Dummy1361(p) => p
+ case Dummy1362(p) => p
+ case Dummy1363(p) => p
+ case Dummy1364(p) => p
+ case Dummy1365(p) => p
+ case Dummy1366(p) => p
+ case Dummy1367(p) => p
+ case Dummy1368(p) => p
+ case Dummy1369(p) => p
+ case Dummy1370(p) => p
+ case Dummy1371(p) => p
+ case Dummy1372(p) => p
+ case Dummy1373(p) => p
+ case Dummy1374(p) => p
+ case Dummy1375(p) => p
+ case Dummy1376(p) => p
+ case Dummy1377(p) => p
+ case Dummy1378(p) => p
+ case Dummy1379(p) => p
+ case Dummy1380(p) => p
+ case Dummy1381(p) => p
+ case Dummy1382(p) => p
+ case Dummy1383(p) => p
+ case Dummy1384(p) => p
+ case Dummy1385(p) => p
+ case Dummy1386(p) => p
+ case Dummy1387(p) => p
+ case Dummy1388(p) => p
+ case Dummy1389(p) => p
+ case Dummy1390(p) => p
+ case Dummy1391(p) => p
+ case Dummy1392(p) => p
+ case Dummy1393(p) => p
+ case Dummy1394(p) => p
+ case Dummy1395(p) => p
+ case Dummy1396(p) => p
+ case Dummy1397(p) => p
+ case Dummy1398(p) => p
+ case Dummy1399(p) => p
+ case Dummy1400(p) => p
+ case Dummy1401(p) => p
+ case Dummy1402(p) => p
+ case Dummy1403(p) => p
+ case Dummy1404(p) => p
+ case Dummy1405(p) => p
+ case Dummy1406(p) => p
+ case Dummy1407(p) => p
+ case Dummy1408(p) => p
+ case Dummy1409(p) => p
+ case Dummy1410(p) => p
+ case Dummy1411(p) => p
+ case Dummy1412(p) => p
+ case Dummy1413(p) => p
+ case Dummy1414(p) => p
+ case Dummy1415(p) => p
+ case Dummy1416(p) => p
+ case Dummy1417(p) => p
+ case Dummy1418(p) => p
+ case Dummy1419(p) => p
+ case Dummy1420(p) => p
+ case Dummy1421(p) => p
+ case Dummy1422(p) => p
+ case Dummy1423(p) => p
+ case Dummy1424(p) => p
+ case Dummy1425(p) => p
+ case Dummy1426(p) => p
+ case Dummy1427(p) => p
+ case Dummy1428(p) => p
+ case Dummy1429(p) => p
+ case Dummy1430(p) => p
+ case Dummy1431(p) => p
+ case Dummy1432(p) => p
+ case Dummy1433(p) => p
+ case Dummy1434(p) => p
+ case Dummy1435(p) => p
+ case Dummy1436(p) => p
+ case Dummy1437(p) => p
+ case Dummy1438(p) => p
+ case Dummy1439(p) => p
+ case Dummy1440(p) => p
+ case Dummy1441(p) => p
+ case Dummy1442(p) => p
+ case Dummy1443(p) => p
+ case Dummy1444(p) => p
+ case Dummy1445(p) => p
+ case Dummy1446(p) => p
+ case Dummy1447(p) => p
+ case Dummy1448(p) => p
+ case Dummy1449(p) => p
+ case Dummy1450(p) => p
+ case Dummy1451(p) => p
+ case Dummy1452(p) => p
+ case Dummy1453(p) => p
+ case Dummy1454(p) => p
+ case Dummy1455(p) => p
+ case Dummy1456(p) => p
+ case Dummy1457(p) => p
+ case Dummy1458(p) => p
+ case Dummy1459(p) => p
+ case Dummy1460(p) => p
+ case Dummy1461(p) => p
+ case Dummy1462(p) => p
+ case Dummy1463(p) => p
+ case Dummy1464(p) => p
+ case Dummy1465(p) => p
+ case Dummy1466(p) => p
+ case Dummy1467(p) => p
+ case Dummy1468(p) => p
+ case Dummy1469(p) => p
+ case Dummy1470(p) => p
+ case Dummy1471(p) => p
+ case Dummy1472(p) => p
+ case Dummy1473(p) => p
+ case Dummy1474(p) => p
+ case Dummy1475(p) => p
+ case Dummy1476(p) => p
+ case Dummy1477(p) => p
+ case Dummy1478(p) => p
+ case Dummy1479(p) => p
+ case Dummy1480(p) => p
+ case Dummy1481(p) => p
+ case Dummy1482(p) => p
+ case Dummy1483(p) => p
+ case Dummy1484(p) => p
+ case Dummy1485(p) => p
+ case Dummy1486(p) => p
+ case Dummy1487(p) => p
+ case Dummy1488(p) => p
+ case Dummy1489(p) => p
+ case Dummy1490(p) => p
+ case Dummy1491(p) => p
+ case Dummy1492(p) => p
+ case Dummy1493(p) => p
+ case Dummy1494(p) => p
+ case Dummy1495(p) => p
+ case Dummy1496(p) => p
+ case Dummy1497(p) => p
+ case Dummy1498(p) => p
+ case Dummy1499(p) => p
+ case Dummy1500(p) => p
+ case Dummy1501(p) => p
+ case Dummy1502(p) => p
+ case Dummy1503(p) => p
+ case Dummy1504(p) => p
+ case Dummy1505(p) => p
+ case Dummy1506(p) => p
+ case Dummy1507(p) => p
+ case Dummy1508(p) => p
+ case Dummy1509(p) => p
+ case Dummy1510(p) => p
+ case Dummy1511(p) => p
+ case Dummy1512(p) => p
+ case Dummy1513(p) => p
+ case Dummy1514(p) => p
+ case Dummy1515(p) => p
+ case Dummy1516(p) => p
+ case Dummy1517(p) => p
+ case Dummy1518(p) => p
+ case Dummy1519(p) => p
+ case Dummy1520(p) => p
+ case Dummy1521(p) => p
+ case Dummy1522(p) => p
+ case Dummy1523(p) => p
+ case Dummy1524(p) => p
+ case Dummy1525(p) => p
+ case Dummy1526(p) => p
+ case Dummy1527(p) => p
+ case Dummy1528(p) => p
+ case Dummy1529(p) => p
+ case Dummy1530(p) => p
+ case Dummy1531(p) => p
+ case Dummy1532(p) => p
+ case Dummy1533(p) => p
+ case Dummy1534(p) => p
+ case Dummy1535(p) => p
+ case Dummy1536(p) => p
+ case Dummy1537(p) => p
+ case Dummy1538(p) => p
+ case Dummy1539(p) => p
+ case Dummy1540(p) => p
+ case Dummy1541(p) => p
+ case Dummy1542(p) => p
+ case Dummy1543(p) => p
+ case Dummy1544(p) => p
+ case Dummy1545(p) => p
+ case Dummy1546(p) => p
+ case Dummy1547(p) => p
+ case Dummy1548(p) => p
+ case Dummy1549(p) => p
+ case Dummy1550(p) => p
+ case Dummy1551(p) => p
+ case Dummy1552(p) => p
+ case Dummy1553(p) => p
+ case Dummy1554(p) => p
+ case Dummy1555(p) => p
+ case Dummy1556(p) => p
+ case Dummy1557(p) => p
+ case Dummy1558(p) => p
+ case Dummy1559(p) => p
+ case Dummy1560(p) => p
+ case Dummy1561(p) => p
+ case Dummy1562(p) => p
+ case Dummy1563(p) => p
+ case Dummy1564(p) => p
+ case Dummy1565(p) => p
+ case Dummy1566(p) => p
+ case Dummy1567(p) => p
+ case Dummy1568(p) => p
+ case Dummy1569(p) => p
+ case Dummy1570(p) => p
+ case Dummy1571(p) => p
+ case Dummy1572(p) => p
+ case Dummy1573(p) => p
+ case Dummy1574(p) => p
+ case Dummy1575(p) => p
+ case Dummy1576(p) => p
+ case Dummy1577(p) => p
+ case Dummy1578(p) => p
+ case Dummy1579(p) => p
+ case Dummy1580(p) => p
+ case Dummy1581(p) => p
+ case Dummy1582(p) => p
+ case Dummy1583(p) => p
+ case Dummy1584(p) => p
+ case Dummy1585(p) => p
+ case Dummy1586(p) => p
+ case Dummy1587(p) => p
+ case Dummy1588(p) => p
+ case Dummy1589(p) => p
+ case Dummy1590(p) => p
+ case Dummy1591(p) => p
+ case Dummy1592(p) => p
+ case Dummy1593(p) => p
+ case Dummy1594(p) => p
+ case Dummy1595(p) => p
+ case Dummy1596(p) => p
+ case Dummy1597(p) => p
+ case Dummy1598(p) => p
+ case Dummy1599(p) => p
+ case Dummy1600(p) => p
+ case Dummy1601(p) => p
+ case Dummy1602(p) => p
+ case Dummy1603(p) => p
+ case Dummy1604(p) => p
+ case Dummy1605(p) => p
+ case Dummy1606(p) => p
+ case Dummy1607(p) => p
+ case Dummy1608(p) => p
+ case Dummy1609(p) => p
+ case Dummy1610(p) => p
+ case Dummy1611(p) => p
+ case Dummy1612(p) => p
+ case Dummy1613(p) => p
+ case Dummy1614(p) => p
+ case Dummy1615(p) => p
+ case Dummy1616(p) => p
+ case Dummy1617(p) => p
+ case Dummy1618(p) => p
+ case Dummy1619(p) => p
+ case Dummy1620(p) => p
+ case Dummy1621(p) => p
+ case Dummy1622(p) => p
+ case Dummy1623(p) => p
+ case Dummy1624(p) => p
+ case Dummy1625(p) => p
+ case Dummy1626(p) => p
+ case Dummy1627(p) => p
+ case Dummy1628(p) => p
+ case Dummy1629(p) => p
+ case Dummy1630(p) => p
+ case Dummy1631(p) => p
+ case Dummy1632(p) => p
+ case Dummy1633(p) => p
+ case Dummy1634(p) => p
+ case Dummy1635(p) => p
+ case Dummy1636(p) => p
+ case Dummy1637(p) => p
+ case Dummy1638(p) => p
+ case Dummy1639(p) => p
+ case Dummy1640(p) => p
+ case Dummy1641(p) => p
+ case Dummy1642(p) => p
+ case Dummy1643(p) => p
+ case Dummy1644(p) => p
+ case Dummy1645(p) => p
+ case Dummy1646(p) => p
+ case Dummy1647(p) => p
+ case Dummy1648(p) => p
+ case Dummy1649(p) => p
+ case Dummy1650(p) => p
+ case Dummy1651(p) => p
+ case Dummy1652(p) => p
+ case Dummy1653(p) => p
+ case Dummy1654(p) => p
+ case Dummy1655(p) => p
+ case Dummy1656(p) => p
+ case Dummy1657(p) => p
+ case Dummy1658(p) => p
+ case Dummy1659(p) => p
+ case Dummy1660(p) => p
+ case Dummy1661(p) => p
+ case Dummy1662(p) => p
+ case Dummy1663(p) => p
+ case Dummy1664(p) => p
+ case Dummy1665(p) => p
+ case Dummy1666(p) => p
+ case Dummy1667(p) => p
+ case Dummy1668(p) => p
+ case Dummy1669(p) => p
+ case Dummy1670(p) => p
+ case Dummy1671(p) => p
+ case Dummy1672(p) => p
+ case Dummy1673(p) => p
+ case Dummy1674(p) => p
+ case Dummy1675(p) => p
+ case Dummy1676(p) => p
+ case Dummy1677(p) => p
+ case Dummy1678(p) => p
+ case Dummy1679(p) => p
+ case Dummy1680(p) => p
+ case Dummy1681(p) => p
+ case Dummy1682(p) => p
+ case Dummy1683(p) => p
+ case Dummy1684(p) => p
+ case Dummy1685(p) => p
+ case Dummy1686(p) => p
+ case Dummy1687(p) => p
+ case Dummy1688(p) => p
+ case Dummy1689(p) => p
+ case Dummy1690(p) => p
+ case Dummy1691(p) => p
+ case Dummy1692(p) => p
+ case Dummy1693(p) => p
+ case Dummy1694(p) => p
+ case Dummy1695(p) => p
+ case Dummy1696(p) => p
+ case Dummy1697(p) => p
+ case Dummy1698(p) => p
+ case Dummy1699(p) => p
+ case Dummy1700(p) => p
+ case Dummy1701(p) => p
+ case Dummy1702(p) => p
+ case Dummy1703(p) => p
+ case Dummy1704(p) => p
+ case Dummy1705(p) => p
+ case Dummy1706(p) => p
+ case Dummy1707(p) => p
+ case Dummy1708(p) => p
+ case Dummy1709(p) => p
+ case Dummy1710(p) => p
+ case Dummy1711(p) => p
+ case Dummy1712(p) => p
+ case Dummy1713(p) => p
+ case Dummy1714(p) => p
+ case Dummy1715(p) => p
+ case Dummy1716(p) => p
+ case Dummy1717(p) => p
+ case Dummy1718(p) => p
+ case Dummy1719(p) => p
+ case Dummy1720(p) => p
+ case Dummy1721(p) => p
+ case Dummy1722(p) => p
+ case Dummy1723(p) => p
+ case Dummy1724(p) => p
+ case Dummy1725(p) => p
+ case Dummy1726(p) => p
+ case Dummy1727(p) => p
+ case Dummy1728(p) => p
+ case Dummy1729(p) => p
+ case Dummy1730(p) => p
+ case Dummy1731(p) => p
+ case Dummy1732(p) => p
+ case Dummy1733(p) => p
+ case Dummy1734(p) => p
+ case Dummy1735(p) => p
+ case Dummy1736(p) => p
+ case Dummy1737(p) => p
+ case Dummy1738(p) => p
+ case Dummy1739(p) => p
+ case Dummy1740(p) => p
+ case Dummy1741(p) => p
+ case Dummy1742(p) => p
+ case Dummy1743(p) => p
+ case Dummy1744(p) => p
+ case Dummy1745(p) => p
+ case Dummy1746(p) => p
+ case Dummy1747(p) => p
+ case Dummy1748(p) => p
+ case Dummy1749(p) => p
+ case Dummy1750(p) => p
+ case Dummy1751(p) => p
+ case Dummy1752(p) => p
+ case Dummy1753(p) => p
+ case Dummy1754(p) => p
+ case Dummy1755(p) => p
+ case Dummy1756(p) => p
+ case Dummy1757(p) => p
+ case Dummy1758(p) => p
+ case Dummy1759(p) => p
+ case Dummy1760(p) => p
+ case Dummy1761(p) => p
+ case Dummy1762(p) => p
+ case Dummy1763(p) => p
+ case Dummy1764(p) => p
+ case Dummy1765(p) => p
+ case Dummy1766(p) => p
+ case Dummy1767(p) => p
+ case Dummy1768(p) => p
+ case Dummy1769(p) => p
+ case Dummy1770(p) => p
+ case Dummy1771(p) => p
+ case Dummy1772(p) => p
+ case Dummy1773(p) => p
+ case Dummy1774(p) => p
+ case Dummy1775(p) => p
+ case Dummy1776(p) => p
+ case Dummy1777(p) => p
+ case Dummy1778(p) => p
+ case Dummy1779(p) => p
+ case Dummy1780(p) => p
+ case Dummy1781(p) => p
+ case Dummy1782(p) => p
+ case Dummy1783(p) => p
+ case Dummy1784(p) => p
+ case Dummy1785(p) => p
+ case Dummy1786(p) => p
+ case Dummy1787(p) => p
+ case Dummy1788(p) => p
+ case Dummy1789(p) => p
+ case Dummy1790(p) => p
+ case Dummy1791(p) => p
+ case Dummy1792(p) => p
+ case Dummy1793(p) => p
+ case Dummy1794(p) => p
+ case Dummy1795(p) => p
+ case Dummy1796(p) => p
+ case Dummy1797(p) => p
+ case Dummy1798(p) => p
+ case Dummy1799(p) => p
+ case Dummy1800(p) => p
+ case Dummy1801(p) => p
+ case Dummy1802(p) => p
+ case Dummy1803(p) => p
+ case Dummy1804(p) => p
+ case Dummy1805(p) => p
+ case Dummy1806(p) => p
+ case Dummy1807(p) => p
+ case Dummy1808(p) => p
+ case Dummy1809(p) => p
+ case Dummy1810(p) => p
+ case Dummy1811(p) => p
+ case Dummy1812(p) => p
+ case Dummy1813(p) => p
+ case Dummy1814(p) => p
+ case Dummy1815(p) => p
+ case Dummy1816(p) => p
+ case Dummy1817(p) => p
+ case Dummy1818(p) => p
+ case Dummy1819(p) => p
+ case Dummy1820(p) => p
+ case Dummy1821(p) => p
+ case Dummy1822(p) => p
+ case Dummy1823(p) => p
+ case Dummy1824(p) => p
+ case Dummy1825(p) => p
+ case Dummy1826(p) => p
+ case Dummy1827(p) => p
+ case Dummy1828(p) => p
+ case Dummy1829(p) => p
+ case Dummy1830(p) => p
+ case Dummy1831(p) => p
+ case Dummy1832(p) => p
+ case Dummy1833(p) => p
+ case Dummy1834(p) => p
+ case Dummy1835(p) => p
+ case Dummy1836(p) => p
+ case Dummy1837(p) => p
+ case Dummy1838(p) => p
+ case Dummy1839(p) => p
+ case Dummy1840(p) => p
+ case Dummy1841(p) => p
+ case Dummy1842(p) => p
+ case Dummy1843(p) => p
+ case Dummy1844(p) => p
+ case Dummy1845(p) => p
+ case Dummy1846(p) => p
+ case Dummy1847(p) => p
+ case Dummy1848(p) => p
+ case Dummy1849(p) => p
+ case Dummy1850(p) => p
+ case Dummy1851(p) => p
+ case Dummy1852(p) => p
+ case Dummy1853(p) => p
+ case Dummy1854(p) => p
+ case Dummy1855(p) => p
+ case Dummy1856(p) => p
+ case Dummy1857(p) => p
+ case Dummy1858(p) => p
+ case Dummy1859(p) => p
+ case Dummy1860(p) => p
+ case Dummy1861(p) => p
+ case Dummy1862(p) => p
+ case Dummy1863(p) => p
+ case Dummy1864(p) => p
+ case Dummy1865(p) => p
+ case Dummy1866(p) => p
+ case Dummy1867(p) => p
+ case Dummy1868(p) => p
+ case Dummy1869(p) => p
+ case Dummy1870(p) => p
+ case Dummy1871(p) => p
+ case Dummy1872(p) => p
+ case Dummy1873(p) => p
+ case Dummy1874(p) => p
+ case Dummy1875(p) => p
+ case Dummy1876(p) => p
+ case Dummy1877(p) => p
+ case Dummy1878(p) => p
+ case Dummy1879(p) => p
+ case Dummy1880(p) => p
+ case Dummy1881(p) => p
+ case Dummy1882(p) => p
+ case Dummy1883(p) => p
+ case Dummy1884(p) => p
+ case Dummy1885(p) => p
+ case Dummy1886(p) => p
+ case Dummy1887(p) => p
+ case Dummy1888(p) => p
+ case Dummy1889(p) => p
+ case Dummy1890(p) => p
+ case Dummy1891(p) => p
+ case Dummy1892(p) => p
+ case Dummy1893(p) => p
+ case Dummy1894(p) => p
+ case Dummy1895(p) => p
+ case Dummy1896(p) => p
+ case Dummy1897(p) => p
+ case Dummy1898(p) => p
+ case Dummy1899(p) => p
+ case Dummy1900(p) => p
+ case Dummy1901(p) => p
+ case Dummy1902(p) => p
+ case Dummy1903(p) => p
+ case Dummy1904(p) => p
+ case Dummy1905(p) => p
+ case Dummy1906(p) => p
+ case Dummy1907(p) => p
+ case Dummy1908(p) => p
+ case Dummy1909(p) => p
+ case Dummy1910(p) => p
+ case Dummy1911(p) => p
+ case Dummy1912(p) => p
+ case Dummy1913(p) => p
+ case Dummy1914(p) => p
+ case Dummy1915(p) => p
+ case Dummy1916(p) => p
+ case Dummy1917(p) => p
+ case Dummy1918(p) => p
+ case Dummy1919(p) => p
+ case Dummy1920(p) => p
+ case Dummy1921(p) => p
+ case Dummy1922(p) => p
+ case Dummy1923(p) => p
+ case Dummy1924(p) => p
+ case Dummy1925(p) => p
+ case Dummy1926(p) => p
+ case Dummy1927(p) => p
+ case Dummy1928(p) => p
+ case Dummy1929(p) => p
+ case Dummy1930(p) => p
+ case Dummy1931(p) => p
+ case Dummy1932(p) => p
+ case Dummy1933(p) => p
+ case Dummy1934(p) => p
+ case Dummy1935(p) => p
+ case Dummy1936(p) => p
+ case Dummy1937(p) => p
+ case Dummy1938(p) => p
+ case Dummy1939(p) => p
+ case Dummy1940(p) => p
+ case Dummy1941(p) => p
+ case Dummy1942(p) => p
+ case Dummy1943(p) => p
+ case Dummy1944(p) => p
+ case Dummy1945(p) => p
+ case Dummy1946(p) => p
+ case Dummy1947(p) => p
+ case Dummy1948(p) => p
+ case Dummy1949(p) => p
+ case Dummy1950(p) => p
+ case Dummy1951(p) => p
+ case Dummy1952(p) => p
+ case Dummy1953(p) => p
+ case Dummy1954(p) => p
+ case Dummy1955(p) => p
+ case Dummy1956(p) => p
+ case Dummy1957(p) => p
+ case Dummy1958(p) => p
+ case Dummy1959(p) => p
+ case Dummy1960(p) => p
+ case Dummy1961(p) => p
+ case Dummy1962(p) => p
+ case Dummy1963(p) => p
+ case Dummy1964(p) => p
+ case Dummy1965(p) => p
+ case Dummy1966(p) => p
+ case Dummy1967(p) => p
+ case Dummy1968(p) => p
+ case Dummy1969(p) => p
+ case Dummy1970(p) => p
+ case Dummy1971(p) => p
+ case Dummy1972(p) => p
+ case Dummy1973(p) => p
+ case Dummy1974(p) => p
+ case Dummy1975(p) => p
+ case Dummy1976(p) => p
+ case Dummy1977(p) => p
+ case Dummy1978(p) => p
+ case Dummy1979(p) => p
+ case Dummy1980(p) => p
+ case Dummy1981(p) => p
+ case Dummy1982(p) => p
+ case Dummy1983(p) => p
+ case Dummy1984(p) => p
+ case Dummy1985(p) => p
+ case Dummy1986(p) => p
+ case Dummy1987(p) => p
+ case Dummy1988(p) => p
+ case Dummy1989(p) => p
+ case Dummy1990(p) => p
+ case Dummy1991(p) => p
+ case Dummy1992(p) => p
+ case Dummy1993(p) => p
+ case Dummy1994(p) => p
+ case Dummy1995(p) => p
+ case Dummy1996(p) => p
+ case Dummy1997(p) => p
+ case Dummy1998(p) => p
+ case Dummy1999(p) => p
+}
+}
diff --git a/test/files/run/t6987.check b/test/disabled/run/t6987.check
index 86fc96c679..86fc96c679 100644
--- a/test/files/run/t6987.check
+++ b/test/disabled/run/t6987.check
diff --git a/test/files/run/t6987.scala b/test/disabled/run/t6987.scala
index 37e91d61ae..37e91d61ae 100644
--- a/test/files/run/t6987.scala
+++ b/test/disabled/run/t6987.scala
diff --git a/test/files/disabled/run/t4602.scala b/test/files/disabled/run/t4602.scala
new file mode 100644
index 0000000000..73ba231ccf
--- /dev/null
+++ b/test/files/disabled/run/t4602.scala
@@ -0,0 +1,57 @@
+import java.io.{File, FileOutputStream, BufferedOutputStream, FileWriter, ByteArrayOutputStream, PrintStream}
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+ val startupLatch = new CountDownLatch(1)
+ // we have to explicitly launch our server because when the client launches a server it uses
+ // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+ // happens to be in the path gets used
+ val t = new Thread(new Runnable {
+ def run() = {
+ CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+ }
+ })
+ t setDaemon true
+ t.start()
+ if (!startupLatch.await(2, TimeUnit.MINUTES))
+ sys error "Timeout waiting for server to start"
+
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ val outdir = scala.reflect.io.Directory(sys.props("partest.output"))
+
+ val dirNameAndPath = (1 to 2).toList map {number =>
+ val name = s"Hello${number}"
+ val dir = outdir / number.toString
+ (dir, name, dir / s"${name}.scala")
+ }
+
+ dirNameAndPath foreach {case (dir, name, path) =>
+ dir.createDirectory()
+ val file = path.jfile
+ val out = new FileWriter(file)
+ try
+ out.write(s"object ${name}\n")
+ finally
+ out.close
+ }
+
+ val success = (scala.Console withOut ps) {
+ dirNameAndPath foreach {case (path, name, _) =>
+ CompileClient.process(Array("-verbose", "-current-dir", path.toString, s"${name}.scala"))
+ }
+
+ CompileClient.process(Array("-shutdown"))
+ }
+
+ // now make sure we got success and the correct normalized paths
+ val msg = baos.toString()
+
+ assert(success, s"got a failure. Full results were: \n${msg}")
+ dirNameAndPath foreach {case (_, _, path) =>
+ val expected = s"Input files after normalizing paths: ${path}"
+ assert(msg contains expected, s"could not find '${expected}' in output. Full results were: \n${msg}")
+ }
+}
diff --git a/test/files/jvm/bytecode-test-example.check b/test/files/jvm/bytecode-test-example.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/jvm/bytecode-test-example/Foo_1.scala b/test/files/jvm/bytecode-test-example/Foo_1.scala
new file mode 100644
index 0000000000..4f679d156f
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Foo_1.scala
@@ -0,0 +1,9 @@
+class Foo_1 {
+ def foo(x: AnyRef): Int = {
+ val bool = x == null
+ if (x != null)
+ 1
+ else
+ 0
+ }
+}
diff --git a/test/files/jvm/bytecode-test-example/Test.scala b/test/files/jvm/bytecode-test-example/Test.scala
new file mode 100644
index 0000000000..d668059cb7
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Test.scala
@@ -0,0 +1,32 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ val methodNode = getMethod(classNode, "foo")
+ println(countNullChecks(methodNode.instructions))
+ }
+
+ def countNullChecks(insnList: InsnList): Int = {
+ /** Is the given instruction a null check?
+ * NOTE
+ * This will detect direct null comparison as in
+ * if (x == null) ...
+ * and not indirect as in
+ * val foo = null
+ * if (x == foo) ...
+ */
+ def isNullCheck(node: asm.tree.AbstractInsnNode): Boolean = {
+ val opcode = node.getOpcode
+ (opcode == asm.Opcodes.IFNULL) || (opcode == asm.Opcodes.IFNONNULL)
+ }
+ insnList.iterator.asScala.count(isNullCheck)
+ }
+}
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index 8674be168c..0efa83fbd9 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -70,7 +70,19 @@ object FutureTests extends MinimalScalaTest {
//FIXME should check
}
}
-
+
+ "The default ExecutionContext" should {
+ "report uncaught exceptions" in {
+ val p = Promise[Throwable]()
+ val logThrowable: Throwable => Unit = p.trySuccess(_)
+ val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable)
+
+ val t = new NotImplementedError("foo")
+ val f = Future(throw t)(ec)
+ Await.result(p.future, 2.seconds) mustBe t
+ }
+ }
+
"A future with global ExecutionContext" should {
import ExecutionContext.Implicits._
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.check b/test/files/jvm/patmat_opt_ignore_underscore.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.flags b/test/files/jvm/patmat_opt_ignore_underscore.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
new file mode 100644
index 0000000000..fa3639380d
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
@@ -0,0 +1,29 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+// this is not the best test for guarding against a regression of this particular issue,
+// but it sets the stage for checking the bytecode emitted by the pattern matcher and
+// comparing it to manually tuned code using if/then/else etc.
+class SameBytecode {
+ case class Foo(x: Any, y: String)
+
+ def a =
+ Foo(1, "a") match {
+ case Foo(_: String, y) => y
+ }
+
+ // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
+ // the test checks that bytecode for a and b is identical (modulo line numbers)
+ // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
+ // note that the actual tree is quite bad: it performs an unnecessary null check and isInstanceOf test and introduces a local val (x3)
+ // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
+ def b: String = {
+ val x1 = Foo(1, "a")
+ if (x1.ne(null)) {
+ if (x1.x.isInstanceOf[String]) {
+ return x1.y
+ }
+ }
+
+ throw new MatchError(x1)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/test.scala b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
new file mode 100644
index 0000000000..6179101a7e
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.check b/test/files/jvm/patmat_opt_no_nullcheck.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.flags b/test/files/jvm/patmat_opt_no_nullcheck.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
new file mode 100644
index 0000000000..3a594c401e
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
@@ -0,0 +1,24 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+case class Foo(x: Any)
+
+class SameBytecode {
+ def a =
+ (Foo(1): Any) match {
+ case Foo(_: String) =>
+ }
+
+ // there's no null check
+ def b: Unit = {
+ val x1: Any = Foo(1)
+ if (x1.isInstanceOf[Foo]) {
+ val x3 = x1.asInstanceOf[Foo]
+ if (x3.x.isInstanceOf[String]) {
+ val x = ()
+ return
+ }
+ }
+
+ throw new MatchError(x1)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/test.scala b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
new file mode 100644
index 0000000000..2927e763d5
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.check b/test/files/jvm/patmat_opt_primitive_typetest.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.flags b/test/files/jvm/patmat_opt_primitive_typetest.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
new file mode 100644
index 0000000000..e5db6c4dd0
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
@@ -0,0 +1,24 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+class SameBytecode {
+ case class Foo(x: Int, y: String)
+
+ def a =
+ Foo(1, "a") match {
+ case Foo(_: Int, y) => y
+ }
+
+ // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
+ // the test checks that bytecode for a and b is identical (modulo line numbers)
+ // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
+ // note that the actual tree is quite bad: it performs an unnecessary null check and introduces a local val (x3)
+ // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
+ def b: String = {
+ val x1 = Foo(1, "a")
+ if (x1.ne(null)) {
+ return x1.y
+ }
+
+ throw new MatchError(x1)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/test.scala b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
new file mode 100644
index 0000000000..2927e763d5
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/t6941.check b/test/files/jvm/t6941.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/t6941.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/t6941.flags b/test/files/jvm/t6941.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/jvm/t6941.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
new file mode 100644
index 0000000000..549abd5e64
--- /dev/null
+++ b/test/files/jvm/t6941/Analyzed_1.scala
@@ -0,0 +1,11 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+class SameBytecode {
+ def a(xs: List[Int]) = xs match {
+ case x :: _ => x
+ }
+
+ def b(xs: List[Int]) = xs match {
+ case xs: ::[Int] => xs.hd$1
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/t6941/test.scala b/test/files/jvm/t6941/test.scala
new file mode 100644
index 0000000000..248617f71f
--- /dev/null
+++ b/test/files/jvm/t6941/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), equalsModuloVar)
+ }
+}
diff --git a/test/files/jvm/throws-annot-from-java.check b/test/files/jvm/throws-annot-from-java.check
new file mode 100644
index 0000000000..be3ba412f8
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java.check
@@ -0,0 +1,47 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> :paste
+// Entering paste mode (ctrl-D to finish)
+
+{
+ val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2"));
+ {
+ val method = clazz.info.member(newTermName("foo"))
+ val throwsAnn = method.annotations.head
+ val atp = throwsAnn.atp
+ println("foo")
+ println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+ println
+
+ {
+ val method = clazz.info.member(newTermName("bar"))
+ val throwsAnn = method.annotations.head
+ val Literal(const) = throwsAnn.args.head
+ val tp = const.typeValue
+ println("bar")
+ println("tp.typeParams.isEmpty: " + tp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+}
+
+// Exiting paste mode, now interpreting.
+
+foo
+atp.typeParams.isEmpty: true
+throws[IllegalStateException](classOf[java.lang.IllegalStateException])
+
+bar
+tp.typeParams.isEmpty: true
+throws[test.PolymorphicException[_]](classOf[test.PolymorphicException])
+
+scala>
diff --git a/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala b/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala
new file mode 100644
index 0000000000..58fa536f0b
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala
@@ -0,0 +1,3 @@
+package test
+
+class PolymorphicException[T] extends Exception
diff --git a/test/files/jvm/throws-annot-from-java/Test_3.scala b/test/files/jvm/throws-annot-from-java/Test_3.scala
new file mode 100644
index 0000000000..de1d984573
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/Test_3.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """:power
+:paste
+{
+ val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2"));
+ {
+ val method = clazz.info.member(newTermName("foo"))
+ val throwsAnn = method.annotations.head
+ val atp = throwsAnn.atp
+ println("foo")
+ println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+ println
+
+ {
+ val method = clazz.info.member(newTermName("bar"))
+ val throwsAnn = method.annotations.head
+ val Literal(const) = throwsAnn.args.head
+ val tp = const.typeValue
+ println("bar")
+ println("tp.typeParams.isEmpty: " + tp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+}
+"""
+}
diff --git a/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java b/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java
new file mode 100644
index 0000000000..3708fe626b
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java
@@ -0,0 +1,6 @@
+package test;
+
+public class ThrowsDeclaration_2 {
+ public void foo() throws IllegalStateException {};
+ public void bar() throws PolymorphicException {};
+}
diff --git a/test/files/neg/macro-false-deprecation-warning.check b/test/files/neg/macro-false-deprecation-warning.check
new file mode 100644
index 0000000000..7d56505ec4
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning.check
@@ -0,0 +1,4 @@
+Impls_Macros_1.scala:5: error: illegal start of simple expression
+}
+^
+one error found
diff --git a/test/files/neg/macro-false-deprecation-warning.flags b/test/files/neg/macro-false-deprecation-warning.flags
new file mode 100644
index 0000000000..59af162db6
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning.flags
@@ -0,0 +1 @@
+-language:experimental.macros -deprecation \ No newline at end of file
diff --git a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
new file mode 100644
index 0000000000..6dc2ea114b
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+
+object Helper {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] =
+}
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[List[T]]) = {
+ c.universe.reify(Helper.unapplySeq(x.splice))
+ }
+
+ object UnapplyMacro {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] = macro impl[T]
+ }
+}
diff --git a/test/files/neg/t2968.check b/test/files/neg/t2968.check
new file mode 100644
index 0000000000..5d2387f98c
--- /dev/null
+++ b/test/files/neg/t2968.check
@@ -0,0 +1,10 @@
+t2968.scala:8: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+t2968.scala:17: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+t2968.scala:26: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+three errors found
diff --git a/test/files/neg/t2968.scala b/test/files/neg/t2968.scala
new file mode 100644
index 0000000000..41c3a798a5
--- /dev/null
+++ b/test/files/neg/t2968.scala
@@ -0,0 +1,26 @@
+object t1 {
+ case object Const {
+ }
+
+ class Var
+ {
+
+} // missing brace
+
+object t2 {
+ case class Const() {
+ }
+
+ class Var
+ {
+
+} // missing brace
+
+object t3 {
+ final case class Const() {
+ }
+
+ class Var
+ {
+
+} // missing brace
diff --git a/test/files/neg/t2968b.check b/test/files/neg/t2968b.check
new file mode 100644
index 0000000000..36d25a2d12
--- /dev/null
+++ b/test/files/neg/t2968b.check
@@ -0,0 +1,4 @@
+t2968b.scala:7: error: '}' expected but eof found.
+// missing brace
+ ^
+one error found
diff --git a/test/files/neg/t2968b.scala b/test/files/neg/t2968b.scala
new file mode 100644
index 0000000000..422b618aba
--- /dev/null
+++ b/test/files/neg/t2968b.scala
@@ -0,0 +1,7 @@
+case class Const()
+{
+}
+
+class Var
+{
+// missing brace
diff --git a/test/files/neg/t5353.check b/test/files/neg/t5353.check
new file mode 100644
index 0000000000..75e2435600
--- /dev/null
+++ b/test/files/neg/t5353.check
@@ -0,0 +1,4 @@
+t5353.scala:2: error: this type parameter must be specified
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+ ^
+one error found
diff --git a/test/files/neg/t5353.scala b/test/files/neg/t5353.scala
new file mode 100644
index 0000000000..1ee869aac1
--- /dev/null
+++ b/test/files/neg/t5353.scala
@@ -0,0 +1,3 @@
+class A {
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+}
diff --git a/test/files/neg/t5378.check b/test/files/neg/t5378.check
new file mode 100644
index 0000000000..c1460083f6
--- /dev/null
+++ b/test/files/neg/t5378.check
@@ -0,0 +1,31 @@
+t5378.scala:7: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains = new { def apply[T1 <: T](value: T1) = ??? }
+ ^
+t5378.scala:8: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains1 = new { def apply[T1 <: A1](value: T1) = ??? }
+ ^
+t5378.scala:9: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains2 = new { def apply[T1 <: A2](value: T1) = ??? }
+ ^
+t5378.scala:15: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: T](value: T1) = ??? }
+ ^
+t5378.scala:16: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: B1](value: T1) = ??? }
+ ^
+t5378.scala:17: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: B2](value: T1) = ??? }
+ ^
+t5378.scala:21: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply1[T1 <: B3](value: T1) = ???
+ ^
+t5378.scala:23: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply3(value: B3) = ???
+ ^
+t5378.scala:28: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply1(s: String)(x: Int)(value: T) = ???
+ ^
+t5378.scala:29: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ???
+ ^
+10 errors found
diff --git a/test/files/neg/t5378.scala b/test/files/neg/t5378.scala
new file mode 100644
index 0000000000..fa6afa02be
--- /dev/null
+++ b/test/files/neg/t5378.scala
@@ -0,0 +1,54 @@
+import scala.language.reflectiveCalls
+
+class Coll[+T] {
+ type A1 <: T
+ type A2 <: A1
+
+ def contains = new { def apply[T1 <: T](value: T1) = ??? }
+ def contains1 = new { def apply[T1 <: A1](value: T1) = ??? }
+ def contains2 = new { def apply[T1 <: A2](value: T1) = ??? }
+ def contains3 = {
+ trait Bippy {
+ type B1 <: T
+ type B2 <: B1
+ }
+ new Bippy { def apply[T1 <: T](value: T1) = ??? }
+ new Bippy { def apply[T1 <: B1](value: T1) = ??? }
+ new Bippy { def apply[T1 <: B2](value: T1) = ??? }
+ new Bippy {
+ type B3 = B2
+ type B4 = List[B2]
+ def apply1[T1 <: B3](value: T1) = ???
+ def apply2[T1 <: B4](value: T1) = ???
+ def apply3(value: B3) = ???
+ def apply4(value: B4) = value.head
+ }
+ }
+ def contains4 = new {
+ def apply1(s: String)(x: Int)(value: T) = ???
+ def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ???
+ }
+ def containsOk = {
+ trait Bippy {
+ type B1 <: AnyRef
+ type B2 <: B1
+ }
+ new Bippy { def apply[T1 <: AnyRef](value: T1) = ??? }
+ new Bippy { type B1 = String ; def apply[T1 <: B1](value: T1) = ??? }
+ new Bippy { type B2 = String ; def apply[T1 <: B2](value: T1) = ??? }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val xs = new Coll[List[String]]
+ val ys: Coll[Traversable[String]] = xs
+
+ println(ys contains Nil)
+ // java.lang.NoSuchMethodException: Coll$$anon$1.apply(scala.collection.Traversable)
+ // at java.lang.Class.getMethod(Class.java:1605)
+ // at Test$.reflMethod$Method1(a.scala:14)
+ // at Test$.main(a.scala:14)
+ // at Test.main(a.scala)
+ }
+}
diff --git a/test/files/neg/t5543.check b/test/files/neg/t5543.check
new file mode 100644
index 0000000000..b61de0f78b
--- /dev/null
+++ b/test/files/neg/t5543.check
@@ -0,0 +1,10 @@
+t5543.scala:3: error: not found: type T
+ def this(x: T) { this() }
+ ^
+t5543.scala:11: error: not found: value x
+ def this(a: Int, b: Int = x) {
+ ^
+t5543.scala:18: error: not found: value x
+ def this(a: Int = x) { this() }
+ ^
+three errors found
diff --git a/test/files/neg/t5543.scala b/test/files/neg/t5543.scala
new file mode 100644
index 0000000000..4e03e6e114
--- /dev/null
+++ b/test/files/neg/t5543.scala
@@ -0,0 +1,19 @@
+class C1 {
+ type T
+ def this(x: T) { this() }
+}
+
+class C1a[T] {
+ def this(x: T) { this() } // works, no error here
+}
+
+class C2(x: Int) {
+ def this(a: Int, b: Int = x) {
+ this(b)
+ }
+}
+
+class C3 {
+ val x = 0
+ def this(a: Int = x) { this() }
+}
diff --git a/test/files/neg/t5692a.check b/test/files/neg/t5692a.check
index ded95a8820..7fbfb5dba7 100644
--- a/test/files/neg/t5692a.check
+++ b/test/files/neg/t5692a.check
@@ -1,4 +1,4 @@
-Test_2.scala:2: error: type parameter not specified
+Test_2.scala:2: error: this type parameter must be specified
def x = Macros.foo
^
one error found
diff --git a/test/files/neg/t5692b.check b/test/files/neg/t5692b.check
index e453870ec8..16796826b4 100644
--- a/test/files/neg/t5692b.check
+++ b/test/files/neg/t5692b.check
@@ -1,4 +1,4 @@
-Test_2.scala:2: error: type parameters not specified
+Test_2.scala:2: error: these type parameters must be specified
def x = Macros.foo
^
one error found
diff --git a/test/files/neg/t6426.check b/test/files/neg/t6426.check
new file mode 100644
index 0000000000..149f74c4de
--- /dev/null
+++ b/test/files/neg/t6426.check
@@ -0,0 +1,7 @@
+t6426.scala:4: error: wildcard invalid as backquoted identifier
+ println(`_`.Buffer(0))
+ ^
+t6426.scala:5: error: ')' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/files/neg/t6426.scala b/test/files/neg/t6426.scala
new file mode 100644
index 0000000000..a27d18eb58
--- /dev/null
+++ b/test/files/neg/t6426.scala
@@ -0,0 +1,5 @@
+class A {
+ import collection.{mutable => _, _}
+
+ println(`_`.Buffer(0))
+}
diff --git a/test/files/neg/t6539/Macro_1.scala b/test/files/neg/t6539/Macro_1.scala
index ed52776d95..4f7d289e2e 100644
--- a/test/files/neg/t6539/Macro_1.scala
+++ b/test/files/neg/t6539/Macro_1.scala
@@ -5,6 +5,6 @@ object M {
def m(a: Any, b: Any): Any = macro mImpl
def mImpl(c: Context)(a: c.Expr[Any], b: c.Expr[Any]) = a
- @reflect.macros.compileTimeOnly("cto may only be used as an argument to " + "m")
+ @reflect.internal.annotations.compileTimeOnly("cto may only be used as an argument to " + "m")
def cto = 0
}
diff --git a/test/files/neg/t6539/Test_2.scala b/test/files/neg/t6539/Test_2.scala
index 5a602879ec..26f4504222 100644
--- a/test/files/neg/t6539/Test_2.scala
+++ b/test/files/neg/t6539/Test_2.scala
@@ -3,4 +3,10 @@ object Test {
M.m(M.cto, ()) // error
M.m((), M.cto) // okay
M.cto // error
+
+ locally {
+ val expr = scala.reflect.runtime.universe.reify(2)
+ val splice = expr.splice
+ val value = expr.value
+ }
}
diff --git a/test/files/neg/t6601.check b/test/files/neg/t6601.check
deleted file mode 100644
index 1410e1b11a..0000000000
--- a/test/files/neg/t6601.check
+++ /dev/null
@@ -1,4 +0,0 @@
-AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor
- new PrivateConstructor("") // Scalac should forbid accessing to the private constructor!
- ^
-one error found
diff --git a/test/files/neg/t6601/AccessPrivateConstructor_2.scala b/test/files/neg/t6601/AccessPrivateConstructor_2.scala
deleted file mode 100644
index 816bc10d79..0000000000
--- a/test/files/neg/t6601/AccessPrivateConstructor_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class AccessPrivateConstructor {
- new PrivateConstructor("") // Scalac should forbid accessing to the private constructor!
-}
diff --git a/test/files/neg/t6601/PrivateConstructor_1.scala b/test/files/neg/t6601/PrivateConstructor_1.scala
deleted file mode 100644
index f09d7ad068..0000000000
--- a/test/files/neg/t6601/PrivateConstructor_1.scala
+++ /dev/null
@@ -1 +0,0 @@
-class PrivateConstructor private(val s: String) extends AnyVal
diff --git a/test/files/neg/t6728.check b/test/files/neg/t6728.check
new file mode 100644
index 0000000000..d853d6f724
--- /dev/null
+++ b/test/files/neg/t6728.check
@@ -0,0 +1,4 @@
+t6728.scala:4: error: '(' expected but '}' found.
+ }
+ ^
+one error found
diff --git a/test/files/neg/t6728.scala b/test/files/neg/t6728.scala
new file mode 100644
index 0000000000..ba0b1a0fdf
--- /dev/null
+++ b/test/files/neg/t6728.scala
@@ -0,0 +1,5 @@
+object X {
+ while(true) {
+ for
+ }
+}
diff --git a/test/files/neg/t6829.check b/test/files/neg/t6829.check
index 8ee6d182eb..7c3c66e0f2 100644
--- a/test/files/neg/t6829.check
+++ b/test/files/neg/t6829.check
@@ -1,6 +1,6 @@
t6829.scala:35: error: type mismatch;
found : AgentSimulation.this.state.type (with underlying type G#State)
- required: _10.State
+ required: _9.State
lazy val actions: Map[G#Agent,G#Action] = agents.map(a => a -> a.chooseAction(state)).toMap
^
t6829.scala:45: error: trait AgentSimulation takes type parameters
@@ -17,12 +17,12 @@ t6829.scala:49: error: not found: value nextState
^
t6829.scala:50: error: type mismatch;
found : s.type (with underlying type Any)
- required: _54.State where val _54: G
+ required: _53.State where val _53: G
val r = rewards(agent).r(s,a,s2)
^
t6829.scala:51: error: type mismatch;
found : s.type (with underlying type Any)
- required: _51.State
+ required: _50.State
agent.learn(s,a,s2,r): G#Agent
^
t6829.scala:53: error: not found: value nextState
diff --git a/test/files/neg/t6963.check b/test/files/neg/t6963.check
deleted file mode 100644
index 41cb796b0b..0000000000
--- a/test/files/neg/t6963.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error: -Xmigration is deprecated: This setting is no longer useful and will be removed. Please remove it from your build.
-one error found
diff --git a/test/files/neg/t6963.flags b/test/files/neg/t6963.flags
deleted file mode 100644
index 0b6d71496a..0000000000
--- a/test/files/neg/t6963.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xmigration -deprecation -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6963.scala b/test/files/neg/t6963.scala
deleted file mode 100644
index 4da52764f5..0000000000
--- a/test/files/neg/t6963.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-
-object test {
-}
diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check
new file mode 100644
index 0000000000..159896fd10
--- /dev/null
+++ b/test/files/neg/t6963a.check
@@ -0,0 +1,5 @@
+t6963a.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ ^
+one error found
diff --git a/test/files/neg/t6963a.flags b/test/files/neg/t6963a.flags
new file mode 100644
index 0000000000..4c61ed9430
--- /dev/null
+++ b/test/files/neg/t6963a.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xmigration:2.7
diff --git a/test/files/neg/t6963a.scala b/test/files/neg/t6963a.scala
new file mode 100644
index 0000000000..b3366b2557
--- /dev/null
+++ b/test/files/neg/t6963a.scala
@@ -0,0 +1,5 @@
+object Test {
+ import scala.collection.mutable._
+
+ List(1,2,3,4,5).scanRight(0)(_+_)
+}
diff --git a/test/files/neg/t6963b.check b/test/files/neg/t6963b.check
new file mode 100644
index 0000000000..7e205a41d0
--- /dev/null
+++ b/test/files/neg/t6963b.check
@@ -0,0 +1,13 @@
+t6963b.scala:2: error: An Array will no longer match as Seq[_].
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ ^
+t6963b.scala:4: error: An Array will no longer match as Seq[_].
+ case _: Seq[_] => true
+ ^
+t6963b.scala:16: error: An Array will no longer match as Seq[_].
+ case (Some(_: Seq[_]), Nil, _) => 1
+ ^
+t6963b.scala:17: error: An Array will no longer match as Seq[_].
+ case (None, List(_: List[_], _), _) => 2
+ ^
+four errors found
diff --git a/test/files/neg/t6963b.flags b/test/files/neg/t6963b.flags
new file mode 100644
index 0000000000..83caa2b147
--- /dev/null
+++ b/test/files/neg/t6963b.flags
@@ -0,0 +1 @@
+-Xmigration:2.7 -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/neg/t6963b.scala b/test/files/neg/t6963b.scala
new file mode 100644
index 0000000000..3cfa8f0dca
--- /dev/null
+++ b/test/files/neg/t6963b.scala
@@ -0,0 +1,20 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+}
diff --git a/test/files/pos/lubs.scala b/test/files/pos/lubs.scala
new file mode 100644
index 0000000000..d7651f86b0
--- /dev/null
+++ b/test/files/pos/lubs.scala
@@ -0,0 +1,3 @@
+object Test {
+ List(new { def f = 1; def g = 1}, new { def f = 2}).map(_.f)
+}
diff --git a/test/files/pos/presuperContext.scala b/test/files/pos/presuperContext.scala
new file mode 100644
index 0000000000..cc34263073
--- /dev/null
+++ b/test/files/pos/presuperContext.scala
@@ -0,0 +1,13 @@
+class A {
+ class C extends { val x: A = this } with AnyRef
+}
+
+class B(x: Int)
+
+class D {
+ class C(x: Int) extends B({val test: D = this; x}) {
+ def this() {
+ this({val test: D = this; 1})
+ }
+ }
+}
diff --git a/test/files/pos/t1014.scala b/test/files/pos/t1014.scala
index 1ac87b225b..3fc10d10dc 100644
--- a/test/files/pos/t1014.scala
+++ b/test/files/pos/t1014.scala
@@ -1,6 +1,8 @@
import scala.xml.{NodeSeq, Elem}
-class EO extends App with Moo{
+class EO extends App with Moo {
+ // The return type is Flog, inherited from the overridden method.
+ // Implicit conversions are applied because the expected type `pt` is `Flog` when `computeType(rhs, pt)` is invoked.
def cat = <cat>dog</cat>
implicit def nodeSeqToFlog(in: Elem): Flog = new Flog(in)
diff --git a/test/files/pos/t1803.flags b/test/files/pos/t1803.flags
new file mode 100644
index 0000000000..d1a8244169
--- /dev/null
+++ b/test/files/pos/t1803.flags
@@ -0,0 +1 @@
+-Yinfer-argument-types
\ No newline at end of file
diff --git a/test/files/pos/t1803.scala b/test/files/pos/t1803.scala
new file mode 100644
index 0000000000..42f4e784a3
--- /dev/null
+++ b/test/files/pos/t1803.scala
@@ -0,0 +1,2 @@
+class A { def foo[A](a: A) = a }
+class B extends A { override def foo[A](b) = b }
diff --git a/test/files/pos/t3577.scala b/test/files/pos/t3577.scala
new file mode 100644
index 0000000000..80a280f67a
--- /dev/null
+++ b/test/files/pos/t3577.scala
@@ -0,0 +1,29 @@
+case class Check[A](val value: A)
+
+case class C2(checks: Check[_]*);
+
+object C {
+ def m(x : C2): Any = (null: Any) match {
+ case C2(_, rest @ _*) => {
+ rest.map(_.value)
+ }
+ }
+}
+
+///////////////////
+
+object Container {
+ trait Exp[+T]
+ abstract class FuncExp[-S, +T]
+
+ sealed abstract class FoundNode[T, Repr] {
+ def optimize[TupleT, U, That](parentNode: FlatMap[T, Repr, U, That]): Any
+ def optimize2[TupleT, U, That](parentNode: Any): Any
+ }
+
+ class FlatMap[T, Repr, U, That]
+
+ val Seq(fn: FoundNode[t, repr]) = Seq[FoundNode[_, _]]()
+ fn.optimize(null) // was: scala.MatchError: ? (of class BoundedWildcardType) @ Variances#varianceInType
+ fn.optimize2(null) // was: fatal error: bad type: ?(class scala.reflect.internal.Types$BoundedWildcardType) @ Pickle.putType
+}
diff --git a/test/files/pos/t5130.scala b/test/files/pos/t5130.scala
new file mode 100644
index 0000000000..676d3c7050
--- /dev/null
+++ b/test/files/pos/t5130.scala
@@ -0,0 +1,46 @@
+import scala.language.reflectiveCalls
+
+class A {
+ this_a =>
+
+ def b = new B
+ class B { def a: this_a.type = this_a }
+}
+trait A2 { def c = () }
+
+object Test {
+ val v1 = new A { def c = () }
+ val v2 = new A with A2 { }
+ val v3: A { def c: Unit } = null
+ def d1 = new A { def c = () }
+ def d2 = new A with A2 { }
+ def d3: A { def c: Unit } = null
+ var x1 = new A { def c = () }
+ var x2 = new A with A2 { }
+ var x3: A { def c: Unit } = null
+
+ def main(args: Array[String]): Unit = {
+ val mv1 = new A { def c = () }
+ val mv2 = new A with A2 { }
+ val mv3: A { def c: Unit } = null
+ def md1 = new A { def c = () }
+ def md2 = new A with A2 { }
+ def md3: A { def c: Unit } = null
+
+ v1.b.a.c
+ v2.b.a.c
+ v3.b.a.c
+ d1.b.a.c
+ d2.b.a.c
+ d3.b.a.c
+ x1.b.a.c
+ x2.b.a.c
+ x3.b.a.c
+ mv1.b.a.c
+ mv2.b.a.c
+ mv3.b.a.c
+ md1.b.a.c
+ md2.b.a.c
+ md3.b.a.c
+ }
+}
diff --git a/test/files/pos/t5604b/T_1.scala b/test/files/pos/t5604b/T_1.scala
new file mode 100644
index 0000000000..179dcb10c6
--- /dev/null
+++ b/test/files/pos/t5604b/T_1.scala
@@ -0,0 +1,6 @@
+// sandbox/t5604/T.scala
+package t6504
+
+trait T {
+ def foo: Boolean = false
+}
diff --git a/test/files/pos/t5604b/T_2.scala b/test/files/pos/t5604b/T_2.scala
new file mode 100644
index 0000000000..179dcb10c6
--- /dev/null
+++ b/test/files/pos/t5604b/T_2.scala
@@ -0,0 +1,6 @@
+// sandbox/t5604/T.scala
+package t6504
+
+trait T {
+ def foo: Boolean = false
+}
diff --git a/test/files/pos/t5604b/Test_1.scala b/test/files/pos/t5604b/Test_1.scala
new file mode 100644
index 0000000000..f7c58ebe83
--- /dev/null
+++ b/test/files/pos/t5604b/Test_1.scala
@@ -0,0 +1,7 @@
+// sandbox/t5604/Test.scala
+package t6504
+
+object Test {
+ def blerg1(a: Any): Any = if (foo) blerg1(0)
+ def blerg2(a: Any): Any = if (t6504.foo) blerg2(0)
+}
diff --git a/test/files/pos/t5604b/Test_2.scala b/test/files/pos/t5604b/Test_2.scala
new file mode 100644
index 0000000000..f7c58ebe83
--- /dev/null
+++ b/test/files/pos/t5604b/Test_2.scala
@@ -0,0 +1,7 @@
+// sandbox/t5604/Test.scala
+package t6504
+
+object Test {
+ def blerg1(a: Any): Any = if (foo) blerg1(0)
+ def blerg2(a: Any): Any = if (t6504.foo) blerg2(0)
+}
diff --git a/test/files/pos/t5604b/pack_1.scala b/test/files/pos/t5604b/pack_1.scala
new file mode 100644
index 0000000000..f50d568bfa
--- /dev/null
+++ b/test/files/pos/t5604b/pack_1.scala
@@ -0,0 +1,5 @@
+// sandbox/t5604/pack.scala
+package t6504
+
+object `package` extends T {
+}
diff --git a/test/files/pos/t5859.scala b/test/files/pos/t5859.scala
new file mode 100644
index 0000000000..2a31e68ee5
--- /dev/null
+++ b/test/files/pos/t5859.scala
@@ -0,0 +1,15 @@
+
+class A {
+ def f(xs: List[Int], ys: AnyRef*) = ()
+ def f(xs: AnyRef*) = ()
+
+ f()
+ f(List[AnyRef](): _*)
+ f(List(): _*)
+ f(Nil: _*)
+ f(Array(): _*)
+ f(Array[AnyRef](): _*)
+ f(List(1))
+ f(List(1), Nil: _*)
+ f(List(1), Array(): _*)
+}
diff --git a/test/files/pos/t6072.scala b/test/files/pos/t6072.scala
new file mode 100644
index 0000000000..e25ebbffc5
--- /dev/null
+++ b/test/files/pos/t6072.scala
@@ -0,0 +1,3 @@
+class A {
+ object B { def eq(lvl: Int) = ??? }
+}
diff --git a/test/files/pos/t6482.scala b/test/files/pos/t6482.scala
new file mode 100644
index 0000000000..24ea38e519
--- /dev/null
+++ b/test/files/pos/t6482.scala
@@ -0,0 +1,11 @@
+final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal {
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
+ if (collection.isEmpty) None else Some(collection.reduceLeft[B](op))
+}
+// error: type arguments [B] do not conform to method reduceLeft's type parameter bounds [B >: A]
+// if (collection.isEmpty) None else Some(collection.reduceLeft[B](op))
+// ^
+
+class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ def baz[B >: A](x: B): List[B] = x :: xs
+}
diff --git a/test/files/pos/t6516.scala b/test/files/pos/t6516.scala
new file mode 100644
index 0000000000..c004055de2
--- /dev/null
+++ b/test/files/pos/t6516.scala
@@ -0,0 +1,19 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+import scala.collection.TraversableLike
+
+// This one compiles
+object Test {
+ type Alias[T, CC[_]] = Context { type PrefixType = TraversableLike[T, CC[T]] }
+ def f() = macro f_impl
+ def f_impl(c: Alias[Int, List])() = ???
+}
+
+// This one doesn't
+object Test2 {
+ type Ctx = scala.reflect.macros.Context
+ type Alias[T, CC[_]] = Ctx { type PrefixType = TraversableLike[T, CC[T]] }
+
+ def f() = macro f_impl
+ def f_impl(c: Alias[Int, List])() = ???
+}
diff --git a/test/files/pos/t6595.flags b/test/files/pos/t6595.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t6595.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t6595.scala b/test/files/pos/t6595.scala
new file mode 100644
index 0000000000..437c0bcf05
--- /dev/null
+++ b/test/files/pos/t6595.scala
@@ -0,0 +1,18 @@
+import scala.annotation.switch
+
+class Foo extends {
+ final val b0 = 5
+} with AnyRef {
+ final val b1 = 10
+
+ // Using the @switch annotation as a means of testing that the
+ // type inferred for b0 is Int(5) and not Int. Only in the former
+ // case can a switch be generated.
+ def f(p: Int) = (p: @switch) match {
+ case `b0` => 1
+ case `b1` => 2
+ case 15 => 3
+ case 20 => 4
+ case _ => 5
+ }
+}
diff --git a/test/files/pos/t6601/PrivateValueClass_1.scala b/test/files/pos/t6601/PrivateValueClass_1.scala
new file mode 100644
index 0000000000..85c3687137
--- /dev/null
+++ b/test/files/pos/t6601/PrivateValueClass_1.scala
@@ -0,0 +1 @@
+class V private (val a: Any) extends AnyVal
\ No newline at end of file
diff --git a/test/files/pos/t6601/UsePrivateValueClass_2.scala b/test/files/pos/t6601/UsePrivateValueClass_2.scala
new file mode 100644
index 0000000000..461b8397b2
--- /dev/null
+++ b/test/files/pos/t6601/UsePrivateValueClass_2.scala
@@ -0,0 +1,10 @@
+object Test {
+ // After the first attempt to make separately compiled value
+ // classes respect the privacy of constructors, we got:
+ //
+ // exception when typing v.a().==(v.a())/class scala.reflect.internal.Trees$Apply
+ // constructor V in class V cannot be accessed in object Test in file test/files/pos/t6601/UsePrivateValueClass_2.scala
+ // scala.reflect.internal.Types$TypeError: constructor V in class V cannot be accessed in object Test
+ def foo(v: V) = v.a == v.a
+ def bar(v: V) = v == v
+}
diff --git a/test/files/pos/t6651.scala b/test/files/pos/t6651.scala
new file mode 100644
index 0000000000..55a3b74e4c
--- /dev/null
+++ b/test/files/pos/t6651.scala
@@ -0,0 +1,33 @@
+class YouAreYourself[A <: AnyRef](val you: A) extends AnyVal {
+ def yourself: you.type = you
+}
+
+object Test {
+ val s = ""
+ val s1: s.type = new YouAreYourself[s.type](s).yourself
+}
+
+trait Path {
+ type Dep <: AnyRef
+}
+
+final class ValueClass[P <: Path](val path: P) extends AnyVal {
+ import path.Dep
+
+ def apply(dep: Dep)(d2: dep.type, foo: Int): (Dep, d2.type) = (d2, d2)
+
+ // This generates dodgy code; note `ValueClass.this`:
+ //
+ // final def bounds$extension[D >: Nothing <: ValueClass.this.path.Dep,
+ // P >: Nothing <: Path]
+ // ($this: ValueClass[P])
+ // (dep: D)
+ // (d2: dep.type, foo: Int): (D, d2.type) = scala.Tuple2.apply[D, d2.type](d2, d2);
+ //
+ // Nothing crashes down the line, but it certainly doesn't conform to best practices.
+ //
+ // A better alternative would be to add a type parameter for the (singleton) type of
+ // the wrapped value.
+ def bounds[D <: Dep](dep: D)(d2: dep.type, foo: Int): (D, d2.type) = (d2, d2)
+}
+
diff --git a/test/files/pos/t6891.flags b/test/files/pos/t6891.flags
new file mode 100644
index 0000000000..fe048006aa
--- /dev/null
+++ b/test/files/pos/t6891.flags
@@ -0,0 +1 @@
+-Ycheck:extmethods -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6891.scala b/test/files/pos/t6891.scala
new file mode 100644
index 0000000000..bed2d0d777
--- /dev/null
+++ b/test/files/pos/t6891.scala
@@ -0,0 +1,26 @@
+object O {
+ implicit class Foo[A](val value: String) extends AnyVal {
+ def bippy() = {
+ @annotation.tailrec def loop(x: A): Unit = loop(x)
+ ()
+ }
+
+ def boppy() = {
+ @annotation.tailrec def loop(x: value.type): Unit = loop(x)
+ ()
+ }
+
+ def beppy[C](c: => C) = {
+ () => c
+ @annotation.tailrec def loop(x: value.type): Unit = loop(x)
+ () => c
+ ()
+ }
+ }
+ // uncaught exception during compilation: Types$TypeError("type mismatch;
+ // found : A(in method bippy$extension)
+ // required: A(in class Foo)") @ scala.tools.nsc.typechecker.Contexts$Context.issueCommon(Contexts.scala:396)
+ // error: scala.reflect.internal.Types$TypeError: type mismatch;
+ // found : A(in method bippy$extension)
+ // required: A(in class Foo)
+}
diff --git a/test/files/pos/t6963c.flags b/test/files/pos/t6963c.flags
new file mode 100644
index 0000000000..4d6e04914f
--- /dev/null
+++ b/test/files/pos/t6963c.flags
@@ -0,0 +1 @@
+-Xmigration:2.9 -Xfatal-warnings
\ No newline at end of file
diff --git a/test/files/pos/t6963c.scala b/test/files/pos/t6963c.scala
new file mode 100644
index 0000000000..0b6b5c757f
--- /dev/null
+++ b/test/files/pos/t6963c.scala
@@ -0,0 +1,25 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+
+ def f5: Unit = {
+ import scala.collection.mutable._
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ }
+}
diff --git a/test/files/pos/t7022.scala b/test/files/pos/t7022.scala
new file mode 100644
index 0000000000..0609e2d250
--- /dev/null
+++ b/test/files/pos/t7022.scala
@@ -0,0 +1,9 @@
+class Catch[+T] {
+ def either[U >: T](body: => U): Either[Throwable, U] = ???
+}
+
+object Test {
+ implicit class RichCatch[T](val c: Catch[T]) extends AnyVal {
+ def validation[U >: T](u: => U): Either[Throwable, U] = c.either(u)
+ }
+}
diff --git a/test/files/pos/t7035.scala b/test/files/pos/t7035.scala
new file mode 100644
index 0000000000..f45bd0a878
--- /dev/null
+++ b/test/files/pos/t7035.scala
@@ -0,0 +1,15 @@
+case class Y(final var x: Int, final private var y: String, final val z1: Boolean, final private val z2: Any) {
+
+ import Test.{y => someY}
+ List(someY.x: Int, someY.y: String, someY.z1: Boolean, someY.z2: Any)
+ someY.y = ""
+}
+
+object Test {
+ val y = Y(0, "", true, new {})
+ val unapp: Option[(Int, String, Boolean, Any)] = // was (Int, Boolean, String, Any) !!
+ Y.unapply(y)
+
+ val Y(a, b, c, d) = y
+ List(a: Int, b: String, c: Boolean, d: Any)
+}
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
new file mode 100644
index 0000000000..8856fef5b3
--- /dev/null
+++ b/test/files/run/analyzerPlugins.check
@@ -0,0 +1,197 @@
+adaptBoundsToAnnots(List( <: Int), List(type T), List(Int @testAnn)) [2]
+annotationsConform(Boolean @testAnn, Boolean) [1]
+annotationsConform(Boolean(false), Boolean @testAnn) [1]
+annotationsConform(Int @testAnn, ?A) [1]
+annotationsConform(Int @testAnn, Any) [1]
+annotationsConform(Int @testAnn, Int) [2]
+annotationsConform(Int(1) @testAnn, Int) [1]
+annotationsConform(Int(1), Int @testAnn) [1]
+annotationsConform(Nothing, Int @testAnn) [2]
+annotationsConform(String @testAnn, String) [1]
+canAdaptAnnotations(Trees$Ident, String) [1]
+canAdaptAnnotations(Trees$Select, ?) [1]
+canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1]
+canAdaptAnnotations(Trees$Select, Boolean) [1]
+canAdaptAnnotations(Trees$Select, String @testAnn) [1]
+canAdaptAnnotations(Trees$TypeTree, ?) [10]
+canAdaptAnnotations(Trees$Typed, ?) [3]
+canAdaptAnnotations(Trees$Typed, Any) [1]
+canAdaptAnnotations(Trees$Typed, Int) [1]
+lub(List(Int @testAnn, Int)) [1]
+pluginsPt(?, Trees$Annotated) [7]
+pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$ApplyImplicitView) [2]
+pluginsPt(?, Trees$Assign) [7]
+pluginsPt(?, Trees$Block) [4]
+pluginsPt(?, Trees$ClassDef) [2]
+pluginsPt(?, Trees$DefDef) [14]
+pluginsPt(?, Trees$Ident) [51]
+pluginsPt(?, Trees$If) [2]
+pluginsPt(?, Trees$Literal) [16]
+pluginsPt(?, Trees$New) [5]
+pluginsPt(?, Trees$PackageDef) [1]
+pluginsPt(?, Trees$Return) [1]
+pluginsPt(?, Trees$Select) [51]
+pluginsPt(?, Trees$Super) [2]
+pluginsPt(?, Trees$This) [20]
+pluginsPt(?, Trees$TypeApply) [3]
+pluginsPt(?, Trees$TypeBoundsTree) [2]
+pluginsPt(?, Trees$TypeDef) [1]
+pluginsPt(?, Trees$TypeTree) [38]
+pluginsPt(?, Trees$Typed) [1]
+pluginsPt(?, Trees$ValDef) [21]
+pluginsPt(Any, Trees$Literal) [2]
+pluginsPt(Any, Trees$Typed) [1]
+pluginsPt(Array[Any], Trees$ArrayValue) [1]
+pluginsPt(Boolean @testAnn, Trees$Literal) [1]
+pluginsPt(Boolean @testAnn, Trees$Select) [1]
+pluginsPt(Boolean, Trees$Apply) [1]
+pluginsPt(Boolean, Trees$Ident) [1]
+pluginsPt(Boolean, Trees$Literal) [1]
+pluginsPt(Double, Trees$Select) [1]
+pluginsPt(Int @testAnn, Trees$Literal) [1]
+pluginsPt(Int, Trees$Apply) [1]
+pluginsPt(Int, Trees$Ident) [2]
+pluginsPt(Int, Trees$If) [1]
+pluginsPt(Int, Trees$Literal) [5]
+pluginsPt(Int, Trees$Select) [3]
+pluginsPt(List, Trees$Apply) [1]
+pluginsPt(List[Any], Trees$Select) [1]
+pluginsPt(String @testAnn, Trees$Select) [1]
+pluginsPt(String, Trees$Apply) [1]
+pluginsPt(String, Trees$Block) [2]
+pluginsPt(String, Trees$Ident) [4]
+pluginsPt(String, Trees$Literal) [1]
+pluginsPt(String, Trees$Select) [1]
+pluginsPt(String, Trees$Typed) [1]
+pluginsPt(Unit, Trees$Assign) [1]
+pluginsPt(scala.annotation.Annotation, Trees$Apply) [5]
+pluginsTypeSig(<none>, Trees$Template) [2]
+pluginsTypeSig(class A, Trees$ClassDef) [1]
+pluginsTypeSig(class testAnn, Trees$ClassDef) [1]
+pluginsTypeSig(constructor A, Trees$DefDef) [2]
+pluginsTypeSig(constructor testAnn, Trees$DefDef) [1]
+pluginsTypeSig(method foo, Trees$DefDef) [1]
+pluginsTypeSig(method method, Trees$DefDef) [1]
+pluginsTypeSig(method nested, Trees$DefDef) [1]
+pluginsTypeSig(type T, Trees$TypeDef) [2]
+pluginsTypeSig(value annotField, Trees$ValDef) [2]
+pluginsTypeSig(value f, Trees$ValDef) [1]
+pluginsTypeSig(value inferField, Trees$ValDef) [2]
+pluginsTypeSig(value lub1, Trees$ValDef) [2]
+pluginsTypeSig(value lub2, Trees$ValDef) [2]
+pluginsTypeSig(value param, Trees$ValDef) [2]
+pluginsTypeSig(value str, Trees$ValDef) [1]
+pluginsTypeSig(value x, Trees$ValDef) [4]
+pluginsTypeSig(value y, Trees$ValDef) [4]
+pluginsTypeSig(variable count, Trees$ValDef) [3]
+pluginsTypeSigAccessor(value annotField) [1]
+pluginsTypeSigAccessor(value inferField) [1]
+pluginsTypeSigAccessor(value lub1) [1]
+pluginsTypeSigAccessor(value lub2) [1]
+pluginsTypeSigAccessor(value x) [1]
+pluginsTypeSigAccessor(value y) [1]
+pluginsTypeSigAccessor(variable count) [2]
+pluginsTyped( <: Int, Trees$TypeBoundsTree) [2]
+pluginsTyped(()Object, Trees$Select) [1]
+pluginsTyped(()String, Trees$Ident) [1]
+pluginsTyped(()String, Trees$TypeApply) [1]
+pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
+pluginsTyped(()testAnn, Trees$Select) [10]
+pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
+pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
+pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
+pluginsTyped((x: Double)Double <and> (x: Float)Float <and> (x: Long)Long <and> (x: Int)Int <and> (x: Char)Int <and> (x: Short)Int <and> (x: Byte)Int <and> (x: String)String, Trees$Select) [1]
+pluginsTyped((x: String)scala.collection.immutable.StringOps, Trees$Select) [2]
+pluginsTyped((xs: Array[Any])scala.collection.mutable.WrappedArray[Any], Trees$TypeApply) [1]
+pluginsTyped(<empty>.type, Trees$Ident) [1]
+pluginsTyped(<error>, Trees$Select) [1]
+pluginsTyped(<notype>, Trees$ClassDef) [2]
+pluginsTyped(<notype>, Trees$DefDef) [14]
+pluginsTyped(<notype>, Trees$PackageDef) [1]
+pluginsTyped(<notype>, Trees$TypeDef) [1]
+pluginsTyped(<notype>, Trees$ValDef) [21]
+pluginsTyped(<root>, Trees$Ident) [1]
+pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
+pluginsTyped(=> Double, Trees$Select) [4]
+pluginsTyped(=> Int, Trees$Select) [5]
+pluginsTyped(=> Int, Trees$TypeApply) [1]
+pluginsTyped(=> String @testAnn, Trees$Select) [1]
+pluginsTyped(A, Trees$Apply) [1]
+pluginsTyped(A, Trees$Ident) [2]
+pluginsTyped(A, Trees$This) [8]
+pluginsTyped(A, Trees$TypeTree) [4]
+pluginsTyped(A.super.type, Trees$Super) [1]
+pluginsTyped(A.this.type, Trees$This) [11]
+pluginsTyped(Any, Trees$TypeTree) [1]
+pluginsTyped(AnyRef, Trees$Select) [4]
+pluginsTyped(Array[Any], Trees$ArrayValue) [1]
+pluginsTyped(Boolean @testAnn, Trees$Select) [1]
+pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4]
+pluginsTyped(Boolean(false), Trees$Literal) [2]
+pluginsTyped(Boolean, Trees$Apply) [1]
+pluginsTyped(Boolean, Trees$Select) [4]
+pluginsTyped(Char('c'), Trees$Literal) [2]
+pluginsTyped(Double, Trees$Select) [6]
+pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
+pluginsTyped(Int @testAnn, Trees$Typed) [2]
+pluginsTyped(Int(0), Trees$Literal) [3]
+pluginsTyped(Int(1) @testAnn, Trees$Typed) [1]
+pluginsTyped(Int(1), Trees$Literal) [8]
+pluginsTyped(Int(2), Trees$Literal) [1]
+pluginsTyped(Int, Trees$Apply) [1]
+pluginsTyped(Int, Trees$Ident) [2]
+pluginsTyped(Int, Trees$If) [2]
+pluginsTyped(Int, Trees$Select) [15]
+pluginsTyped(Int, Trees$TypeTree) [13]
+pluginsTyped(List, Trees$Apply) [1]
+pluginsTyped(List, Trees$Select) [1]
+pluginsTyped(List[Any], Trees$Apply) [1]
+pluginsTyped(List[Any], Trees$Select) [1]
+pluginsTyped(List[Any], Trees$TypeTree) [3]
+pluginsTyped(Nothing, Trees$Return) [1]
+pluginsTyped(Nothing, Trees$Select) [2]
+pluginsTyped(Object, Trees$Apply) [1]
+pluginsTyped(String @testAnn, Trees$Ident) [1]
+pluginsTyped(String @testAnn, Trees$Select) [1]
+pluginsTyped(String @testAnn, Trees$TypeTree) [4]
+pluginsTyped(String(""), Trees$Literal) [2]
+pluginsTyped(String("huhu"), Trees$Literal) [1]
+pluginsTyped(String("str") @testAnn, Trees$Typed) [1]
+pluginsTyped(String("str"), Trees$Literal) [1]
+pluginsTyped(String("str"), Trees$Typed) [1]
+pluginsTyped(String("two"), Trees$Literal) [2]
+pluginsTyped(String, Trees$Apply) [2]
+pluginsTyped(String, Trees$Block) [2]
+pluginsTyped(String, Trees$Ident) [1]
+pluginsTyped(String, Trees$Select) [9]
+pluginsTyped(String, Trees$TypeTree) [7]
+pluginsTyped(Unit, Trees$Apply) [2]
+pluginsTyped(Unit, Trees$Assign) [8]
+pluginsTyped(Unit, Trees$Block) [4]
+pluginsTyped(Unit, Trees$If) [1]
+pluginsTyped(Unit, Trees$Literal) [5]
+pluginsTyped(Unit, Trees$TypeTree) [1]
+pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
+pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
+pluginsTyped([T0 >: ? <: ?]()T0, Trees$Select) [1]
+pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1]
+pluginsTyped(annotation.type, Trees$Select) [4]
+pluginsTyped(math.type, Trees$Select) [9]
+pluginsTyped(scala.annotation.Annotation, Trees$Apply) [1]
+pluginsTyped(scala.annotation.TypeConstraint, Trees$Select) [4]
+pluginsTyped(scala.annotation.TypeConstraint, Trees$TypeTree) [2]
+pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
+pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
+pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
+pluginsTyped(scala.type, Trees$Ident) [1]
+pluginsTyped(scala.type, Trees$Select) [1]
+pluginsTyped(str.type, Trees$Ident) [3]
+pluginsTyped(testAnn, Trees$Apply) [5]
+pluginsTyped(testAnn, Trees$Ident) [5]
+pluginsTyped(testAnn, Trees$New) [5]
+pluginsTyped(testAnn, Trees$This) [1]
+pluginsTyped(testAnn, Trees$TypeTree) [2]
+pluginsTyped(testAnn.super.type, Trees$Super) [1]
+pluginsTyped(type, Trees$Select) [1]
+pluginsTypedReturn(return f, String) [1]
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
new file mode 100644
index 0000000000..daef83fa30
--- /dev/null
+++ b/test/files/run/analyzerPlugins.scala
@@ -0,0 +1,121 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp"
+
+ def code = """
+ class testAnn extends annotation.TypeConstraint
+
+ class A(param: Double) extends { val x: Int = 1; val y = "two"; type T = A } with AnyRef {
+ val inferField = ("str": @testAnn)
+ val annotField: Boolean @testAnn = false
+
+ val lub1 = List('c', (1: Int @testAnn), "")
+ val lub2 = if (annotField) (1: @testAnn) else 2
+
+ def foo[T <: Int] = 0
+ foo[Int @testAnn]
+
+ var count = 0
+
+ math.random // some statement
+
+ def method: String = {
+ math.random
+ val f = inferField
+
+ def nested(): String = {
+ if(count == 1)
+ return f
+ "huhu"
+ }
+ nested()
+ }
+
+ def this(str: String) {
+ this(str.toDouble)
+ math.random
+ count += 1
+ }
+ }
+ """.trim
+
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ import analyzer._
+
+ val output = collection.mutable.ListBuffer[String]()
+
+ object annotChecker extends AnnotationChecker {
+ def hasTestAnn(tps: Type*) = {
+ tps exists (_.annotations.map(_.toString) contains "testAnn")
+ }
+
+ def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
+ if (hasTestAnn(tpe1, tpe2))
+ output += s"annotationsConform($tpe1, $tpe2)"
+ true
+ }
+
+ override def annotationsLub(tp: Type, ts: List[Type]): Type = {
+ if (hasTestAnn(ts: _*))
+ output += s"lub($ts)"
+ tp
+ }
+
+ override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ if (hasTestAnn(targs: _*))
+ output += s"adaptBoundsToAnnots($bounds, $tparams, $targs)"
+ bounds
+ }
+ }
+
+ object analyzerPlugin extends AnalyzerPlugin {
+ def treeClass(t: Tree) = t.getClass.toString.split('.').last
+
+ override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = {
+ output += s"pluginsPt($pt, ${treeClass(tree)})"
+ pt
+ }
+
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ output += s"pluginsTyped($tpe, ${treeClass(tree)})"
+ tpe
+ }
+
+ override def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = {
+ output += s"pluginsTypeSig(${defTree.symbol}, ${treeClass(defTree)})"
+ tpe
+ }
+
+ override def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = {
+ output += s"pluginsTypeSigAccessor(${tree.symbol})"
+ tpe
+ }
+
+
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ output += s"canAdaptAnnotations(${treeClass(tree)}, $pt)"
+ false
+ }
+
+ override def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ output += s"pluginsTypedReturn($tree, $pt)"
+ tpe
+ }
+
+ }
+
+ addAnnotationChecker(annotChecker)
+ addAnalyzerPlugin(analyzerPlugin)
+ compileString(global)(code)
+
+ val res = output.groupBy(identity).mapValues(_.size).map { case (k,v) => s"$k [$v]" }.toList.sorted
+ println(res.mkString("\n"))
+ }
+
+}
diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check
index 700af3b81b..80d178cba3 100644
--- a/test/files/run/idempotency-case-classes.check
+++ b/test/files/run/idempotency-case-classes.check
@@ -42,7 +42,7 @@ C(2,3)
C.super.<init>();
()
};
- final override def toString(): String = "C";
+ final override <synthetic> def toString(): String = "C";
case <synthetic> def apply(x: Int, y: Int): C = new C(x, y);
case <synthetic> def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null))
scala.this.None
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 282542a732..f2f0b60687 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -21,59 +21,60 @@
< 92 JUMP 7
<
< 7:
-395c391
+391c387
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, value x4, value x5, value x
-397c393
-< blocks: [1,2,3,4,5,8,11,13,14,16]
+393c389
+< blocks: [1,2,3,4,5,8,10,11,13]
---
-> blocks: [1,2,3,5,8,11,13,14,16,17]
-421c417,426
+> blocks: [1,2,3,5,8,10,11,13,14]
+417c413,422
< 103 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 17
+> ? JUMP 14
>
-> 17:
+> 14:
> 101 LOAD_LOCAL(value ex6)
> 101 STORE_LOCAL(value x4)
> 101 SCOPE_ENTER value x4
> 106 LOAD_LOCAL(value x4)
> 106 IS_INSTANCE REF(class MyException)
-> 106 CZJUMP (BOOL)NE ? 5 : 11
-434,436d438
+> 106 CZJUMP (BOOL)NE ? 5 : 8
+430,432d434
< 101 JUMP 4
<
< 4:
-450,453d451
+442,445d443
< 106 LOAD_LOCAL(value x5)
< 106 CALL_METHOD MyException.message (dynamic)
< 106 STORE_LOCAL(value message)
< 106 SCOPE_ENTER value message
-455c453,454
+447c445,446
< 106 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 106 CALL_METHOD MyException.message (dynamic)
-527c526
+519c518
< blocks: [1,2,3,4,6,7,8,9,10]
---
> blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
-556c555,560
+548c547
< 306 THROW(MyException)
---
> ? JUMP 11
->
+549a549,553
> 11:
> ? LOAD_LOCAL(variable monitor4)
> 305 MONITOR_EXIT
> ? JUMP 12
-562c566
+>
+554c558
< ? THROW(Throwable)
---
> ? JUMP 12
-568c572,579
+560c564,571
< ? THROW(Throwable)
---
> ? STORE_LOCAL(value t)
@@ -84,7 +85,7 @@
> 304 MONITOR_EXIT
> ? STORE_LOCAL(value t)
> ? JUMP 13
-583a595,606
+575a587,598
> 13:
> 310 LOAD_MODULE object Predef
> 310 CALL_PRIMITIVE(StartConcat)
@@ -97,35 +98,35 @@
> 310 CALL_METHOD scala.Predef.println (dynamic)
> 310 JUMP 2
>
-592c615
+584c607
< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
---
> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
-595c618
+587c610
< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
---
> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
-627c650
+619c642
< blocks: [1,2,3,4,5,6,7,9,10]
---
> blocks: [1,2,3,4,5,6,7,9,10,11,12]
-651c674,675
+643c666,667
< 78 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 11
-652a677,681
+644a669,673
> 11:
> 81 LOAD_LOCAL(value e)
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
>
-680c709,710
+672c701,702
< 81 THROW(Exception)
---
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
-696a727,739
+688a719,731
> 12:
> 83 LOAD_MODULE object Predef
> 83 CONSTANT("finally")
@@ -139,88 +140,88 @@
> 84 LOAD_LOCAL(variable exc1)
> 84 THROW(Throwable)
>
-702c745
+694c737
< catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
---
> catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
-726c769
+718c761
< locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
-728c771
-< blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31]
+720c763
+< blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
---
-> blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31,32,33,34]
-752c795,802
+> blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
+744c787,794
< 172 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 32
+> ? JUMP 26
>
-> 32:
+> 26:
> 170 LOAD_LOCAL(value ex6)
> 170 STORE_LOCAL(value x4)
> 170 SCOPE_ENTER value x4
-> 170 JUMP 18
-799,802d848
+> 170 JUMP 15
+787,790d836
< 175 LOAD_LOCAL(value x5)
< 175 CALL_METHOD MyException.message (dynamic)
< 175 STORE_LOCAL(value message)
< 175 SCOPE_ENTER value message
-804c850,851
+792c838,839
< 176 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 176 CALL_METHOD MyException.message (dynamic)
-808c855,856
+796c843,844
< 177 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 177 CALL_METHOD MyException.message (dynamic)
-810c858,859
+798c846,847
< 177 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 33
-814c863,864
+> ? JUMP 27
+802c851,852
< 170 THROW(Throwable)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 33
-823a874,879
-> 33:
+> ? JUMP 27
+811a862,867
+> 27:
> 169 LOAD_LOCAL(value ex6)
> 169 STORE_LOCAL(value x4)
> 169 SCOPE_ENTER value x4
> 169 JUMP 5
>
-838,841d893
+822,825d877
< 180 LOAD_LOCAL(value x5)
< 180 CALL_METHOD MyException.message (dynamic)
< 180 STORE_LOCAL(value message)
< 180 SCOPE_ENTER value message
-843c895,896
+827c879,880
< 181 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 181 CALL_METHOD MyException.message (dynamic)
-847c900,901
+831c884,885
< 182 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 182 CALL_METHOD MyException.message (dynamic)
-849c903,904
+833c887,888
< 182 THROW(MyException)
---
> ? STORE_LOCAL(variable exc2)
-> ? JUMP 34
-853c908,909
+> ? JUMP 28
+837c892,893
< 169 THROW(Throwable)
---
> ? STORE_LOCAL(variable exc2)
-> ? JUMP 34
-869a926,938
-> 34:
+> ? JUMP 28
+853a910,922
+> 28:
> 184 LOAD_MODULE object Predef
> 184 CONSTANT("finally")
> 184 CALL_METHOD scala.Predef.println (dynamic)
@@ -233,159 +234,158 @@
> 185 LOAD_LOCAL(variable exc2)
> 185 THROW(Throwable)
>
-875c944
-< catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30) starting at: 4
+859c928
+< catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
---
-> catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30, 32) starting at: 4
-878c947
-< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30) starting at: 3
+> catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
+862c931
+< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
---
-> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30, 32, 33) starting at: 3
-902c971
+> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
+886c955
< locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value e, value ex6, value x4, value x5, value x
-904c973
-< blocks: [1,2,3,6,7,8,11,14,16,17,19]
+888c957
+< blocks: [1,2,3,6,7,8,11,13,14,16]
---
-> blocks: [1,2,3,6,7,8,11,14,16,17,19,20]
-928c997,1004
+> blocks: [1,2,3,6,7,8,11,13,14,16,17]
+912c981,988
< 124 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
+> ? JUMP 17
>
-> 20:
+> 17:
> 122 LOAD_LOCAL(value ex6)
> 122 STORE_LOCAL(value x4)
> 122 SCOPE_ENTER value x4
> 122 JUMP 7
-957,960d1032
+937,940d1012
< 127 LOAD_LOCAL(value x5)
< 127 CALL_METHOD MyException.message (dynamic)
< 127 STORE_LOCAL(value message)
< 127 SCOPE_ENTER value message
-962c1034,1035
+942c1014,1015
< 127 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 127 CALL_METHOD MyException.message (dynamic)
-991c1064
-< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19) starting at: 3
+971c1044
+< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
---
-> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19, 20) starting at: 3
-1015c1088
+> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
+995c1068
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
---
> locals: value args, variable result, value ex6, value x4, value x5, value x, value e
-1017c1090
-< blocks: [1,2,3,4,5,8,11,15,16,17,19]
+997c1070
+< blocks: [1,2,3,4,5,8,12,13,14,16]
---
-> blocks: [1,2,3,5,8,11,15,16,17,19,20]
-1041c1114,1123
+> blocks: [1,2,3,5,8,12,13,14,16,17]
+1021c1094,1103
< 148 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
+> ? JUMP 17
>
-> 20:
+> 17:
> 145 LOAD_LOCAL(value ex6)
> 145 STORE_LOCAL(value x4)
> 145 SCOPE_ENTER value x4
> 154 LOAD_LOCAL(value x4)
> 154 IS_INSTANCE REF(class MyException)
-> 154 CZJUMP (BOOL)NE ? 5 : 11
-1062,1064d1143
+> 154 CZJUMP (BOOL)NE ? 5 : 8
+1042,1044d1123
< 145 JUMP 4
<
< 4:
-1078,1081d1156
+1054,1057d1132
< 154 LOAD_LOCAL(value x5)
< 154 CALL_METHOD MyException.message (dynamic)
< 154 STORE_LOCAL(value message)
< 154 SCOPE_ENTER value message
-1083c1158,1159
+1059c1134,1135
< 154 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 154 CALL_METHOD MyException.message (dynamic)
-1300c1376
+1276c1352
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1324c1400,1401
+1300c1376,1383
< 38 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 8
-1325a1403,1408
+>
> 8:
> 42 LOAD_MODULE object Predef
> 42 CONSTANT("IllegalArgumentException")
> 42 CALL_METHOD scala.Predef.println (dynamic)
> 42 JUMP 2
->
-1371c1454
+1347c1430
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, value x4, value x5, value x
-1373c1456
-< blocks: [1,2,3,4,5,8,11,13,14,16,17,19]
+1349c1432
+< blocks: [1,2,3,4,5,8,10,11,13,14,16]
---
-> blocks: [1,2,3,5,8,11,13,14,16,17,19,20]
-1397c1480,1481
+> blocks: [1,2,3,5,8,10,11,13,14,16,17]
+1373c1456,1457
< 203 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
-1417c1501,1510
+> ? JUMP 17
+1393c1477,1486
< 209 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
+> ? JUMP 17
>
-> 20:
+> 17:
> 200 LOAD_LOCAL(value ex6)
> 200 STORE_LOCAL(value x4)
> 200 SCOPE_ENTER value x4
> 212 LOAD_LOCAL(value x4)
> 212 IS_INSTANCE REF(class MyException)
-> 212 CZJUMP (BOOL)NE ? 5 : 11
-1430,1432d1522
+> 212 CZJUMP (BOOL)NE ? 5 : 8
+1406,1408d1498
< 200 JUMP 4
<
< 4:
-1446,1449d1535
+1418,1421d1507
< 212 LOAD_LOCAL(value x5)
< 212 CALL_METHOD MyException.message (dynamic)
< 212 STORE_LOCAL(value message)
< 212 SCOPE_ENTER value message
-1451c1537,1538
+1423c1509,1510
< 213 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 213 CALL_METHOD MyException.message (dynamic)
-1495c1582
+1467c1554
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1519c1606,1607
+1491c1578,1579
< 58 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 8
-1520a1609,1614
+1492a1581,1586
> 8:
> 62 LOAD_MODULE object Predef
> 62 CONSTANT("RuntimeException")
> 62 CALL_METHOD scala.Predef.println (dynamic)
> 62 JUMP 2
>
-1568c1662
+1540c1634
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1588c1682,1687
+1560c1654,1659
< 229 THROW(MyException)
---
> ? JUMP 5
@@ -394,19 +394,19 @@
> ? LOAD_LOCAL(variable monitor1)
> 228 MONITOR_EXIT
> 228 THROW(Throwable)
-1594c1693
+1566c1665
< ? THROW(Throwable)
---
> 228 THROW(Throwable)
-1622c1721
+1594c1693
< locals: value args, variable result, variable monitor2, variable monitorResult1
---
> locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
-1624c1723
+1596c1695
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1647c1746,1754
+1619c1718,1726
< 245 THROW(MyException)
---
> ? STORE_LOCAL(value exception$1)
@@ -418,7 +418,7 @@
> ? LOAD_LOCAL(variable monitor2)
> 244 MONITOR_EXIT
> 244 THROW(Throwable)
-1653c1760
+1625c1732
< ? THROW(Throwable)
---
> 244 THROW(Throwable)
diff --git a/test/files/run/reify-staticXXX.scala b/test/files/run/reify-staticXXX.scala
index dc861f843e..e80157dd8f 100644
--- a/test/files/run/reify-staticXXX.scala
+++ b/test/files/run/reify-staticXXX.scala
@@ -4,12 +4,12 @@ import scala.tools.reflect.Eval
object B { override def toString = "object" }
class C { override def toString = "class" }
-package foo {
+package foo1 {
object B { override def toString = "package > object" }
class C { override def toString = "package > class" }
}
-object foo {
+object Foo2 {
object B { override def toString = "object > object" }
class C { override def toString = "object > class" }
}
@@ -20,14 +20,14 @@ object packageless {
println(reify(B).eval)
println(new C)
println(reify(new C).eval)
- println(foo.B)
- println(reify(foo.B).eval)
- println(new foo.C)
- println(reify(new foo.C).eval)
- println(_root_.foo.B)
- println(reify(_root_.foo.B).eval)
- println(new _root_.foo.C)
- println(reify(new _root_.foo.C).eval)
+ println(Foo2.B)
+ println(reify(Foo2.B).eval)
+ println(new Foo2.C)
+ println(reify(new Foo2.C).eval)
+ println(_root_.foo1.B)
+ println(reify(_root_.foo1.B).eval)
+ println(new _root_.foo1.C)
+ println(reify(new _root_.foo1.C).eval)
}
}
@@ -38,14 +38,14 @@ package packageful {
println(reify(B).eval)
println(new C)
println(reify(new C).eval)
- println(foo.B)
- println(reify(foo.B).eval)
- println(new foo.C)
- println(reify(new foo.C).eval)
- println(_root_.foo.B)
- println(reify(_root_.foo.B).eval)
- println(new _root_.foo.C)
- println(reify(new _root_.foo.C).eval)
+ println(Foo2.B)
+ println(reify(Foo2.B).eval)
+ println(new Foo2.C)
+ println(reify(new Foo2.C).eval)
+ println(_root_.foo1.B)
+ println(reify(_root_.foo1.B).eval)
+ println(new _root_.foo1.C)
+ println(reify(new _root_.foo1.C).eval)
}
}
}
diff --git a/test/files/run/t2418.check b/test/files/run/t2418.check
new file mode 100644
index 0000000000..f599e28b8a
--- /dev/null
+++ b/test/files/run/t2418.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/t2418.scala b/test/files/run/t2418.scala
new file mode 100644
index 0000000000..f330bef60a
--- /dev/null
+++ b/test/files/run/t2418.scala
@@ -0,0 +1,10 @@
+class Foo {
+ @volatile final var x=10
+ override def toString = "" + x
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println((new Foo))
+ }
+}
diff --git a/test/files/run/t2818.check b/test/files/run/t2818.check
new file mode 100644
index 0000000000..31286c990b
--- /dev/null
+++ b/test/files/run/t2818.check
@@ -0,0 +1,4 @@
+105
+499999500000
+0
+1
diff --git a/test/files/run/t2818.scala b/test/files/run/t2818.scala
new file mode 100644
index 0000000000..19b67cbc88
--- /dev/null
+++ b/test/files/run/t2818.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ println((List.range(1L, 15L) :\ 0L) (_ + _))
+ println((List.range(1L, 1000000L) :\ 0L) (_ + _))
+ println((List.fill(5)(1) :\ 1) (_ - _))
+ println((List.fill(1000000)(1) :\ 1) (_ - _))
+}
diff --git a/test/files/run/t3353.check b/test/files/run/t3353.check
new file mode 100644
index 0000000000..8b4ae1fe69
--- /dev/null
+++ b/test/files/run/t3353.check
@@ -0,0 +1 @@
+Got: foo and None
diff --git a/test/files/run/t3353.scala b/test/files/run/t3353.scala
new file mode 100644
index 0000000000..eeb63c1b05
--- /dev/null
+++ b/test/files/run/t3353.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+
+ "foo" match {
+ case Matcher(result) => println(result)
+ }
+
+ object Matcher{
+ def unapply(s: String)(implicit secondParam: Option[String] = None) = Some("Got: " + s + " and " + secondParam)
+ }
+}
diff --git a/test/files/run/t4729.check b/test/files/run/t4729.check
new file mode 100644
index 0000000000..9a2aa56d99
--- /dev/null
+++ b/test/files/run/t4729.check
@@ -0,0 +1,4 @@
+WrappedArray(1, 2)
+WrappedArray(1, 2)
+WrappedArray(1, 2)
+WrappedArray(1, 2)
diff --git a/test/files/run/t4729/J_1.java b/test/files/run/t4729/J_1.java
new file mode 100644
index 0000000000..2ffb5a88d1
--- /dev/null
+++ b/test/files/run/t4729/J_1.java
@@ -0,0 +1,4 @@
+// Java Interface:
+public interface J_1 {
+ public void method(String... s);
+}
diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala
new file mode 100644
index 0000000000..e34e3d34d4
--- /dev/null
+++ b/test/files/run/t4729/S_2.scala
@@ -0,0 +1,29 @@
+ // Scala class:
+class ScalaVarArgs extends J_1 {
+ // -- no problem overriding it using an ordinary class
+ def method(s: String*) { println(s) }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ //[1] Ok - no problem using inferred type
+ val varArgs = new J_1 {
+ def method(s: String*) { println(s) }
+ }
+ varArgs.method("1", "2")
+
+ //[2] Ok -- no problem when explicitly setting its type after construction
+ val b: J_1 = varArgs
+ b.method("1", "2")
+
+ //[3] Ok -- no problem calling its method
+ (new ScalaVarArgs).method("1", "2")
+ (new ScalaVarArgs: J_1).method("1", "2")
+
+ //[4] Not Ok -- error when assigning anonymous class to an explicitly typed val
+ // Compiler error: object creation impossible, since method method in trait VarArgs of type (s: <repeated...>[java.lang.String])Unit is not defined
+ val tagged: J_1 = new J_1 {
+ def method(s: String*) { println(s) }
+ }
+ }
+}
diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check
new file mode 100644
index 0000000000..7a48b2b711
--- /dev/null
+++ b/test/files/run/t5313.check
@@ -0,0 +1,12 @@
+STORE_LOCAL(variable kept1)
+STORE_LOCAL(value result)
+STORE_LOCAL(variable kept1)
+STORE_LOCAL(variable kept2)
+STORE_LOCAL(value kept3)
+STORE_LOCAL(variable kept2)
+STORE_LOCAL(variable kept4)
+STORE_LOCAL(variable kept4)
+STORE_LOCAL(variable kept5)
+STORE_LOCAL(variable kept5)
+STORE_LOCAL(variable kept6)
+STORE_LOCAL(variable kept6)
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
new file mode 100644
index 0000000000..7da8726a1f
--- /dev/null
+++ b/test/files/run/t5313.scala
@@ -0,0 +1,54 @@
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def printIcodeAfterPhase = "dce"
+
+ override def extraSettings: String = super.extraSettings + " -optimize"
+
+ override def code =
+ """class Foo {
+ def randomBoolean = util.Random.nextInt % 2 == 0
+ def bar = {
+ var kept1 = new Object
+ val result = new java.lang.ref.WeakReference(kept1)
+ kept1 = null // we can't eliminate this assignment because result can observe
+ // when the object has no more references. See SI-5313
+ kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered
+ var erased2 = null // we can eliminate this store because it's never used
+ val erased3 = erased2 // and this
+ var erased4 = erased2 // and this
+ val erased5 = erased4 // and this
+ var kept2: Object = new Object // ultimately can't be eliminated
+ while(randomBoolean) {
+ val kept3 = kept2
+ kept2 = null // this can't, because it clobbers kept2, which is used
+ erased4 = null // safe to eliminate
+ println(kept3)
+ }
+ var kept4 = new Object // have to keep, it's used
+ try
+ println(kept4)
+ catch {
+ case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used
+ }
+ var kept5 = new Object
+ print(kept5)
+ kept5 = null // can't eliminate; it's a clobber and it's used
+ print(kept5)
+ kept5 = null // can eliminate because we don't care about clobbers of nulls
+ while(randomBoolean) {
+ var kept6: AnyRef = null // not used, but have to keep because it clobbers the next used store
+ // on the back edge of the loop
+ kept6 = new Object // used
+ println(kept6)
+ }
+ result
+ }
+ }""".stripMargin
+
+ override def show() {
+ val storeLocal = "STORE_LOCAL"
+ val lines1 = collectIcode("") filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal)))
+ println(lines1 mkString "\n")
+ }
+}
diff --git a/test/files/run/t5543.check b/test/files/run/t5543.check
index 517038f4c7..2ef2d51ff4 100644
--- a/test/files/run/t5543.check
+++ b/test/files/run/t5543.check
@@ -1,3 +1,9 @@
Test, 7, 119
m, 3, 19
Test, 5, 85
+T
+C
+T
+T
+D
+T
diff --git a/test/files/run/t5543.scala b/test/files/run/t5543.scala
index 651bc7f2b2..3684bf9690 100644
--- a/test/files/run/t5543.scala
+++ b/test/files/run/t5543.scala
@@ -22,5 +22,24 @@ object Test extends Function0[Int] {
println(sut.toString)
println(sut.m())
println(A.init()())
+
+ println((new T.C()).x)
+ println((new T.D(0,0)).x)
+ }
+}
+
+object T {
+ override def toString = "T"
+
+ // `this` refers to T
+ class C(val x: Any = {println(this); this}) { // prints T
+ println(this) // prints C
+ override def toString() = "C"
+ }
+
+ class D(val x: Any) {
+ override def toString() = "D"
+ // `this` refers again to T
+ def this(a: Int, b: Int, c: Any = {println(this); this}) { this(c); println(this) } // prints T, then prints D
}
}
diff --git a/test/files/run/t5604.check b/test/files/run/t5604.check
new file mode 100644
index 0000000000..53a2fc8894
--- /dev/null
+++ b/test/files/run/t5604.check
@@ -0,0 +1,8 @@
+long
+double
+long
+double
+long
+double
+long
+double
diff --git a/test/files/run/t5604.scala b/test/files/run/t5604.scala
new file mode 100644
index 0000000000..a06c8aab3e
--- /dev/null
+++ b/test/files/run/t5604.scala
@@ -0,0 +1,50 @@
+// a.scala
+// Fri Jan 13 11:31:47 PST 2012
+
+package foo {
+ object regular extends Duh {
+ def buh(n: Long) = println("long")
+ def buh(n: Double) = println("double")
+ }
+ class regular {
+ import regular._
+
+ duh(33L)
+ duh(3.0d)
+ foo.regular.duh(33L)
+ foo.regular.duh(3.0d)
+ buh(66L)
+ buh(6.0d)
+ foo.regular.buh(66L)
+ foo.regular.buh(6.0d)
+ }
+
+ trait Duh {
+ def duh(n: Long) = println("long")
+ def duh(n: Double) = println("double")
+ }
+ package object bar extends Duh {
+ def buh(n: Long) = println("long")
+ def buh(n: Double) = println("double")
+ }
+ package bar {
+ object Main {
+ def main(args:Array[String]) {
+ duh(33L)
+ duh(3.0d)
+ foo.bar.duh(33L)
+ foo.bar.duh(3.0d)
+ buh(66L)
+ buh(6.0d)
+ foo.bar.buh(66L)
+ foo.bar.buh(6.0d)
+ }
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ foo.bar.Main.main(null)
+ }
+}
diff --git a/test/files/run/t6154.check b/test/files/run/t6154.check
new file mode 100644
index 0000000000..9766475a41
--- /dev/null
+++ b/test/files/run/t6154.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6154.scala b/test/files/run/t6154.scala
new file mode 100644
index 0000000000..02ef62905f
--- /dev/null
+++ b/test/files/run/t6154.scala
@@ -0,0 +1,10 @@
+object Test {
+ def foo(a: Int) {
+ var bar: Int = 0
+ bar = try { 0 } catch { case ex: Throwable => 0 }
+ new { foo(bar) }
+ }
+
+ def main(args: Array[String]): Unit =
+ try foo(0) catch { case _: java.lang.StackOverflowError => println("ok") }
+}
diff --git a/test/files/run/t6206.check b/test/files/run/t6206.check
new file mode 100644
index 0000000000..8064573667
--- /dev/null
+++ b/test/files/run/t6206.check
@@ -0,0 +1,4 @@
+outer
+outer
+inner
+inner
diff --git a/test/files/run/t6206.scala b/test/files/run/t6206.scala
new file mode 100644
index 0000000000..07ff246d02
--- /dev/null
+++ b/test/files/run/t6206.scala
@@ -0,0 +1,37 @@
+class Outer {
+ def apply( position : Inner ) {}
+ class Inner
+
+ this.apply(new Inner)
+ this (new Inner) // error,
+}
+
+
+class Outer1 {
+
+ self =>
+
+ def apply( position : Inner ) : String = "outer"
+
+ class Inner( ) {
+
+ def apply(arg: Inner): String = "inner"
+
+ def testMe = {
+ List(
+ self.apply( this ), // a) this works
+ self( this ), // b) this does not work!
+ this apply this,
+ this(this)
+ ) foreach println
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val o = new Outer1
+ val i = new o.Inner
+ i.testMe
+ }
+}
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index af6bd5d269..4895c2c007 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -11,10 +11,7 @@
[64]case5()[84]{
[84]<synthetic> val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1);
[84]if ([84]o7.isEmpty.unary_!)
- [84]{
- [90]val nr: [90]Int = [90]o7.get;
- [97][97]matchEnd4([97]())
- }
+ [97][97]matchEnd4([97]())
else
[84][84]case6()
};
@@ -38,10 +35,7 @@
[195]<synthetic> val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1);
[195]if ([195]o7.isEmpty.unary_!)
[195]if ([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)))
- [195]{
- [201]val nr: [201]Int = [201][201]o7.get.apply([201]0);
- [208][208]matchEnd4([208]())
- }
+ [208][208]matchEnd4([208]())
else
[195][195]case6()
else
diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check
index 45ec31c308..ece88b18f0 100644
--- a/test/files/run/t6288b-jump-position.check
+++ b/test/files/run/t6288b-jump-position.check
@@ -19,7 +19,7 @@ object Case3 extends Object {
Exception handlers:
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
- locals: value args, value x1, value x2, value x
+ locals: value args, value x1, value x
startBlock: 1
blocks: [1,2,3,6,7]
@@ -35,10 +35,6 @@ object Case3 extends Object {
5 CZJUMP (BOOL)NE ? 3 : 6
3:
- 5 LOAD_LOCAL(value x1)
- 5 CHECK_CAST REF(class String)
- 5 STORE_LOCAL(value x2)
- 5 SCOPE_ENTER value x2
6 LOAD_MODULE object Predef
6 CONSTANT("case 0")
6 CALL_METHOD scala.Predef.println (dynamic)
diff --git a/test/files/run/t6572/bar_1.scala b/test/files/run/t6572/bar_1.scala
new file mode 100644
index 0000000000..5518ced7af
--- /dev/null
+++ b/test/files/run/t6572/bar_1.scala
@@ -0,0 +1,19 @@
+package bar
+
+abstract class IntBase[V] extends Base[Int, V]
+
+class DefaultIntBase[V <: IntProvider] extends IntBase[V] {
+ override protected def hashCode(key: Int) = key
+}
+
+trait IntProvider {
+ def int: Int
+}
+
+abstract class Base[@specialized K, V] {
+
+ protected def hashCode(key: K) = key.hashCode
+
+ def get(key: K): V = throw new RuntimeException
+
+} \ No newline at end of file
diff --git a/test/files/run/t6572/foo_2.scala b/test/files/run/t6572/foo_2.scala
new file mode 100644
index 0000000000..465f0b7c3c
--- /dev/null
+++ b/test/files/run/t6572/foo_2.scala
@@ -0,0 +1,17 @@
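+// Companion to bar_1.scala: this file only needs to compile against the
+// specialized Base hierarchy defined there and run.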
+//package foo
+
+import bar._
+
+class FooProvider extends IntProvider {
+ def int = 3
+}
+
+class Wrapper(users: DefaultIntBase[FooProvider]) {
+ final def user(userId: Int) = users.get(userId)
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new Wrapper(new DefaultIntBase)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t6584.check b/test/files/run/t6584.check
new file mode 100644
index 0000000000..35c8688751
--- /dev/null
+++ b/test/files/run/t6584.check
@@ -0,0 +1,8 @@
+Array: 102400
+Vector: 102400
+List: 102400
+Stream: 102400
+Array: 102400
+Vector: 102400
+List: 102400
+Stream: 102400
diff --git a/test/files/run/t6584.scala b/test/files/run/t6584.scala
new file mode 100644
index 0000000000..24c236ef35
--- /dev/null
+++ b/test/files/run/t6584.scala
@@ -0,0 +1,16 @@
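+// size = 100 * 1024, so every distinct call below should report 102400
+// elements, matching t6584.check.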
+object Test {
+ def main(args: Array[String]): Unit = {
+ val size = 100 * 1024
+ val doubled = (1 to size) ++ (1 to size)
+
+ println("Array: " + Array.tabulate(size)(x => x).distinct.size)
+ println("Vector: " + Vector.tabulate(size)(x => x).distinct.size)
+ println("List: " + List.tabulate(size)(x => x).distinct.size)
+ println("Stream: " + Stream.tabulate(size)(x => x).distinct.size)
+
+ println("Array: " + doubled.toArray.distinct.size)
+ println("Vector: " + doubled.toVector.distinct.size)
+ println("List: " + doubled.toList.distinct.size)
+ println("Stream: " + doubled.toStream.distinct.size)
+ }
+}
diff --git a/test/files/run/t6669.scala b/test/files/run/t6669.scala
new file mode 100644
index 0000000000..b55718b12b
--- /dev/null
+++ b/test/files/run/t6669.scala
@@ -0,0 +1,26 @@
+import java.io.{ByteArrayOutputStream, PrintStream}
+
+object Test extends App {
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ // first test with the default classpath
+ (scala.Console withOut ps) {
+ scala.tools.scalap.Main.main(Array("-verbose", "java.lang.Object"))
+ }
+
+ // now make sure we saw the '.' in the classpath
+ val msg1 = baos.toString()
+ assert(msg1 contains "directory classpath: .", s"Did not see '.' in the default class path. Full results were:\n$msg1")
+
+ // then test again with a user specified classpath
+ baos.reset
+
+ (scala.Console withOut ps) {
+ scala.tools.scalap.Main.main(Array("-verbose", "-cp", "whatever", "java.lang.Object"))
+ }
+
+ // now make sure we did not see the '.' in the classpath
+ val msg2 = baos.toString()
+ assert(!(msg2 contains "directory classpath: ."), s"Saw '.' in the user-specified class path. Full results were:\n$msg2")
+}
diff --git a/test/files/run/t6853.scala b/test/files/run/t6853.scala
new file mode 100644
index 0000000000..352375c99c
--- /dev/null
+++ b/test/files/run/t6853.scala
@@ -0,0 +1,18 @@
+// Test cases: the only place we can cut and paste without crying
+// ourselves to sleep.
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ // First testing the basic operations
+ val m = collection.mutable.ListMap[String, Int]()
+ var i = 0
+ while(i < 2) { m += ("foo" + i) -> i; i = i+1}
+ assert(m == Map("foo1"->1,"foo0"->0))
+ m-= "foo0"
+ assert(m == Map("foo1"->1))
+ // Now checking if it scales as described in SI-6853
+ i = 0
+ while(i < 80000) { m += ("foo" + i) -> i; i = i+1}
+ assert(m.size == 80000)
+ }
+}
diff --git a/test/files/run/t6863.scala b/test/files/run/t6863.scala
new file mode 100644
index 0000000000..d77adb6af4
--- /dev/null
+++ b/test/files/run/t6863.scala
@@ -0,0 +1,114 @@
+/** Make sure that when a variable is captured its initialization expression is handled properly */
+object Test {
+ def lazyVal() = {
+ // internally lazy vals become vars which are initialized with "_", so they need to be tested just like vars do
+ lazy val x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def ident() = {
+ val y = "42"
+ var x = y
+ assert({ () => x }.apply == "42")
+ }
+ def apply() = {
+ def y(x : Int) = x.toString
+ var x = y(42)
+ assert({ () => x }.apply == "42")
+ }
+ def literal() = {
+ var x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def `new`() = {
+ var x = new String("42")
+ assert({ () => x }.apply == "42")
+ }
+ def select() = {
+ object Foo{val bar = "42"}
+ var x = Foo.bar
+ assert({ () => x }.apply == "42")
+ }
+ def `throw`() = {
+ var x = if (true) "42" else throw new Exception("42")
+ assert({ () => x }.apply == "42")
+ }
+ def assign() = {
+ var y = 1
+ var x = y = 42
+ assert({ () => x}.apply == ())
+ }
+ def valDef() = {
+ var x = {val y = 42}
+ assert({ () => x}.apply == ())
+ }
+ def `return`(): String = {
+ var x = if (true) return "42" else ()
+ assert({ () => x}.apply == ())
+ "42"
+ }
+ def tryFinally() = {
+ var x = try { "42" } finally ()
+ assert({ () => x }.apply == "42")
+ }
+ def tryCatch() = {
+ var x = try { "42" } catch { case _ => "43" }
+ assert({ () => x }.apply == "42")
+ }
+ def `if`() = {
+ var x = if (true) ()
+ assert({ () => x }.apply == ())
+ }
+ def ifElse() = {
+ var x = if(true) "42" else "43"
+ assert({ () => x }.apply == "42")
+ }
+ def matchCase() = {
+ var x = 100 match {
+ case 100 => "42"
+ case _ => "43"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def block() = {
+ var x = {
+ val y = 42
+ "42"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def labelDef() = {
+ var x = 100 match {
+ case 100 => try "42" finally ()
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def nested() = {
+ var x = {
+ val y = 42
+ if(true) try "42" catch {case _ => "43"}
+ else "44"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def main(args: Array[String]) {
+ lazyVal()
+ ident()
+ apply()
+ literal()
+ `new`()
+ select()
+ `throw`()
+ assign()
+ valDef()
+ `return`()
+ tryFinally()
+ tryCatch()
+ ifElse()
+ `if`()
+ matchCase()
+ block()
+ labelDef()
+ nested()
+ }
+}
+
diff --git a/test/files/run/t6968.check b/test/files/run/t6968.check
new file mode 100644
index 0000000000..7a18941537
--- /dev/null
+++ b/test/files/run/t6968.check
@@ -0,0 +1 @@
+1, 3, 5
diff --git a/test/files/run/t6968.scala b/test/files/run/t6968.scala
new file mode 100644
index 0000000000..b5cadfd9e1
--- /dev/null
+++ b/test/files/run/t6968.scala
@@ -0,0 +1,7 @@
+object Test {
+ def main(args: Array[String]) {
+ val mixedList = List(1,(1,2),4,(3,1),(5,4),6)
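+ // the (a, b) pattern filters out the non-tuple elements, so only 1, 3 and 5 are kept (see t6968.check)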
+ val as = for((a,b) <- mixedList) yield a
+ println(as.mkString(", "))
+ }
+}
diff --git a/test/files/run/t7046.check b/test/files/run/t7046.check
new file mode 100644
index 0000000000..427f1ce610
--- /dev/null
+++ b/test/files/run/t7046.check
@@ -0,0 +1,2 @@
+Set(class D, class E)
+Set(class D, class E)
diff --git a/test/files/run/t7046.scala b/test/files/run/t7046.scala
new file mode 100644
index 0000000000..647a15cd18
--- /dev/null
+++ b/test/files/run/t7046.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+sealed class C
+class D extends C
+class E extends C
+
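+// t7046.check expects Set(class D, class E) both before and after typeSignature is forced.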
+object Test extends App {
+ val c = cm.staticClass("C")
+ println(c.knownDirectSubclasses)
+ c.typeSignature
+ println(c.knownDirectSubclasses)
+} \ No newline at end of file
diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
index 05237bace8..b6af8f41bd 100644
--- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
@@ -20,7 +20,7 @@ abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterable
property("gets iterated keys") = forAll(collectionPairs) {
case (t, coll) =>
val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
- val containsSelf = for ((k, v) <- coll) yield (coll.get(k) == Some(v))
+ val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
("Par contains elements of itself" |: containsSelf.forall(_ == true))
}
diff --git a/test/pending/neg/t5378.scala b/test/pending/neg/t5378.scala
deleted file mode 100644
index cada29b0a0..0000000000
--- a/test/pending/neg/t5378.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import language.reflectiveCalls
-
-class Coll[+T] {
- def contains = new { def apply[T1 <: T](value: T1) = ??? }
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val xs = new Coll[List[String]]
- val ys: Coll[Traversable[String]] = xs
-
- println(ys contains Nil)
- // java.lang.NoSuchMethodException: Coll$$anon$1.apply(scala.collection.Traversable)
- // at java.lang.Class.getMethod(Class.java:1605)
- // at Test$.reflMethod$Method1(a.scala:14)
- // at Test$.main(a.scala:14)
- // at Test.main(a.scala)
- }
-}
diff --git a/test/files/neg/t5589neg.check b/test/pending/neg/t5589neg.check
index f1dad94df3..f1dad94df3 100644
--- a/test/files/neg/t5589neg.check
+++ b/test/pending/neg/t5589neg.check
diff --git a/test/files/neg/t5589neg.scala b/test/pending/neg/t5589neg.scala
index 31ff2c3693..31ff2c3693 100644
--- a/test/files/neg/t5589neg.scala
+++ b/test/pending/neg/t5589neg.scala
diff --git a/test/files/neg/t5589neg2.scala b/test/pending/neg/t5589neg2.scala
index b7c7ab7218..b7c7ab7218 100644
--- a/test/files/neg/t5589neg2.scala
+++ b/test/pending/neg/t5589neg2.scala
diff --git a/test/files/pos/t1336.scala b/test/pending/pos/t1336.scala
index 63967985c7..63967985c7 100644
--- a/test/files/pos/t1336.scala
+++ b/test/pending/pos/t1336.scala
diff --git a/test/files/pos/t5589.scala b/test/pending/pos/t5589.scala
index 69cbb20391..69cbb20391 100644
--- a/test/files/pos/t5589.scala
+++ b/test/pending/pos/t5589.scala
diff --git a/test/files/run/t4574.scala b/test/pending/run/t4574.scala
index 1dde496aca..1dde496aca 100644
--- a/test/files/run/t4574.scala
+++ b/test/pending/run/t4574.scala
diff --git a/test/scaladoc/run/SI-6017.check b/test/scaladoc/run/SI-6017.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6017.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6017.scala b/test/scaladoc/run/SI-6017.scala
new file mode 100644
index 0000000000..a4950e48d8
--- /dev/null
+++ b/test/scaladoc/run/SI-6017.scala
@@ -0,0 +1,23 @@
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def scaladocSettings = ""
+ override def code = """
+ class STAR
+ class Star
+ """
+
+ def testModel(rootPackage: Package) {
+ model match {
+ case Some(universe) => {
+ val index = IndexModelFactory.makeIndex(universe)
+ // Because "STAR" and "Star" are different
+ assert(index.firstLetterIndex('s').keys.toSeq.length == 2)
+ }
+ case _ => assert(false)
+ }
+ }
+}
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/SI-6812.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6812.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala
new file mode 100644
index 0000000000..fbd9588ede
--- /dev/null
+++ b/test/scaladoc/run/SI-6812.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.reflect.macros.Context
+ import language.experimental.macros
+
+ object Macros {
+ def impl(c: Context) = c.literalUnit
+ def foo = macro impl
+ }
+
+ class C {
+ def bar = Macros.foo
+ }
+ """
+
+ def scaladocSettings = ""
+ override def extraSettings = super.extraSettings + " -Ymacro-no-expand"
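+ // testModel asserts nothing; the test only requires that the scaladoc model
+ // builds with macro expansion disabled.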
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/run/package-object.check b/test/scaladoc/run/package-object.check
index 01dbcc682f..7da897a4f2 100644
--- a/test/scaladoc/run/package-object.check
+++ b/test/scaladoc/run/package-object.check
@@ -1,3 +1,4 @@
List(test.B, test.A, scala.AnyRef, scala.Any)
List(B, A, AnyRef, Any)
+Some((newSource,10))
Done.
diff --git a/test/scaladoc/run/package-object.scala b/test/scaladoc/run/package-object.scala
index 5fb5a4ddf1..f5c79b1332 100644
--- a/test/scaladoc/run/package-object.scala
+++ b/test/scaladoc/run/package-object.scala
@@ -11,6 +11,7 @@ object Test extends ScaladocModelTest {
val p = root._package("test")
println(p.linearizationTemplates)
println(p.linearizationTypes)
+ println(p.inSource)
}
}