-rw-r--r--build.xml1
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTrees.scala6
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenTypes.scala3
-rw-r--r--src/compiler/scala/reflect/reify/codegen/GenUtils.scala3
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reify.scala5
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala12
-rw-r--r--src/compiler/scala/tools/nsc/CompileServer.scala44
-rw-r--r--src/compiler/scala/tools/nsc/CompileSocket.scala2
-rw-r--r--src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/Positions.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeGen.scala2
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala31
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala73
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala22
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala24
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala60
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala182
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala46
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala20
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css6
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js19
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala2
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala15
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala40
-rwxr-xr-xsrc/compiler/scala/tools/nsc/interactive/Doc.scala59
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala157
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala7
-rw-r--r--src/compiler/scala/tools/nsc/interactive/REPL.scala9
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala1
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala17
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/IMain.scala3
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/JLineReader.scala5
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/FscSettings.scala23
-rw-r--r--src/compiler/scala/tools/nsc/settings/MutableSettings.scala30
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala5
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaVersion.scala194
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala24
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala4
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala81
-rw-r--r--src/compiler/scala/tools/nsc/transform/Constructors.scala1
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala22
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala203
-rw-r--r--src/compiler/scala/tools/nsc/transform/LambdaLift.scala48
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala21
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala94
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala145
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Analyzer.scala1
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala225
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala28
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala38
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala7
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala112
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala7
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala519
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala64
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala355
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala108
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala24
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala15
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala45
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala687
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala48
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Variances.scala2
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala1
-rw-r--r--src/compiler/scala/tools/util/SocketServer.scala4
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala31
-rw-r--r--src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala1
-rw-r--r--src/library/scala/Array.scala10
-rw-r--r--src/library/scala/annotation/migration.scala3
-rw-r--r--src/library/scala/collection/IndexedSeq.scala4
-rw-r--r--src/library/scala/collection/generic/GenTraversableFactory.scala6
-rw-r--r--src/library/scala/collection/generic/IndexedSeqFactory.scala21
-rw-r--r--src/library/scala/collection/immutable/IndexedSeq.scala4
-rw-r--r--src/library/scala/collection/immutable/List.scala36
-rw-r--r--src/library/scala/collection/immutable/Stream.scala13
-rw-r--r--src/library/scala/collection/immutable/Vector.scala11
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala14
-rw-r--r--src/library/scala/collection/mutable/HashSet.scala2
-rw-r--r--src/library/scala/collection/mutable/ListMap.scala17
-rw-r--r--src/library/scala/collection/parallel/mutable/ParArray.scala3
-rw-r--r--src/library/scala/collection/parallel/mutable/ParHashSet.scala2
-rw-r--r--src/library/scala/concurrent/BatchingExecutor.scala117
-rw-r--r--src/library/scala/concurrent/Future.scala6
-rw-r--r--src/library/scala/concurrent/impl/ExecutionContextImpl.scala34
-rw-r--r--src/library/scala/concurrent/impl/Promise.scala2
-rw-r--r--src/library/scala/package.scala3
-rw-r--r--src/library/scala/util/parsing/combinator/JavaTokenParsers.scala7
-rw-r--r--src/partest/scala/tools/partest/ASMConverters.scala71
-rw-r--r--src/partest/scala/tools/partest/BytecodeTest.scala102
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala2
-rw-r--r--src/partest/scala/tools/partest/javaagent/ASMTransformer.java13
-rw-r--r--src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java13
-rw-r--r--src/partest/scala/tools/partest/javaagent/ProfilingAgent.java2
-rw-r--r--src/partest/scala/tools/partest/nest/SBTRunner.scala2
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala2
-rw-r--r--src/reflect/scala/reflect/api/Exprs.scala2
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala27
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationCheckers.scala179
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala17
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala45
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala15
-rw-r--r--src/reflect/scala/reflect/internal/Importers.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Positions.scala2
-rw-r--r--src/reflect/scala/reflect/internal/Printers.scala6
-rw-r--r--src/reflect/scala/reflect/internal/PrivateWithin.scala23
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala2
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala1
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala54
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala25
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala53
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala75
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala135
-rw-r--r--src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala31
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala2
-rw-r--r--src/reflect/scala/reflect/internal/util/Position.scala8
-rw-r--r--src/reflect/scala/reflect/macros/Attachments.scala16
-rw-r--r--src/reflect/scala/reflect/macros/compileTimeOnly.scala16
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala13
-rw-r--r--src/reflect/scala/reflect/runtime/Settings.scala1
-rw-r--r--src/scalap/scala/tools/scalap/Main.scala2
-rw-r--r--test/benchmarking/t6726-patmat-analysis.scala4005
-rw-r--r--test/disabled/run/t6987.check1
-rw-r--r--test/disabled/run/t6987.scala43
-rw-r--r--test/files/disabled/run/t4602.scala57
-rw-r--r--test/files/instrumented/t6611.check1
-rw-r--r--test/files/instrumented/t6611.scala35
-rw-r--r--test/files/jvm/bytecode-test-example.check1
-rw-r--r--test/files/jvm/bytecode-test-example/Foo_1.scala9
-rw-r--r--test/files/jvm/bytecode-test-example/Test.scala32
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala14
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore.check1
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore.flags1
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala29
-rw-r--r--test/files/jvm/patmat_opt_ignore_underscore/test.scala15
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck.check1
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck.flags1
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala24
-rw-r--r--test/files/jvm/patmat_opt_no_nullcheck/test.scala8
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest.check1
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest.flags1
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala24
-rw-r--r--test/files/jvm/patmat_opt_primitive_typetest/test.scala8
-rw-r--r--test/files/jvm/scala-concurrent-tck.scala7
-rw-r--r--test/files/jvm/t6941.check1
-rw-r--r--test/files/jvm/t6941.flags1
-rw-r--r--test/files/jvm/t6941/Analyzed_1.scala11
-rw-r--r--test/files/jvm/t6941/test.scala15
-rw-r--r--test/files/jvm/throws-annot-from-java.check47
-rw-r--r--test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala3
-rw-r--r--test/files/jvm/throws-annot-from-java/Test_3.scala29
-rw-r--r--test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java6
-rw-r--r--test/files/lib/javac-artifacts.jar.desired.sha11
-rw-r--r--test/files/neg/anyval-anyref-parent.check2
-rw-r--r--test/files/neg/cyclics-import.check11
-rw-r--r--test/files/neg/macro-false-deprecation-warning.check4
-rw-r--r--test/files/neg/macro-false-deprecation-warning.flags1
-rw-r--r--test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala15
-rw-r--r--test/files/neg/names-defaults-neg.check2
-rw-r--r--test/files/neg/protected-constructors.check5
-rw-r--r--test/files/neg/t2148.check2
-rw-r--r--test/files/neg/t2968.check10
-rw-r--r--test/files/neg/t2968.scala26
-rw-r--r--test/files/neg/t2968b.check4
-rw-r--r--test/files/neg/t2968b.scala7
-rw-r--r--test/files/neg/t409.check4
-rw-r--r--test/files/neg/t5353.check4
-rw-r--r--test/files/neg/t5353.scala3
-rw-r--r--test/files/neg/t5378.check31
-rw-r--r--test/files/neg/t5378.scala54
-rw-r--r--test/files/neg/t5529.check5
-rw-r--r--test/files/neg/t5543.check10
-rw-r--r--test/files/neg/t5543.scala19
-rw-r--r--test/files/neg/t5692a.check2
-rw-r--r--test/files/neg/t5692b.check2
-rw-r--r--test/files/neg/t5696.check2
-rw-r--r--test/files/neg/t5753/Impls$class.classbin626 -> 0 bytes
-rw-r--r--test/files/neg/t5753/Impls.classbin866 -> 0 bytes
-rw-r--r--test/files/neg/t5954.check10
-rw-r--r--test/files/neg/t5954.flags1
-rw-r--r--test/files/neg/t5954.scala2
-rw-r--r--test/files/neg/t6040.check4
-rw-r--r--test/files/neg/t6231.check6
-rw-r--r--test/files/neg/t6231.scala15
-rw-r--r--test/files/neg/t6426.check7
-rw-r--r--test/files/neg/t6426.scala5
-rw-r--r--test/files/neg/t6443c.check7
-rw-r--r--test/files/neg/t6443c.scala21
-rw-r--r--test/files/neg/t6539/Macro_1.scala2
-rw-r--r--test/files/neg/t6539/Test_2.scala6
-rw-r--r--test/files/neg/t6567.check7
-rw-r--r--test/files/neg/t6567.flags1
-rw-r--r--test/files/neg/t6567.scala11
-rw-r--r--test/files/neg/t6601.check4
-rw-r--r--test/files/neg/t6601/AccessPrivateConstructor_2.scala3
-rw-r--r--test/files/neg/t6601/PrivateConstructor_1.scala1
-rw-r--r--test/files/neg/t6667.check3
-rw-r--r--test/files/neg/t6667.flags1
-rw-r--r--test/files/neg/t6667b.check3
-rw-r--r--test/files/neg/t6667b.flags1
-rw-r--r--test/files/neg/t667.check4
-rw-r--r--test/files/neg/t6728.check4
-rw-r--r--test/files/neg/t6728.scala5
-rw-r--r--test/files/neg/t6829.check6
-rw-r--r--test/files/neg/t6902.check10
-rw-r--r--test/files/neg/t6902.flags1
-rw-r--r--test/files/neg/t6902.scala23
-rw-r--r--test/files/neg/t6952.check13
-rw-r--r--test/files/neg/t6952.scala4
-rw-r--r--test/files/neg/t6963.check2
-rw-r--r--test/files/neg/t6963.flags1
-rw-r--r--test/files/neg/t6963.scala3
-rw-r--r--test/files/neg/t6963a.check5
-rw-r--r--test/files/neg/t6963a.flags1
-rw-r--r--test/files/neg/t6963a.scala5
-rw-r--r--test/files/neg/t6963b.check13
-rw-r--r--test/files/neg/t6963b.flags1
-rw-r--r--test/files/neg/t6963b.scala20
-rw-r--r--test/files/neg/t877.check4
-rw-r--r--test/files/pos/SI-7060.flags1
-rw-r--r--test/files/pos/SI-7060.scala11
-rw-r--r--test/files/pos/SI-7100.scala6
-rw-r--r--test/files/pos/lubs.scala3
-rw-r--r--test/files/pos/package-case.flags1
-rw-r--r--test/files/pos/presuperContext.scala13
-rw-r--r--test/files/pos/t1014.scala4
-rw-r--r--test/files/pos/t1803.flags1
-rw-r--r--test/files/pos/t1803.scala2
-rw-r--r--test/files/pos/t2130-1.flags1
-rw-r--r--test/files/pos/t2130-2.flags1
-rw-r--r--test/files/pos/t3577.scala29
-rw-r--r--test/files/pos/t3999b.flags1
-rw-r--r--test/files/pos/t4052.flags1
-rw-r--r--test/files/pos/t5082.scala14
-rw-r--r--test/files/pos/t5130.scala46
-rw-r--r--test/files/pos/t5604b/T_1.scala6
-rw-r--r--test/files/pos/t5604b/T_2.scala6
-rw-r--r--test/files/pos/t5604b/Test_1.scala7
-rw-r--r--test/files/pos/t5604b/Test_2.scala7
-rw-r--r--test/files/pos/t5604b/pack_1.scala5
-rw-r--r--test/files/pos/t5859.scala15
-rw-r--r--test/files/pos/t6072.scala3
-rw-r--r--test/files/pos/t6146.flags1
-rw-r--r--test/files/pos/t6146.scala60
-rw-r--r--test/files/pos/t6482.scala11
-rw-r--r--test/files/pos/t6516.scala19
-rw-r--r--test/files/pos/t6595.flags1
-rw-r--r--test/files/pos/t6595.scala18
-rw-r--r--test/files/pos/t6601/PrivateValueClass_1.scala1
-rw-r--r--test/files/pos/t6601/UsePrivateValueClass_2.scala10
-rw-r--r--test/files/pos/t6651.scala33
-rw-r--r--test/files/pos/t6891.flags1
-rw-r--r--test/files/pos/t6891.scala26
-rw-r--r--test/files/pos/t6942.flags1
-rw-r--r--test/files/pos/t6942/Bar.java235
-rw-r--r--test/files/pos/t6942/t6942.scala64
-rw-r--r--test/files/pos/t6963c.flags1
-rw-r--r--test/files/pos/t6963c.scala25
-rw-r--r--test/files/pos/t6976/Exts_1.scala10
-rw-r--r--test/files/pos/t6976/ImplicitBug_1.scala27
-rw-r--r--test/files/pos/t6976/ImplicitBug_2.scala7
-rw-r--r--test/files/pos/t6994.flags1
-rw-r--r--test/files/pos/t6994.scala8
-rw-r--r--test/files/pos/t7011.flags1
-rw-r--r--test/files/pos/t7011.scala7
-rw-r--r--test/files/pos/t7022.scala9
-rw-r--r--test/files/pos/t7033.scala15
-rw-r--r--test/files/pos/t7035.scala15
-rw-r--r--[-rwxr-xr-x]test/files/presentation/doc.check1
-rwxr-xr-xtest/files/presentation/doc/doc.scala (renamed from test/files/presentation/doc.scala)52
-rw-r--r--test/files/presentation/ide-t1001326.check4
-rw-r--r--test/files/presentation/ide-t1001326/Test.scala91
-rw-r--r--test/files/presentation/ide-t1001326/src/a/A.scala5
-rw-r--r--test/files/run/analyzerPlugins.check196
-rw-r--r--test/files/run/analyzerPlugins.scala121
-rw-r--r--test/files/run/array-addition.check4
-rw-r--r--test/files/run/array-addition.scala11
-rw-r--r--test/files/run/idempotency-case-classes.check2
-rw-r--r--test/files/run/idempotency-partial-functions.scala25
-rw-r--r--test/files/run/inline-ex-handlers.check214
-rw-r--r--test/files/run/parserJavaIdent.check26
-rw-r--r--test/files/run/parserJavaIdent.scala26
-rw-r--r--test/files/run/reflection-java-annotations.check2
-rw-r--r--test/files/run/reflection-java-annotations/JavaAnnottee_1.java47
-rw-r--r--test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java34
-rw-r--r--test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java21
-rw-r--r--test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java4
-rw-r--r--test/files/run/reflection-java-annotations/Test_2.scala (renamed from test/files/run/reflection-java-annotations.scala)2
-rw-r--r--test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java4
-rw-r--r--test/files/run/reflection-java-crtp/Main_2.scala (renamed from test/files/run/reflection-java-crtp.scala)2
-rw-r--r--test/files/run/reify-staticXXX.scala36
-rw-r--r--test/files/run/reify_magicsymbols.check2
-rw-r--r--test/files/run/t2418.check1
-rw-r--r--test/files/run/t2418.scala10
-rw-r--r--test/files/run/t2818.check4
-rw-r--r--test/files/run/t2818.scala6
-rw-r--r--test/files/run/t3353.check1
-rw-r--r--test/files/run/t3353.scala10
-rw-r--r--test/files/run/t4729.check4
-rw-r--r--test/files/run/t4729/J_1.java4
-rw-r--r--test/files/run/t4729/S_2.scala29
-rw-r--r--test/files/run/t5064.check6
-rw-r--r--test/files/run/t5313.check12
-rw-r--r--test/files/run/t5313.scala54
-rw-r--r--test/files/run/t5374.check6
-rw-r--r--test/files/run/t5374.scala76
-rw-r--r--test/files/run/t5543.check6
-rw-r--r--test/files/run/t5543.scala19
-rw-r--r--test/files/run/t5603.check4
-rw-r--r--test/files/run/t5604.check8
-rw-r--r--test/files/run/t5604.scala50
-rw-r--r--test/files/run/t5824.check1
-rw-r--r--test/files/run/t5824.scala8
-rw-r--r--test/files/run/t6011c.scala13
-rw-r--r--test/files/run/t6028.check2
-rw-r--r--test/files/run/t6113.check1
-rw-r--r--test/files/run/t6113.scala6
-rw-r--r--test/files/run/t6135.scala13
-rw-r--r--test/files/run/t6146b.check52
-rw-r--r--test/files/run/t6146b.scala39
-rw-r--r--test/files/run/t6154.check1
-rw-r--r--test/files/run/t6154.scala10
-rw-r--r--test/files/run/t6187.check32
-rw-r--r--test/files/run/t6187.scala18
-rw-r--r--test/files/run/t6187b.scala5
-rw-r--r--test/files/run/t6206.check4
-rw-r--r--test/files/run/t6206.scala37
-rw-r--r--test/files/run/t6288.check22
-rw-r--r--test/files/run/t6288b-jump-position.check12
-rw-r--r--test/files/run/t6434.check10
-rw-r--r--test/files/run/t6434.scala8
-rw-r--r--test/files/run/t6439.check66
-rw-r--r--test/files/run/t6439.scala22
-rw-r--r--test/files/run/t6443-by-name.check3
-rw-r--r--test/files/run/t6443-by-name.scala18
-rw-r--r--test/files/run/t6443-varargs.check1
-rw-r--r--test/files/run/t6443-varargs.scala16
-rw-r--r--test/files/run/t6443.scala15
-rw-r--r--test/files/run/t6443b.scala16
-rw-r--r--test/files/run/t6548.check2
-rw-r--r--test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java17
-rw-r--r--test/files/run/t6548/Test_2.scala (renamed from test/files/run/t6548.scala)2
-rw-r--r--test/files/run/t6572/bar_1.scala19
-rw-r--r--test/files/run/t6572/foo_2.scala17
-rw-r--r--test/files/run/t6584.check8
-rw-r--r--test/files/run/t6584.scala16
-rw-r--r--test/files/run/t6611.scala61
-rw-r--r--test/files/run/t6637.check1
-rw-r--r--test/files/run/t6637.scala8
-rw-r--r--test/files/run/t6669.scala26
-rw-r--r--test/files/run/t6853.scala18
-rw-r--r--test/files/run/t6863.scala114
-rw-r--r--test/files/run/t6888.check3
-rw-r--r--test/files/run/t6888.scala19
-rw-r--r--test/files/run/t6968.check1
-rw-r--r--test/files/run/t6968.scala7
-rw-r--r--test/files/run/t6969.check1
-rw-r--r--test/files/run/t6969.scala28
-rw-r--r--test/files/run/t6989.check216
-rw-r--r--test/files/run/t6989/JavaClass_1.java41
-rw-r--r--test/files/run/t6989/Test_2.scala42
-rw-r--r--test/files/run/t7008-scala-defined.check7
-rw-r--r--test/files/run/t7008-scala-defined/Impls_Macros_2.scala12
-rw-r--r--test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala6
-rw-r--r--test/files/run/t7008-scala-defined/Test_3.scala9
-rw-r--r--test/files/run/t7008.check9
-rw-r--r--test/files/run/t7008/Impls_Macros_2.scala12
-rw-r--r--test/files/run/t7008/JavaClassWithCheckedExceptions_1.java7
-rw-r--r--test/files/run/t7008/Test_3.scala9
-rw-r--r--test/files/run/t7039.check1
-rw-r--r--test/files/run/t7039.scala11
-rw-r--r--test/files/run/t7046.check2
-rw-r--r--test/files/run/t7046.scala13
-rw-r--r--test/files/run/t7064-old-style-supercalls.check1
-rw-r--r--test/files/run/t7064-old-style-supercalls.scala48
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala2
-rwxr-xr-xtest/partest8
-rw-r--r--test/pending/neg/t5378.scala19
-rw-r--r--test/pending/neg/t5589neg.check (renamed from test/files/neg/t5589neg.check)0
-rw-r--r--test/pending/neg/t5589neg.scala (renamed from test/files/neg/t5589neg.scala)0
-rw-r--r--test/pending/neg/t5589neg2.scala (renamed from test/files/neg/t5589neg2.scala)0
-rw-r--r--test/pending/pos/t1336.scala (renamed from test/files/pos/t1336.scala)0
-rw-r--r--test/pending/pos/t5589.scala (renamed from test/files/pos/t5589.scala)0
-rw-r--r--test/pending/run/idempotency-partial-functions.scala28
-rw-r--r--test/pending/run/t4574.scala (renamed from test/files/run/t4574.scala)0
-rw-r--r--test/scaladoc/run/SI-6017.check1
-rw-r--r--test/scaladoc/run/SI-6017.scala28
-rw-r--r--test/scaladoc/run/SI-6812.check1
-rw-r--r--test/scaladoc/run/SI-6812.scala24
-rw-r--r--test/scaladoc/run/package-object.check1
-rw-r--r--test/scaladoc/run/package-object.scala1
400 files changed, 11503 insertions, 2223 deletions
diff --git a/build.xml b/build.xml
index 113923db6b..af577afbaa 100644
--- a/build.xml
+++ b/build.xml
@@ -1377,6 +1377,7 @@ QUICK BUILD (QUICK)
<pathelement location="${ant.jar}"/>
<path refid="forkjoin.classpath"/>
<path refid="fjbg.classpath"/>
+ <path refid="asm.classpath"/>
<pathelement location="${scalacheck.jar}"/>
</compilationpath>
</scalacfork>
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 31974b5b76..06e287f62f 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -45,9 +45,7 @@ trait GenTrees {
case global.EmptyTree =>
reifyMirrorObject(EmptyTree)
case global.emptyValDef =>
- mirrorSelect(nme.emptyValDef)
- case global.pendingSuperCall =>
- mirrorSelect(nme.pendingSuperCall)
+ mirrorBuildSelect(nme.emptyValDef)
case FreeDef(_, _, _, _, _) =>
reifyNestedFreeDef(tree)
case FreeRef(_, _) =>
@@ -177,7 +175,7 @@ trait GenTrees {
// then we can reify the scrutinee as a symless AST and that will definitely be hygienic
// why? because then typechecking of a scrutinee doesn't depend on the environment external to the quasiquote
// otherwise we need to reify the corresponding type
- if (sym.isLocalToReifee || tpe.isLocalToReifee)
+ if (sym.isLocalToReifee || tpe.isLocalToReifee || treeInfo.isWildcardStarType(tree))
reifyProduct(tree)
else {
if (reifyDebug) println("reifying bound type %s (underlying type is %s)".format(sym, tpe))
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
index 7aa87dc2f8..bb7e1f9b56 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala
@@ -69,8 +69,7 @@ trait GenTypes {
def reificationIsConcrete: Boolean = state.reificationIsConcrete
def spliceType(tpe: Type): Tree = {
- val quantified = currentQuantified
- if (tpe.isSpliceable && !(quantified contains tpe.typeSymbol)) {
+ if (tpe.isSpliceable && !(boundSymbolsInCallstack contains tpe.typeSymbol)) {
if (reifyDebug) println("splicing " + tpe)
val tagFlavor = if (concrete) tpnme.TypeTag.toString else tpnme.WeakTypeTag.toString
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index 21db93d8f5..49877b4286 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -34,6 +34,9 @@ trait GenUtils {
def mirrorSelect(name: String): Tree =
termPath(nme.UNIVERSE_PREFIX + name)
+ def mirrorBuildSelect(name: String): Tree =
+ termPath(nme.UNIVERSE_BUILD_PREFIX + name)
+
def mirrorMirrorSelect(name: String): Tree =
termPath(nme.MIRROR_PREFIX + name)
diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala
index dc0028be38..8e13a45cdb 100644
--- a/src/compiler/scala/reflect/reify/phases/Reify.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reify.scala
@@ -28,7 +28,10 @@ trait Reify extends GenSymbols
finally currents = currents.tail
}
}
- def currentQuantified = flatCollect(reifyStack.currents)({ case ExistentialType(quantified, _) => quantified })
+ def boundSymbolsInCallstack = flatCollect(reifyStack.currents) {
+ case ExistentialType(quantified, _) => quantified
+ case PolyType(typeParams, _) => typeParams
+ }
def current = reifyStack.currents.head
def currents = reifyStack.currents
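A standalone sketch of the flatCollect idea used by boundSymbolsInCallstack, assuming only that it applies a partial function across a list and concatenates the matches; the helper name and signature here are illustrative, not the reifier's own utility:

object FlatCollectSketch {
  // apply a partial function to every element and concatenate whatever it yields
  def flatCollect[A, B](xs: List[A])(pf: PartialFunction[A, List[B]]): List[B] =
    xs flatMap (x => pf.lift(x) getOrElse Nil)

  sealed trait Entry
  final case class Existential(quantified: List[String]) extends Entry
  final case class Poly(typeParams: List[String]) extends Entry

  // gathers bound names from both existential and polymorphic entries, like the reifier above
  val bound: List[String] = flatCollect[Entry, String](List(Existential(List("T")), Poly(List("A", "B")))) {
    case Existential(qs) => qs
    case Poly(tps)       => tps
  }
}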
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 1b7509fdbe..7406f5d02d 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -188,8 +188,12 @@ trait Reshape {
}
private def toPreTyperTypedOrAnnotated(tree: Tree): Tree = tree match {
- case ty @ Typed(expr1, tt @ TypeTree()) =>
+ case ty @ Typed(expr1, tpt) =>
if (reifyDebug) println("reify typed: " + tree)
+ val original = tpt match {
+ case tt @ TypeTree() => tt.original
+ case tpt => tpt
+ }
val annotatedArg = {
def loop(tree: Tree): Tree = tree match {
case annotated1 @ Annotated(ann, annotated2 @ Annotated(_, _)) => loop(annotated2)
@@ -197,15 +201,15 @@ trait Reshape {
case _ => EmptyTree
}
- loop(tt.original)
+ loop(original)
}
if (annotatedArg != EmptyTree) {
if (annotatedArg.isType) {
if (reifyDebug) println("verdict: was an annotated type, reify as usual")
ty
} else {
- if (reifyDebug) println("verdict: was an annotated value, equivalent is " + tt.original)
- toPreTyperTypedOrAnnotated(tt.original)
+ if (reifyDebug) println("verdict: was an annotated value, equivalent is " + original)
+ toPreTyperTypedOrAnnotated(original)
}
} else {
if (reifyDebug) println("verdict: wasn't annotated, reify as usual")
diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala
index c23c1e6154..7a0a072bb8 100644
--- a/src/compiler/scala/tools/nsc/CompileServer.scala
+++ b/src/compiler/scala/tools/nsc/CompileServer.scala
@@ -92,10 +92,11 @@ class StandardCompileServer extends SocketServer {
val args = input.split("\0", -1).toList
val newSettings = new FscSettings(fscError)
- this.verbose = newSettings.verbose.value
val command = newOfflineCompilerCommand(args, newSettings)
+ this.verbose = newSettings.verbose.value
info("Settings after normalizing paths: " + newSettings)
+ if (!command.files.isEmpty) info("Input files after normalizing paths: " + (command.files mkString ","))
printMemoryStats()
// Update the idle timeout if given
@@ -173,11 +174,22 @@ object CompileServer extends StandardCompileServer {
/** A directory holding redirected output */
private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory()
- private def redirect(setter: PrintStream => Unit, filename: String) {
- setter(new PrintStream((redirectDir / filename).createFile().bufferedOutput()))
- }
-
- def main(args: Array[String]) {
+ private def createRedirect(filename: String) =
+ new PrintStream((redirectDir / filename).createFile().bufferedOutput())
+
+ def main(args: Array[String]) =
+ execute(() => (), args)
+
+ /**
+ * Used for internal testing. The callback is called upon
+ * server start, notifying the caller that the server is
+ * ready to run. WARNING: the callback runs in the
+ * server's thread, blocking the server from doing any work
+ * until the callback is finished. Callbacks should be kept
+ * simple and clients should not try to interact with the
+ * server while the callback is processing.
+ */
+ def execute(startupCallback : () => Unit, args: Array[String]) {
val debug = args contains "-v"
if (debug) {
@@ -185,14 +197,16 @@ object CompileServer extends StandardCompileServer {
echo("Redirect dir is " + redirectDir)
}
- redirect(System.setOut, "scala-compile-server-out.log")
- redirect(System.setErr, "scala-compile-server-err.log")
- System.err.println("...starting server on socket "+port+"...")
- System.err.flush()
- compileSocket setPort port
- run()
-
- compileSocket deletePort port
- sys exit 0
+ Console.withErr(createRedirect("scala-compile-server-err.log")) {
+ Console.withOut(createRedirect("scala-compile-server-out.log")) {
+ Console.err.println("...starting server on socket "+port+"...")
+ Console.err.flush()
+ compileSocket setPort port
+ startupCallback()
+ run()
+
+ compileSocket deletePort port
+ }
+ }
}
}
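A hypothetical internal-test harness for the new entry point; only CompileServer.execute(startupCallback, args) comes from the patch, while the CountDownLatch handshake and the thread wrapper are assumptions for illustration:

import java.util.concurrent.CountDownLatch
import scala.tools.nsc.CompileServer

object CompileServerHarness {
  def main(args: Array[String]): Unit = {
    val ready = new CountDownLatch(1)
    val server = new Thread(new Runnable {
      // keep the callback trivial: it runs on the server's own thread and blocks it
      def run(): Unit = CompileServer.execute(() => ready.countDown(), Array("-v"))
    })
    server.setDaemon(true)
    server.start()
    ready.await()  // callback fired: the socket is bound and the server is accepting requests
    // ...connect through CompileSocket and issue a compile request here...
  }
}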
diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala
index 9a3e8d1530..4051bda914 100644
--- a/src/compiler/scala/tools/nsc/CompileSocket.scala
+++ b/src/compiler/scala/tools/nsc/CompileSocket.scala
@@ -72,7 +72,7 @@ class CompileSocket extends CompileOutputCommon {
/** A temporary directory to use */
val tmpDir = {
val udir = Option(Properties.userName) getOrElse "shared"
- val f = (Path(Properties.tmpDir) / "scala-devel" / udir).createDirectory()
+ val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory()
if (f.isDirectory && f.canWrite) {
info("[Temp directory: " + f + "]")
diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
index 8a3c531ff0..caf6ad14cf 100644
--- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
+++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala
@@ -33,7 +33,7 @@ class OfflineCompilerCommand(arguments: List[String], settings: FscSettings) ext
}
else {
// Otherwise we're on the server and will use it to absolutize the paths.
- settings.absolutize(currentDir.value)
+ settings.absolutize()
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala
index 49569f5e05..d8fb632f73 100644
--- a/src/compiler/scala/tools/nsc/ast/Positions.scala
+++ b/src/compiler/scala/tools/nsc/ast/Positions.scala
@@ -20,7 +20,7 @@ trait Positions extends scala.reflect.internal.Positions {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
- if (!c.canHaveAttrs) ()
+ if ((c eq EmptyTree) || (c eq emptyValDef)) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index 96146b7343..99b82d9746 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -58,7 +58,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
def mkUnchecked(expr: Tree): Tree = atPos(expr.pos) {
// This can't be "Annotated(New(UncheckedClass), expr)" because annotations
// are very picky about things and it crashes the compiler with "unexpected new".
- Annotated(New(scalaDot(UncheckedClass.name), Nil), expr)
+ Annotated(New(scalaDot(UncheckedClass.name), ListOfNil), expr)
}
// if it's a Match, mark the selector unchecked; otherwise nothing.
def mkUncheckedMatch(tree: Tree) = tree match {
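At the source level, the tree built by mkUnchecked corresponds to an @unchecked ascription on a match selector, roughly as in this illustrative example:

object UncheckedSelectorExample {
  def last(xs: List[Int]): Int = (xs: @unchecked) match {
    case x :: Nil  => x
    case _ :: rest => last(rest)
    // Nil is deliberately unhandled; the @unchecked ascription silences the exhaustivity warning
  }
}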
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 54402f0903..2ad762fd55 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -65,13 +65,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
// --- factory methods ----------------------------------------------------------
- /** Factory method for a primary constructor super call `super.<init>(args_1)...(args_n)`
- */
- def PrimarySuperCall(argss: List[List[Tree]]): Tree = argss match {
- case Nil => Apply(gen.mkSuperSelect, Nil)
- case xs :: rest => rest.foldLeft(Apply(gen.mkSuperSelect, xs): Tree)(Apply.apply)
- }
-
/** Generates a template with constructor corresponding to
*
* constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
@@ -89,7 +82,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* body
* }
*/
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
+ def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): Template = {
/* Add constructor to template */
// create parameters for <init> as synthetic trees.
@@ -111,7 +104,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
rhs = EmptyTree
)
}
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = Modifiers(PRESUPER)) }
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
val constrs = {
if (constrMods hasFlag TRAIT) {
@@ -124,16 +117,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
vparamss1 = List() :: vparamss1;
val superRef: Tree = atPos(superPos)(gen.mkSuperSelect)
- val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
- // this requires knowing which of the parents is a type macro and which is not
- // and that's something that cannot be found out before typer
- // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
- // this means that we don't know what will be the arguments of the super call
- // therefore here we emit a dummy which gets populated when the template is named and typechecked
+ val superCall = (superRef /: argss) (Apply.apply)
List(
- // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
- // is it going to be a problem that we can no longer include the `argss`?
- atPos(wrappingPos(superPos, lvdefs)) (
+ atPos(wrappingPos(superPos, lvdefs ::: argss.flatten)) (
DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
}
}
@@ -151,10 +137,11 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
* @param constrMods the modifiers for the class constructor, i.e. as in `class C private (...)`
* @param vparamss the value parameters -- if they have symbols they
* should be owned by `sym`
+ * @param argss the supercall arguments
* @param body the template statements without primary constructor
* and value parameter fields.
*/
- def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): ClassDef = {
+ def ClassDef(sym: Symbol, constrMods: Modifiers, vparamss: List[List[ValDef]], argss: List[List[Tree]], body: List[Tree], superPos: Position): ClassDef = {
// "if they have symbols they should be owned by `sym`"
assert(
mforall(vparamss)(p => (p.symbol eq NoSymbol) || (p.symbol.owner == sym)),
@@ -164,7 +151,7 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
ClassDef(sym,
Template(sym.info.parents map TypeTree,
if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, body, superPos))
+ constrMods, vparamss, argss, body, superPos))
}
// --- subcomponents --------------------------------------------------
@@ -337,8 +324,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
else
super.transform {
tree match {
- case tree if !tree.canHaveAttrs =>
- tree
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
@@ -352,6 +337,8 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
transform(fn)
case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
tree
+ case EmptyTree =>
+ tree
case _ =>
val dupl = tree.duplicate
if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel))
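A source-level reminder of the early ("presuper") definitions that the Template factory collects into lvdefs; the snippet is illustrative only, and the mods fix above means the original modifiers are kept and merely extended with PRESUPER:

trait Named {
  val name: String
  val greeting = "Hello, " + name   // reads `name` while the trait body initializes
}

class Alice extends {
  val name = "Alice"                // early definition, evaluated before Named's body runs
} with Named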
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 33db4ee2d5..6f79f639b9 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -980,11 +980,8 @@ self =>
/** Assumed (provisionally) to be TermNames. */
def ident(skipIt: Boolean): Name =
- if (isIdent) {
- val name = in.name.encode
- in.nextToken()
- name
- } else {
+ if (isIdent) rawIdent().encode
+ else {
syntaxErrorOrIncomplete(expectedMsg(IDENTIFIER), skipIt)
nme.ERROR
}
@@ -1562,9 +1559,9 @@ self =>
val nstart = in.skipToken()
val npos = r2p(nstart, nstart, in.lastOffset)
val tstart = in.offset
- val (parents, self, stats) = template()
+ val (parents, argss, self, stats) = template(isTrait = false)
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, npos, cpos)
+ makeNew(parents, self, stats, argss, npos, cpos)
case _ =>
syntaxErrorOrIncomplete("illegal start of simple expression", true)
errorTermTree
@@ -2106,7 +2103,7 @@ self =>
def annotationExpr(): Tree = atPos(in.offset) {
val t = exprSimpleType()
if (in.token == LPAREN) New(t, multipleArgumentExprs())
- else New(t, Nil)
+ else New(t, ListOfNil)
}
/* -------- PARAMETERS ------------------------------------------- */
@@ -2742,17 +2739,20 @@ self =>
* TraitParents ::= AnnotType {with AnnotType}
* }}}
*/
- def templateParents(): List[Tree] = {
- val parents = new ListBuffer[Tree]
- def readAppliedParent() = {
- val start = in.offset
- val parent = startAnnotType()
- val argss = if (in.token == LPAREN) multipleArgumentExprs() else Nil
- parents += atPos(start)((parent /: argss)(Apply.apply))
+ def templateParents(isTrait: Boolean): (List[Tree], List[List[Tree]]) = {
+ val parents = new ListBuffer[Tree] += startAnnotType()
+ val argss = (
+ // TODO: the insertion of ListOfNil here is where "new Foo" becomes
+ // indistinguishable from "new Foo()".
+ if (in.token == LPAREN && !isTrait) multipleArgumentExprs()
+ else ListOfNil
+ )
+
+ while (in.token == WITH) {
+ in.nextToken()
+ parents += startAnnotType()
}
- readAppliedParent()
- while (in.token == WITH) { in.nextToken(); readAppliedParent() }
- parents.toList
+ (parents.toList, argss)
}
/** {{{
@@ -2762,12 +2762,12 @@ self =>
* EarlyDef ::= Annotations Modifiers PatDef
* }}}
*/
- def template(): (List[Tree], ValDef, List[Tree]) = {
+ def template(isTrait: Boolean): (List[Tree], List[List[Tree]], ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
// @S: pre template body cannot stub like post body can!
val (self, body) = templateBody(isPre = true)
- if (in.token == WITH && (self eq emptyValDef)) {
+ if (in.token == WITH && self.isEmpty) {
val earlyDefs: List[Tree] = body flatMap {
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
List(copyValDef(vdef)(mods = mods | Flags.PRESUPER))
@@ -2779,16 +2779,16 @@ self =>
case _ => List()
}
in.nextToken()
- val parents = templateParents()
- val (self1, body1) = templateBodyOpt(parenMeansSyntaxError = false)
- (parents, self1, earlyDefs ::: body1)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self1, body1) = templateBodyOpt(traitParentSeen = isTrait)
+ (parents, argss, self1, earlyDefs ::: body1)
} else {
- (List(), self, body)
+ (List(), ListOfNil, self, body)
}
} else {
- val parents = templateParents()
- val (self, body) = templateBodyOpt(parenMeansSyntaxError = false)
- (parents, self, body)
+ val (parents, argss) = templateParents(isTrait = isTrait)
+ val (self, body) = templateBodyOpt(traitParentSeen = isTrait)
+ (parents, argss, self, body)
}
}
@@ -2802,15 +2802,15 @@ self =>
* }}}
*/
def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
- val (parents0, self, body) = (
+ val (parents0, argss, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
- template()
+ template(isTrait = mods.isTrait)
}
else {
newLineOptWhenFollowedBy(LBRACE)
- val (self, body) = templateBodyOpt(parenMeansSyntaxError = mods.isTrait || name.isTermName)
- (List(), self, body)
+ val (self, body) = templateBodyOpt(traitParentSeen = false)
+ (List(), ListOfNil, self, body)
}
)
def anyrefParents() = {
@@ -2832,7 +2832,7 @@ self =>
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
Template(parents0, self, anyvalConstructor :: body)
else
- Template(anyrefParents, self, constrMods, vparamss, body, o2p(tstart))
+ Template(anyrefParents, self, constrMods, vparamss, argss, body, o2p(tstart))
}
}
@@ -2847,15 +2847,14 @@ self =>
case (self, Nil) => (self, EmptyTree.asList)
case result => result
}
- def templateBodyOpt(parenMeansSyntaxError: Boolean): (ValDef, List[Tree]) = {
+ def templateBodyOpt(traitParentSeen: Boolean): (ValDef, List[Tree]) = {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) {
templateBody(isPre = false)
} else {
- if (in.token == LPAREN) {
- if (parenMeansSyntaxError) syntaxError(s"traits or objects may not have parameters", true)
- else abort("unexpected opening parenthesis")
- }
+ if (in.token == LPAREN)
+ syntaxError((if (traitParentSeen) "parents of traits" else "traits or objects")+
+ " may not have parameters", true)
(emptyValDef, List())
}
}
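Illustrative source forms, not taken from the patch, showing where the new templateParents/template split separates parents from super-call argument lists, and where the isTrait flag forbids them:

class Parent(val x: Int)
trait Marker

class UsesArgs(x: Int) extends Parent(x) with Marker  // parents = Parent, Marker; argss = List(List(x))
object UsesArgsToo    extends Parent(1) with Marker   // objects thread super-call arguments the same way
trait NoArgs          extends Parent with Marker      // isTrait = true: writing Parent(1) here is rejected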
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 4f564c5d0b..79f0bcf149 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -113,6 +113,11 @@ trait Scanners extends ScannersCommon {
cbuf.append(c)
}
+ /** Determines whether this scanner should emit identifier deprecation warnings,
+ * e.g. when seeing `macro` or `then`, which are planned to become keywords in future versions of Scala.
+ */
+ protected def emitIdentifierDeprecationWarnings = true
+
/** Clear buffer and set name and token */
private def finishNamed(idtoken: Int = IDENTIFIER) {
name = newTermName(cbuf.toString)
@@ -122,7 +127,7 @@ trait Scanners extends ScannersCommon {
val idx = name.start - kwOffset
if (idx >= 0 && idx < kwArray.length) {
token = kwArray(idx)
- if (token == IDENTIFIER && allowIdent != name)
+ if (token == IDENTIFIER && allowIdent != name && emitIdentifierDeprecationWarnings)
deprecationWarning(name+" is now a reserved word; usage as an identifier is deprecated")
}
}
@@ -283,10 +288,16 @@ trait Scanners extends ScannersCommon {
prev copyFrom this
val nextLastOffset = charOffset - 1
fetchToken()
+ def resetOffset() {
+ offset = prev.offset
+ lastOffset = prev.lastOffset
+ }
if (token == CLASS) {
token = CASECLASS
+ resetOffset()
} else if (token == OBJECT) {
token = CASEOBJECT
+ resetOffset()
} else {
lastOffset = nextLastOffset
next copyFrom this
@@ -607,7 +618,10 @@ trait Scanners extends ScannersCommon {
if (ch == '`') {
nextChar()
finishNamed(BACKQUOTED_IDENT)
- if (name.length == 0) syntaxError("empty quoted identifier")
+ if (name.length == 0)
+ syntaxError("empty quoted identifier")
+ else if (name == nme.WILDCARD)
+ syntaxError("wildcard invalid as backquoted identifier")
}
else syntaxError("unclosed quoted identifier")
}
@@ -1488,6 +1502,10 @@ trait Scanners extends ScannersCommon {
def improves(patches1: List[BracePatch]): Boolean =
imbalanceMeasure > new ParensAnalyzer(unit, patches1).imbalanceMeasure
+ // don't emit deprecation warnings about identifiers like `macro` or `then`
+ // when skimming through the source file trying to heal braces
+ override def emitIdentifierDeprecationWarnings = false
+
override def error(offset: Int, msg: String) {}
}
}
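Source-level illustrations of the two scanner changes above (assumed examples, not test cases from the patch): plain use of a planned keyword now warns unless the scanner is only skimming for brace repair, and a backquoted wildcard is rejected outright:

object ScannerExamples {
  def then = 1     // warns: "then is now a reserved word; usage as an identifier is deprecated"
  // val `_` = 2   // now a hard error: "wildcard invalid as backquoted identifier"
}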
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index f94055f666..7969bb9c20 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -205,26 +205,20 @@ abstract class TreeBuilder {
*/
def makeAnonymousNew(stats: List[Tree]): Tree = {
val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
+ makeNew(Nil, emptyValDef, stats1, ListOfNil, NoPosition, NoPosition)
}
/** Create positioned tree representing an object creation <new parents { stats }
* @param npos the position of the new
* @param cpos the position of the anonymous class starting with parents
*/
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree],
+ def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree], argss: List[List[Tree]],
npos: Position, cpos: Position): Tree =
if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty) {
- // `Parsers.template` no longer differentiates tpts and their argss
- // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
- // instead of parents = Ident(C), argss = Nil as before
- // this change works great for things that are actually templates
- // but in this degenerate case we need to perform postprocessing
- val app = treeInfo.dissectApplied(parents.head)
- atPos(npos union cpos) { New(app.callee, app.argss) }
- } else {
+ makeNew(List(scalaAnyRefConstr), self, stats, argss, npos, cpos)
+ else if (parents.tail.isEmpty && stats.isEmpty)
+ atPos(npos union cpos) { New(parents.head, argss) }
+ else {
val x = tpnme.ANON_CLASS_NAME
atPos(npos union cpos) {
Block(
@@ -232,12 +226,12 @@ abstract class TreeBuilder {
atPos(cpos) {
ClassDef(
Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, stats, cpos.focus))
+ Template(parents, self, NoMods, ListOfNil, argss, stats, cpos.focus))
}),
atPos(npos) {
New(
Ident(x) setPos npos.focus,
- Nil)
+ ListOfNil)
}
)
}
@@ -258,7 +252,7 @@ abstract class TreeBuilder {
/** Create tree representing a while loop */
def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index fd2b11898c..44d7a1929b 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -1164,34 +1164,28 @@ abstract class GenICode extends SubComponent {
resCtx
}
- private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position): Unit = {
- if (!(from <:< to) && !(from == NullReference && to == NothingReference)) {
- to match {
- case UNIT =>
- ctx.bb.emit(DROP(from), pos)
- debuglog("Dropped an " + from);
-
- case _ =>
- debugassert(from != UNIT, "Can't convert from UNIT to " + to + " at: " + pos)
- assert(!from.isReferenceType && !to.isReferenceType,
- "type error: can't convert from " + from + " to " + to +" in unit " + unit.source + " at " + pos)
-
- ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
- }
- } else if (from == NothingReference) {
- ctx.bb.emit(THROW(ThrowableClass))
- ctx.bb.enterIgnoreMode
- } else if (from == NullReference) {
- ctx.bb.emit(DROP(from))
- ctx.bb.emit(CONSTANT(Constant(null)))
+ private def adapt(from: TypeKind, to: TypeKind, ctx: Context, pos: Position) {
+ // An awful lot of bugs explode here - let's leave ourselves more clues.
+ // A typical example is an overloaded type assigned after typer.
+ log(s"GenICode#adapt($from, $to, $ctx, $pos)")
+
+ val conforms = (from <:< to) || (from == NullReference && to == NothingReference)
+ def coerce(from: TypeKind, to: TypeKind) = ctx.bb.emit(CALL_PRIMITIVE(Conversion(from, to)), pos)
+ def checkAssertions() {
+ def msg = s"Can't convert from $from to $to in unit ${unit.source} at $pos"
+ debugassert(from != UNIT, msg)
+ assert(!from.isReferenceType && !to.isReferenceType, msg)
}
- else if (from == ThrowableReference && !(ThrowableClass.tpe <:< to.toType)) {
- log("Inserted check-cast on throwable to " + to + " at " + pos)
- ctx.bb.emit(CHECK_CAST(to))
+ if (conforms) from match {
+ case NothingReference => ctx.bb.emit(THROW(ThrowableClass)) ; ctx.bb.enterIgnoreMode
+ case NullReference => ctx.bb.emit(Seq(DROP(from), CONSTANT(Constant(null))))
+ case ThrowableReference if !(ThrowableClass.tpe <:< to.toType) => ctx.bb.emit(CHECK_CAST(to)) // downcast throwables
+ case BYTE | SHORT | CHAR | INT if to == LONG => coerce(INT, LONG) // widen subrange types
+ case _ => ()
}
- else (from, to) match {
- case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => ctx.bb.emit(CALL_PRIMITIVE(Conversion(INT, LONG)))
- case _ => ()
+ else to match {
+ case UNIT => ctx.bb.emit(DROP(from), pos) // value discarding
+ case _ => checkAssertions() ; coerce(from, to) // other primitive coercions
}
}
@@ -1907,18 +1901,8 @@ abstract class GenICode extends SubComponent {
var handlerCount = 0
- override def toString(): String = {
- val buf = new StringBuilder()
- buf.append("\tpackage: ").append(packg).append('\n')
- buf.append("\tclazz: ").append(clazz).append('\n')
- buf.append("\tmethod: ").append(method).append('\n')
- buf.append("\tbb: ").append(bb).append('\n')
- buf.append("\tlabels: ").append(labels).append('\n')
- buf.append("\texception handlers: ").append(handlers).append('\n')
- buf.append("\tcleanups: ").append(cleanups).append('\n')
- buf.append("\tscope: ").append(scope).append('\n')
- buf.toString()
- }
+ override def toString =
+ s"package $packg { class $clazz { def $method { bb=$bb } } }"
def loadException(ctx: Context, exh: ExceptionHandler, pos: Position) = {
debuglog("Emitting LOAD_EXCEPTION for class: " + exh.loadExceptionClass)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index d185ed0c34..0abbe44b02 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -1018,7 +1018,7 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index fe0020e074..598965b982 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -888,7 +888,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
if (needsAnnotation) {
val c = Constant(RemoteExceptionClass.tpe)
val arg = Literal(c) setType c.tpe
- meth.addAnnotation(ThrowsClass, arg)
+ meth.addAnnotation(appliedType(ThrowsClass, c.tpe), arg)
}
}
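What the patched backend annotation corresponds to at the source level, assuming the usual hand-written form for a remote method; this is an illustration of the equivalent user-visible annotation, not code from the patch:

import java.rmi.RemoteException

trait RemoteService extends java.rmi.Remote {
  // the synthesized annotation now carries its type argument, i.e. the equivalent of writing:
  @throws[RemoteException]("remote invocation may fail")
  def ping(): String
}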
diff --git a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
index fee683ce3a..1beed3f420 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/DeadCodeElimination.scala
@@ -18,6 +18,9 @@ abstract class DeadCodeElimination extends SubComponent {
import icodes.opcodes._
import definitions.RuntimePackage
+ /** The block and index where an instruction is located */
+ type InstrLoc = (BasicBlock, Int)
+
val phaseName = "dce"
/** Create a new phase */
@@ -55,10 +58,10 @@ abstract class DeadCodeElimination extends SubComponent {
val rdef = new reachingDefinitions.ReachingDefinitionsAnalysis;
/** Use-def chain: give the reaching definitions at the beginning of given instruction. */
- var defs: immutable.Map[(BasicBlock, Int), immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
+ var defs: immutable.Map[InstrLoc, immutable.Set[rdef.lattice.Definition]] = immutable.HashMap.empty
/** Useful instructions which have not been scanned yet. */
- val worklist: mutable.Set[(BasicBlock, Int)] = new mutable.LinkedHashSet
+ val worklist: mutable.Set[InstrLoc] = new mutable.LinkedHashSet
/** what instructions have been marked as useful? */
val useful: mutable.Map[BasicBlock, mutable.BitSet] = perRunCaches.newMap()
@@ -66,21 +69,29 @@ abstract class DeadCodeElimination extends SubComponent {
/** what local variables have been accessed at least once? */
var accessedLocals: List[Local] = Nil
+ /** Map from a local and a basic block to the instructions that store to that local in that basic block */
+ val localStores = mutable.Map[(Local, BasicBlock), mutable.BitSet]() withDefault {_ => mutable.BitSet()}
+
+ /** Stores that clobber previous stores to array or ref locals. See SI-5313 */
+ val clobbers = mutable.Set[InstrLoc]()
+
/** the current method. */
var method: IMethod = _
/** Map instructions who have a drop on some control path, to that DROP instruction. */
- val dropOf: mutable.Map[(BasicBlock, Int), List[(BasicBlock, Int)]] = perRunCaches.newMap()
+ val dropOf: mutable.Map[InstrLoc, List[InstrLoc]] = perRunCaches.newMap()
def dieCodeDie(m: IMethod) {
if (m.hasCode) {
debuglog("dead code elimination on " + m);
dropOf.clear()
+ localStores.clear()
+ clobbers.clear()
m.code.blocks.clear()
accessedLocals = m.params.reverse
m.code.blocks ++= linearizer.linearize(m)
collectRDef(m)
- mark
+ mark()
sweep(m)
accessedLocals = accessedLocals.distinct
val diff = m.locals diff accessedLocals
@@ -102,12 +113,27 @@ abstract class DeadCodeElimination extends SubComponent {
useful(bb) = new mutable.BitSet(bb.size)
var rd = rdef.in(bb);
for (Pair(i, idx) <- bb.toList.zipWithIndex) {
+
+ // utility for adding to worklist
+ def moveToWorkList() = moveToWorkListIf(true)
+
+ // utility for (conditionally) adding to worklist
+ def moveToWorkListIf(cond: Boolean) =
+ if (cond) {
+ debuglog("in worklist: " + i)
+ worklist += ((bb, idx))
+ } else {
+ debuglog("not in worklist: " + i)
+ }
+
+ // instruction-specific logic
i match {
- case LOAD_LOCAL(l) =>
+ case LOAD_LOCAL(_) =>
defs = defs + Pair(((bb, idx)), rd.vars)
+ moveToWorkListIf(false)
- case STORE_LOCAL(_) =>
+ case STORE_LOCAL(l) =>
/* SI-4935 Check whether a module is stack top, if so mark the instruction that loaded it
* (otherwise any side-effects of the module's constructor go lost).
* (a) The other two cases where a module's value is stored (STORE_FIELD and STORE_ARRAY_ITEM)
@@ -124,14 +150,25 @@ abstract class DeadCodeElimination extends SubComponent {
case _ => false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
+
+ // add it to the localStores map
+ val key = (l, bb)
+ val set = localStores(key)
+ set += idx
+ localStores(key) = set
case RETURN(_) | JUMP(_) | CJUMP(_, _, _, _) | CZJUMP(_, _, _, _) | STORE_FIELD(_, _) |
THROW(_) | LOAD_ARRAY_ITEM(_) | STORE_ARRAY_ITEM(_) | SCOPE_ENTER(_) | SCOPE_EXIT(_) | STORE_THIS(_) |
- LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() => worklist += ((bb, idx))
- case CALL_METHOD(m1, _) if isSideEffecting(m1) => worklist += ((bb, idx)); debuglog("marking " + m1)
+ LOAD_EXCEPTION(_) | SWITCH(_, _) | MONITOR_ENTER() | MONITOR_EXIT() =>
+ moveToWorkList()
+
+ case CALL_METHOD(m1, _) if isSideEffecting(m1) =>
+ moveToWorkList()
+
case CALL_METHOD(m1, SuperCall(_)) =>
- worklist += ((bb, idx)) // super calls to constructor
+ moveToWorkList() // super calls to constructor
+
case DROP(_) =>
val necessary = rdef.findDefs(bb, idx, 1) exists { p =>
val (bb1, idx1) = p
@@ -140,12 +177,13 @@ abstract class DeadCodeElimination extends SubComponent {
case LOAD_EXCEPTION(_) | DUP(_) | LOAD_MODULE(_) => true
case _ =>
dropOf((bb1, idx1)) = (bb,idx) :: dropOf.getOrElse((bb1, idx1), Nil)
-// println("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
+ debuglog("DROP is innessential: " + i + " because of: " + bb1(idx1) + " at " + bb1 + ":" + idx1)
false
}
}
- if (necessary) worklist += ((bb, idx))
+ moveToWorkListIf(necessary)
case _ => ()
+ moveToWorkListIf(false)
}
rd = rdef.interpret(bb, idx, rd)
}
@@ -162,17 +200,35 @@ abstract class DeadCodeElimination extends SubComponent {
def mark() {
// log("Starting with worklist: " + worklist)
while (!worklist.isEmpty) {
- val (bb, idx) = worklist.iterator.next
+ val (bb, idx) = worklist.head
worklist -= ((bb, idx))
debuglog("Marking instr: \tBB_" + bb + ": " + idx + " " + bb(idx))
val instr = bb(idx)
+ // adds the instructions that define the stack values about to be consumed to the worklist to
+ // be marked useful
+ def addDefs() = for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
+ debuglog(s"\t${bb1(idx1)} is consumed by $instr")
+ worklist += ((bb1, idx1))
+ }
+
+ // DROP logic -- if an instruction is useful, its drops are also useful
+ // and we don't mark the DROPs as useful directly but add them to the
+ // worklist so we also mark their reaching defs as useful - see SI-7060
if (!useful(bb)(idx)) {
useful(bb) += idx
dropOf.get(bb, idx) foreach {
- for ((bb1, idx1) <- _)
- useful(bb1) += idx1
+ for ((bb1, idx1) <- _) {
+ /*
+ * SI-7060: A drop that we now mark as useful can be reached via several paths,
+ * so we follow up by marking all of its reaching definitions as useful too:
+ */
+ debuglog("\tAdding: " + bb1(idx1) + " to the worklist, as a useful DROP.")
+ worklist += ((bb1, idx1))
+ }
}
+
+ // per-instruction logic
instr match {
case LOAD_LOCAL(l1) =>
for ((l2, bb1, idx1) <- defs((bb, idx)) if l1 == l2; if !useful(bb1)(idx1)) {
@@ -180,6 +236,15 @@ abstract class DeadCodeElimination extends SubComponent {
worklist += ((bb1, idx1))
}
+ case STORE_LOCAL(l1) if l1.kind.isRefOrArrayType =>
+ addDefs()
+ // see SI-5313
+ // search for clobbers of this store if we aren't doing l1 = null
+ // this doesn't catch the second store in x=null;l1=x; but in practice this catches
+ // a lot of null stores very cheaply
+ if (idx == 0 || bb(idx - 1) != CONSTANT(Constant(null)))
+ findClobbers(l1, bb, idx + 1)
+
case nw @ NEW(REFERENCE(sym)) =>
assert(nw.init ne null, "null new.init at: " + bb + ": " + idx + "(" + instr + ")")
worklist += findInstruction(bb, nw.init)
@@ -199,26 +264,86 @@ abstract class DeadCodeElimination extends SubComponent {
()
case _ =>
- for ((bb1, idx1) <- rdef.findDefs(bb, idx, instr.consumed) if !useful(bb1)(idx1)) {
- debuglog("\tAdding " + bb1(idx1))
- worklist += ((bb1, idx1))
- }
+ addDefs()
}
}
}
}
+ /**
+ * Finds and marks all clobbers of the given local starting in the given
+ * basic block at the given index.
+ *
+ * Storing to a local variable of reference or array type may be indirectly
+ * observable, because it removes a reference to an object and may thereby
+ * allow that object to be gc'd. See SI-5313. Here the LOCAL_STOREs that
+ * immediately follow a LOCAL_STORE and store to the same local are called
+ * its "clobbers." If a LOCAL_STORE is marked useful, its clobbers must be
+ * added to the clobbers set so that they can be compensated for later.
+ */
+ def findClobbers(l: Local, bb: BasicBlock, idx: Int) {
+ // previously visited blocks tracked to prevent searching forever in a cycle
+ val inspected = mutable.Set[BasicBlock]()
+ // our worklist of blocks that still need to be checked
+ val blocksToBeInspected = mutable.Set[BasicBlock]()
+
+ // Tries to find the next clobber of l in bb1 starting at idx1.
+ // If it finds one it adds the clobber to the clobbers set for later
+ // handling. If not, it adds bb1's direct successor blocks to
+ // blocksToBeInspected to try to find clobbers there. Either way
+ // it adds the exception successor blocks for further searching.
+ def findClobberInBlock(idx1: Int, bb1: BasicBlock) {
+ val key = ((l, bb1))
+ val foundClobber = (localStores contains key) && {
+ def minIdx(s : mutable.BitSet) = if(s.isEmpty) -1 else s.min
+
+ // find the smallest index greater than or equal to idx1
+ val clobberIdx = minIdx(localStores(key) dropWhile (_ < idx1))
+ if (clobberIdx == -1)
+ false
+ else {
+ debuglog(s"\t${bb1(clobberIdx)} is a clobber of ${bb(idx)}")
+ clobbers += ((bb1, clobberIdx))
+ true
+ }
+ }
+
+ // always need to look into the exception successors for additional clobbers
+ // because we don't know when flow might enter an exception handler
+ blocksToBeInspected ++= (bb1.exceptionSuccessors filterNot inspected)
+ // If we didn't find a clobber here then we need to look at successor blocks.
+ // if we found a clobber then we don't need to search in the direct successors
+ if (!foundClobber) {
+ blocksToBeInspected ++= (bb1.directSuccessors filterNot inspected)
+ }
+ }
+
+ // first search starting at the current index
+ // note we don't put bb in the inspected list yet because a loop may later force
+ // us back around to search from the beginning of bb
+ findClobberInBlock(idx, bb)
+ // then loop until we've exhausted the set of uninspected blocks
+ while(!blocksToBeInspected.isEmpty) {
+ val bb1 = blocksToBeInspected.head
+ blocksToBeInspected -= bb1
+ inspected += bb1
+ findClobberInBlock(0, bb1)
+ }
+ }
+
def sweep(m: IMethod) {
val compensations = computeCompensations(m)
+ debuglog("Sweeping: " + m)
+
m foreachBlock { bb =>
-// Console.println("** Sweeping block " + bb + " **")
+ debuglog(bb + ":")
val oldInstr = bb.toList
bb.open
bb.clear
for (Pair(i, idx) <- oldInstr.zipWithIndex) {
if (useful(bb)(idx)) {
-// log(" " + i + " is useful")
+ debuglog(" * " + i + " is useful")
bb.emit(i, i.pos)
compensations.get(bb, idx) match {
case Some(is) => is foreach bb.emit
@@ -236,9 +361,15 @@ abstract class DeadCodeElimination extends SubComponent {
i match {
case NEW(REFERENCE(sym)) =>
log(s"Eliminated instantation of $sym inside $m")
+ case STORE_LOCAL(l) if clobbers contains ((bb, idx)) =>
+ // if an unused instruction was a clobber of a used store to a reference or array type
+ // then we'll replace it with the store of a null to make sure the reference is
+ // eliminated. See SI-5313
+ bb emit CONSTANT(Constant(null))
+ bb emit STORE_LOCAL(l)
case _ => ()
}
- debuglog("Skipped: bb_" + bb + ": " + idx + "( " + i + ")")
+ debuglog(" " + i + " [swept]")
}
}
@@ -247,8 +378,8 @@ abstract class DeadCodeElimination extends SubComponent {
}
}
- private def computeCompensations(m: IMethod): mutable.Map[(BasicBlock, Int), List[Instruction]] = {
- val compensations: mutable.Map[(BasicBlock, Int), List[Instruction]] = new mutable.HashMap
+ private def computeCompensations(m: IMethod): mutable.Map[InstrLoc, List[Instruction]] = {
+ val compensations: mutable.Map[InstrLoc, List[Instruction]] = new mutable.HashMap
m foreachBlock { bb =>
assert(bb.closed, "Open block in computeCompensations")
@@ -259,6 +390,7 @@ abstract class DeadCodeElimination extends SubComponent {
val defs = rdef.findDefs(bb, idx, 1, depth)
for (d <- defs) {
val (bb, idx) = d
+ debuglog("rdef: "+ bb(idx))
bb(idx) match {
case DUP(_) if idx > 0 =>
bb(idx - 1) match {
@@ -287,7 +419,7 @@ abstract class DeadCodeElimination extends SubComponent {
res
}
- private def findInstruction(bb: BasicBlock, i: Instruction): (BasicBlock, Int) = {
+ private def findInstruction(bb: BasicBlock, i: Instruction): InstrLoc = {
for (b <- linearizer.linearizeAt(method, bb)) {
val idx = b.toList indexWhere (_ eq i)
if (idx != -1)
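
A minimal, hypothetical source-level sketch of the SI-5313 situation that the clobber
tracking above is meant to handle; the names and sizes are illustrative and not taken
from the patch or its tests:

object ClobberSketch {
  def use(): Int = {
    var x: Array[Int] = new Array[Int](1000000) // useful store: `x` is read below
    val len = x.length                          // the only read of `x`
    x = new Array[Int](1)                       // dead store that clobbers the useful one
    // Deleting the dead store outright would keep the large array reachable through `x`
    // for the rest of the method; the sweep above instead emits `x = null` in its place,
    // preserving the original program's GC behaviour.
    len
  }
}
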
diff --git a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
index f60d56d9bb..f509c63ba0 100755
--- a/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
+++ b/src/compiler/scala/tools/nsc/doc/base/CommentFactoryBase.scala
@@ -100,26 +100,26 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
- protected val endOfText = '\u0003'
- protected val endOfLine = '\u000A'
+ private val endOfText = '\u0003'
+ private val endOfLine = '\u000A'
/** Something that should not have happened, happened, and Scaladoc should exit. */
- protected def oops(msg: String): Nothing =
+ private def oops(msg: String): Nothing =
throw FatalError("program logic: " + msg)
/** The body of a line, dropping the (optional) start star-marker,
* one leading whitespace and all trailing whitespace. */
- protected val CleanCommentLine =
+ private val CleanCommentLine =
new Regex("""(?:\s*\*\s?)?(.*)""")
/** Dangerous HTML tags that should be replaced by something safer,
* such as wiki syntax, or that should be dropped. */
- protected val DangerousTags =
+ private val DangerousTags =
new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
/** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
* if it cannot be salvaged. */
- protected def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ private def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
case "p" | "div" => "\n\n"
case "h1" => "\n= "
case "/h1" => " =\n"
@@ -135,11 +135,11 @@ trait CommentFactoryBase { this: MemberLookupBase =>
/** Javadoc tags that should be replaced by something useful, such as wiki
* syntax, or that should be dropped. */
- protected val JavadocTags =
+ private val JavadocTags =
new Regex("""\{\@(code|docRoot|inheritDoc|link|linkplain|literal|value)([^}]*)\}""")
/** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
- protected def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ private def javadocReplacement(mtch: Regex.Match): String = mtch.group(1) match {
case "code" => "`" + mtch.group(2) + "`"
case "docRoot" => ""
case "inheritDoc" => ""
@@ -151,41 +151,41 @@ trait CommentFactoryBase { this: MemberLookupBase =>
}
/** Safe HTML tags that can be kept. */
- protected val SafeTags =
+ private val SafeTags =
new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
- protected val safeTagMarker = '\u000E'
+ private val safeTagMarker = '\u000E'
/** A Scaladoc tag not linked to a symbol and not followed by text */
- protected val SingleTag =
+ private val SingleTagRegex =
new Regex("""\s*@(\S+)\s*""")
/** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
- protected val SimpleTag =
+ private val SimpleTagRegex =
new Regex("""\s*@(\S+)\s+(.*)""")
/** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
* of the symbol, and the rest of the line. */
- protected val SymbolTag =
+ private val SymbolTagRegex =
new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
/** The start of a scaladoc code block */
- protected val CodeBlockStart =
+ private val CodeBlockStartRegex =
new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
/** The end of a scaladoc code block */
- protected val CodeBlockEnd =
+ private val CodeBlockEndRegex =
new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
/** A key used for a tag map. The key is built from the name of the tag and
* from the linked symbol if the tag has one.
* Equality on tag keys is structural. */
- protected sealed abstract class TagKey {
+ private sealed abstract class TagKey {
def name: String
}
- protected final case class SimpleTagKey(name: String) extends TagKey
- protected final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+ private final case class SimpleTagKey(name: String) extends TagKey
+ private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
/** Parses a raw comment string into a `Comment` object.
* @param comment The expanded comment string (including start and end markers) to be parsed.
@@ -231,7 +231,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
inCodeBlock: Boolean
): Comment = remaining match {
- case CodeBlockStart(before, marker, after) :: ls if (!inCodeBlock) =>
+ case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false)
else if (!before.trim.isEmpty)
@@ -250,7 +250,7 @@ trait CommentFactoryBase { this: MemberLookupBase =>
parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true)
}
- case CodeBlockEnd(before, marker, after) :: ls =>
+ case CodeBlockEndRegex(before, marker, after) :: ls =>
if (!before.trim.isEmpty && !after.trim.isEmpty)
parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true)
if (!before.trim.isEmpty)
@@ -269,17 +269,17 @@ trait CommentFactoryBase { this: MemberLookupBase =>
parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false)
}
- case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) =>
+ case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) =>
val key = SymbolTagKey(name, sym)
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
- case SimpleTag(name, body) :: ls if (!inCodeBlock) =>
+ case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) =>
val key = SimpleTagKey(name)
val value = body :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
- case SingleTag(name) :: ls if (!inCodeBlock) =>
+ case SingleTagRegex(name) :: ls if (!inCodeBlock) =>
val key = SimpleTagKey(name)
val value = "" :: tags.getOrElse(key, Nil)
parse0(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index 86407fb9a3..c76bdc58d9 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -48,10 +48,28 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
</div>
</body>
+ def letters: NodeSeq =
+ '_' +: ('a' to 'z') map {
+ char => {
+ val label = if (char == '_') '#' else char.toUpper
+
+ index.firstLetterIndex.get(char) match {
+ case Some(_) =>
+ <a target="template" href={ "index/index-" + char + ".html" }>{
+ label
+ }</a>
+ case None => <span>{ label }</span>
+ }
+ }
+ }
+
def browser =
<div id="browser" class="ui-layout-west">
<div class="ui-west-center">
- <div id="filter"></div>
+ <div id="filter">
+ <div id="textfilter"></div>
+ <div id="letters">{ letters }</div>
+ </div>
<div class="pack" id="tpl">{
def packageElem(pack: model.Package): NodeSeq = {
<xml:group>
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
index 2a8f9b570a..55fb370a41 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.css
@@ -206,7 +206,7 @@ h1 {
border-right:0;
}
-#letters > a {
+#letters > a, #letters > span {
/* font-family: monospace;*/
color: #858484;
font-weight: bold;
@@ -214,6 +214,10 @@ h1 {
text-shadow: #ffffff 0 1px 0;
padding-right: 2px;
}
+
+#letters > span {
+ color: #bbb;
+}
#tpl {
display: block;
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
index 1323a06c01..70073b272a 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -335,8 +335,7 @@ function keyboardScrolldownLeftPane() {
/* Configures the text filter */
function configureTextFilter() {
scheduler.add("init", function() {
- $("#filter").append("<div id='textfilter'><span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/></div>");
- printAlphabet();
+ $("#textfilter").append("<span class='pre'/><span class='input'><input id='index-input' type='text' accesskey='/'/></span><span class='post'/>");
var input = $("#textfilter input");
resizeFilterBlock();
input.bind('keyup', function(event) {
@@ -532,19 +531,3 @@ function kindFilterSync() {
function resizeFilterBlock() {
$("#tpl").css("top", $("#filter").outerHeight(true));
}
-
-function printAlphabet() {
- var html = '<a target="template" href="index/index-_.html">#</a>';
- var c;
- for (c = 'a'; c <= 'z'; c = String.fromCharCode(c.charCodeAt(0) + 1)) {
- html += [
- '<a target="template" href="index/index-',
- c,
- '.html">',
- c.toUpperCase(),
- '</a>'
- ].join('');
- }
- $("#filter").append('<div id="letters">' + html + '</div>');
-}
-
diff --git a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
index 10e2f23142..4ee6daf73e 100755
--- a/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/IndexModelFactory.scala
@@ -20,7 +20,7 @@ object IndexModelFactory {
/* Owner template ordering */
implicit def orderingSet = math.Ordering.String.on { x: MemberEntity => x.name.toLowerCase }
/* symbol name ordering */
- implicit def orderingMap = math.Ordering.String.on { x: String => x.toLowerCase }
+ implicit def orderingMap = math.Ordering.String
def addMember(d: MemberEntity) = {
val firstLetter = {
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index c6cfc317ea..0a469c9227 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -314,12 +314,15 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
inform("Creating doc template for " + sym)
override def toRoot: List[DocTemplateImpl] = this :: inTpl.toRoot
- def inSource =
- if (sym.sourceFile != null && ! sym.isSynthetic)
- Some((sym.sourceFile, sym.pos.line))
+
+ protected def inSourceFromSymbol(symbol: Symbol) =
+ if (symbol.sourceFile != null && ! symbol.isSynthetic)
+ Some((symbol.sourceFile, symbol.pos.line))
else
None
+ def inSource = inSourceFromSymbol(sym)
+
def sourceUrl = {
def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/")
val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/"
@@ -508,11 +511,11 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
abstract class PackageImpl(sym: Symbol, inTpl: PackageImpl) extends DocTemplateImpl(sym, inTpl) with Package {
override def inTemplate = inTpl
override def toRoot: List[PackageImpl] = this :: inTpl.toRoot
- override lazy val linearization = {
- val symbol = sym.info.members.find {
+ override lazy val (inSource, linearization) = {
+ val representive = sym.info.members.find {
s => s.isPackageObject
} getOrElse sym
- linearizationFromSymbol(symbol)
+ (inSourceFromSymbol(representive), linearizationFromSymbol(representive))
}
def packages = members collect { case p: PackageImpl if !(droppedPackages contains p) => p }
}
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index b4af8f00d6..73738ebd21 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -139,7 +139,12 @@ trait CompilerControl { self: Global =>
/** Sets sync var `response` to the fully attributed & typechecked tree contained in `source`.
* @pre `source` needs to be loaded.
+ *
+ * @note Deprecated because of race conditions in the typechecker when the background compiler
+ * is interrupted while typing the same `source`.
+ * @see SI-6578
*/
+ @deprecated("Use `askLoadedTyped` instead to avoid race conditions in the typechecker", "2.10.1")
def askType(source: SourceFile, forceReload: Boolean, response: Response[Tree]) =
postWorkItem(new AskTypeItem(source, forceReload, response))
@@ -157,6 +162,20 @@ trait CompilerControl { self: Global =>
def askLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) =
postWorkItem(new AskLinkPosItem(sym, source, response))
+ /** Sets sync var `response` to doc comment information for a given symbol.
+ *
+ * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
+ * @param site The place where sym is observed.
+ * @param source The source file that's supposed to contain the definition
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition of the given symbol that has a doc comment,
+ * the (expanded, raw, position) triplet for that comment, otherwise ("", "", NoPosition).
+ * Note: This operation does not automatically load `source`. If `source`
+ * is unloaded, it stays that way.
+ */
+ def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) =
+ postWorkItem(new AskDocCommentItem(sym, site, source, response))
+
/** Sets sync var `response` to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
* @pre source is loaded
@@ -240,15 +259,12 @@ trait CompilerControl { self: Global =>
}
/** Returns parse tree for source `source`. No symbols are entered. Syntax errors are reported.
- * Can be called asynchronously from presentation compiler.
+ *
+ * This method is thread-safe and as such can safely run outside of the presentation
+ * compiler thread.
*/
- def parseTree(source: SourceFile): Tree = ask { () =>
- getUnit(source) match {
- case Some(unit) if unit.status >= JustParsed =>
- unit.body
- case _ =>
- new UnitParser(new CompilationUnit(source)).parse()
- }
+ def parseTree(source: SourceFile): Tree = {
+ new UnitParser(new CompilationUnit(source)).parse()
}
/** Asks for a computation to be done quickly on the presentation compiler thread */
@@ -374,6 +390,14 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
+ case class AskDocCommentItem(val sym: Symbol, val site: Symbol, val source: SourceFile, response: Response[(String, String, Position)]) extends WorkItem {
+ def apply() = self.getDocComment(sym, site, source, response)
+ override def toString = "doc comment "+sym+" in "+source
+
+ def raiseMissing() =
+ response raise new MissingResponse
+ }
+
case class AskLoadedTypedItem(val source: SourceFile, response: Response[Tree]) extends WorkItem {
def apply() = self.waitLoadedTyped(source, response, this.onCompilerThread)
override def toString = "wait loaded & typed "+source
diff --git a/src/compiler/scala/tools/nsc/interactive/Doc.scala b/src/compiler/scala/tools/nsc/interactive/Doc.scala
deleted file mode 100755
index ad28a28105..0000000000
--- a/src/compiler/scala/tools/nsc/interactive/Doc.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2007-2012 LAMP/EPFL
- * @author Eugene Vigdorchik
- */
-
-package scala.tools.nsc
-package interactive
-
-import doc.base._
-import comment._
-import scala.xml.NodeSeq
-
-sealed trait DocResult
-final case class UrlResult(url: String) extends DocResult
-final case class HtmlResult(comment: Comment) extends DocResult
-
-abstract class Doc(val settings: doc.Settings) extends MemberLookupBase with CommentFactoryBase {
-
- override val global: interactive.Global
- import global._
-
- def chooseLink(links: List[LinkTo]): LinkTo
-
- override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
- ask { () =>
- if (sym.isClass || sym.isModule)
- Some(LinkToTpl(sym))
- else
- if ((site.isClass || site.isModule) && site.info.members.toList.contains(sym))
- Some(LinkToMember(sym, site))
- else
- None
- }
-
- override def toString(link: LinkTo) = ask { () =>
- link match {
- case LinkToMember(mbr: Symbol, site: Symbol) =>
- mbr.signatureString + " in " + site.toString
- case LinkToTpl(sym: Symbol) => sym.toString
- case _ => link.toString
- }
- }
-
- def retrieve(sym: Symbol, site: Symbol): Option[DocResult] = {
- val sig = ask { () => externalSignature(sym) }
- findExternalLink(sym, sig) map { link => UrlResult(link.url) } orElse {
- val resp = new Response[Tree]
- // Ensure docComment tree is type-checked.
- val pos = ask { () => docCommentPos(sym) }
- askTypeAt(pos, resp)
- resp.get.left.toOption flatMap { _ =>
- ask { () =>
- val comment = parseAtSymbol(expandedDocComment(sym), rawDocComment(sym), pos, Some(site))
- Some(HtmlResult(comment))
- }
- }
- }
- }
-}
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 4ab7b98b3d..105b0e4833 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -225,7 +225,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
/** Called from parser, which signals hereby that a method definition has been parsed.
*/
override def signalParseProgress(pos: Position) {
- checkForMoreWork(pos)
+ // We only want to be interruptible when running on the PC thread.
+ if(onCompilerThread) {
+ checkForMoreWork(pos)
+ }
}
/** Called from typechecker, which signals hereby that a node has been completely typechecked.
@@ -447,7 +450,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
*/
@elidable(elidable.WARNING)
override def assertCorrectThread() {
- assert(initializing || (Thread.currentThread() eq compileRunner),
+ assert(initializing || onCompilerThread,
"Race condition detected: You are running a presentation compiler method outside the PC thread.[phase: %s]".format(globalPhase) +
" Please file a ticket with the current stack trace at https://www.assembla.com/spaces/scala-ide/support/tickets")
}
@@ -462,6 +465,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
compileRunner
}
+ private def ensureUpToDate(unit: RichCompilationUnit) =
+ if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+
/** Compile all loaded source files in the order given by `allSources`.
*/
private[interactive] final def backgroundCompile() {
@@ -474,7 +480,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
// ensure all loaded units are parsed
for (s <- allSources; unit <- getUnit(s)) {
// checkForMoreWork(NoPosition) // disabled, as any work done here would be in an inconsistent state
- if (!unit.isUpToDate && unit.status != JustParsed) reset(unit) // reparse previously typechecked units.
+ ensureUpToDate(unit)
parseAndEnter(unit)
serviceParsedEntered()
}
@@ -727,7 +733,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
try {
debugLog("starting targeted type check")
typeCheck(unit)
- println("tree not found at "+pos)
+// println("tree not found at "+pos)
EmptyTree
} catch {
case ex: TyperResult => new Locator(pos) locateIn ex.tree
@@ -758,64 +764,69 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
respond(response)(typedTree(source, forceReload))
}
- /** Implements CompilerControl.askLinkPos */
- private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
+ private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
+ getUnit(source) match {
+ case None =>
+ reloadSources(List(source))
+ try f(getUnit(source).get)
+ finally afterRunRemoveUnitOf(source)
+ case Some(unit) =>
+ f(unit)
+ }
- /** Find position of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
- def findLinkPos(unit: RichCompilationUnit): Position = {
- val originalTypeParams = sym.owner.typeParams
- parseAndEnter(unit)
- val pre = adaptToNewRunMap(ThisType(sym.owner))
- val rawsym = pre.typeSymbol.info.decl(sym.name)
- val newsym = rawsym filter { alt =>
- sym.isType || {
- try {
- val tp1 = pre.memberType(alt) onTypeError NoType
- val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
- matchesType(tp1, tp2, false) || {
- debugLog(s"getLinkPos matchesType($tp1, $tp2) failed")
- val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
- matchesType(tp1, tp3, false) || {
- debugLog(s"getLinkPos fallback matchesType($tp1, $tp3) failed")
- false
- }
- }
- }
- catch {
- case ex: ControlThrowable => throw ex
- case ex: Throwable =>
- println("error in hyperlinking: " + ex)
- ex.printStackTrace()
+ /** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit` is loaded. */
+ private def findMirrorSymbol(sym: Symbol, unit: RichCompilationUnit): Symbol = {
+ val originalTypeParams = sym.owner.typeParams
+ ensureUpToDate(unit)
+ parseAndEnter(unit)
+ val pre = adaptToNewRunMap(ThisType(sym.owner))
+ val rawsym = pre.typeSymbol.info.decl(sym.name)
+ val newsym = rawsym filter { alt =>
+ sym.isType || {
+ try {
+ val tp1 = pre.memberType(alt) onTypeError NoType
+ val tp2 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, sym.owner.typeParams)
+ matchesType(tp1, tp2, false) || {
+ debugLog(s"findMirrorSymbol matchesType($tp1, $tp2) failed")
+ val tp3 = adaptToNewRunMap(sym.tpe) substSym (originalTypeParams, alt.owner.typeParams)
+ matchesType(tp1, tp3, false) || {
+ debugLog(s"findMirrorSymbol fallback matchesType($tp1, $tp3) failed")
false
+ }
}
}
- }
- if (newsym == NoSymbol) {
- if (rawsym.exists && !rawsym.isOverloaded) rawsym.pos
- else {
- debugLog("link not found " + sym + " " + source + " " + pre)
- NoPosition
+ catch {
+ case ex: ControlThrowable => throw ex
+ case ex: Throwable =>
+ debugLog("error in findMirrorSymbol: " + ex)
+ ex.printStackTrace()
+ false
}
- } else if (newsym.isOverloaded) {
- settings.uniqid.value = true
- debugLog("link ambiguous " + sym + " " + source + " " + pre + " " + newsym.alternatives)
- NoPosition
- } else {
- debugLog("link found for " + newsym + ": " + newsym.pos)
- newsym.pos
}
}
+ if (newsym == NoSymbol) {
+ if (rawsym.exists && !rawsym.isOverloaded) rawsym
+ else {
+ debugLog("mirror not found " + sym + " " + unit.source + " " + pre)
+ NoSymbol
+ }
+ } else if (newsym.isOverloaded) {
+ settings.uniqid.value = true
+ debugLog("mirror ambiguous " + sym + " " + unit.source + " " + pre + " " + newsym.alternatives)
+ NoSymbol
+ } else {
+ debugLog("mirror found for " + newsym + ": " + newsym.pos)
+ newsym
+ }
+ }
+ /** Implements CompilerControl.askLinkPos */
+ private[interactive] def getLinkPos(sym: Symbol, source: SourceFile, response: Response[Position]) {
informIDE("getLinkPos "+sym+" "+source)
respond(response) {
if (sym.owner.isClass) {
- getUnit(source) match {
- case None =>
- reloadSources(List(source))
- try findLinkPos(getUnit(source).get)
- finally afterRunRemoveUnitOf(source)
- case Some(unit) =>
- findLinkPos(unit)
+ withTempUnit(source){ u =>
+ findMirrorSymbol(sym, u).pos
}
} else {
debugLog("link not in class "+sym+" "+source+" "+sym.owner)
@@ -824,6 +835,50 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ /** Implements CompilerControl.askDocComment */
+ private[interactive] def getDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) {
+ informIDE("getDocComment "+sym+" "+source)
+ respond(response) {
+ withTempUnit(source){ u =>
+ val mirror = findMirrorSymbol(sym, u)
+ if (mirror eq NoSymbol)
+ ("", "", NoPosition)
+ else {
+ forceDocComment(mirror, u)
+ (expandedDocComment(mirror), rawDocComment(mirror), docCommentPos(mirror))
+ }
+ }
+ }
+ }
+
+ private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+ // Either typer has been run and we don't find DocDef,
+ // or we force the targeted typecheck here.
+ // In both cases doc comment maps should be filled for the subject symbol.
+ val docTree =
+ unit.body find {
+ case DocDef(_, defn) if defn.symbol eq sym => true
+ case _ => false
+ }
+
+ for (t <- docTree) {
+ debugLog("Found DocDef tree for "+sym)
+ // Cannot get a typed tree at position since DocDef range is transparent.
+ val prevPos = unit.targetPos
+ val prevInterruptsEnabled = interruptsEnabled
+ try {
+ unit.targetPos = t.pos
+ interruptsEnabled = true
+ typeCheck(unit)
+ } catch {
+ case _: TyperResult => // ignore since we are after the side effect.
+ } finally {
+ unit.targetPos = prevPos
+ interruptsEnabled = prevInterruptsEnabled
+ }
+ }
+ }
+
def stabilizedType(tree: Tree): Type = tree match {
case Ident(_) if tree.symbol.isStable =>
singleType(NoPrefix, tree.symbol)
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index ffad19fbaa..84cb03c140 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -165,6 +165,11 @@ trait Picklers { self: Global =>
.wrapped { case sym ~ source => new AskLinkPosItem(sym, source, new Response) } { item => item.sym ~ item.source }
.asClass (classOf[AskLinkPosItem])
+ implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
+ (pkl[Symbol] ~ pkl[Symbol] ~ pkl[SourceFile])
+ .wrapped { case sym ~ site ~ source => new AskDocCommentItem(sym, site, source, new Response) } { item => item.sym ~ item.site ~ item.source }
+ .asClass (classOf[AskDocCommentItem])
+
implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
pkl[SourceFile]
.wrapped { source => new AskLoadedTypedItem(source, new Response) } { _.source }
@@ -182,5 +187,5 @@ trait Picklers { self: Global =>
implicit def action: Pickler[() => Unit] =
reloadItem | askTypeAtItem | askTypeItem | askTypeCompletionItem | askScopeCompletionItem |
- askToDoFirstItem | askLinkPosItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
+ askToDoFirstItem | askLinkPosItem | askDocCommentItem | askLoadedTypedItem | askParsedEnteredItem | emptyAction
}
diff --git a/src/compiler/scala/tools/nsc/interactive/REPL.scala b/src/compiler/scala/tools/nsc/interactive/REPL.scala
index dacfa679dd..7b89d5b0aa 100644
--- a/src/compiler/scala/tools/nsc/interactive/REPL.scala
+++ b/src/compiler/scala/tools/nsc/interactive/REPL.scala
@@ -110,11 +110,6 @@ object REPL {
show(completeResult)
}
- def doTypedTree(file: String) {
- comp.askType(toSourceFile(file), true, typedResult)
- show(typedResult)
- }
-
def doStructure(file: String) {
comp.askParsedEntered(toSourceFile(file), false, structureResult)
show(structureResult)
@@ -175,10 +170,8 @@ object REPL {
comp.askReload(List(toSourceFile(file)), reloadResult)
Thread.sleep(millis.toInt)
println("ask type now")
- comp.askType(toSourceFile(file), false, typedResult)
+ comp.askLoadedTyped(toSourceFile(file), typedResult)
typedResult.get
- case List("typed", file) =>
- doTypedTree(file)
case List("typeat", file, off1, off2) =>
doTypeAt(makePos(file, off1, off2))
case List("typeat", file, off1) =>
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 64117bd8ee..b95f1fa7ca 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -144,7 +144,7 @@ self: scala.tools.nsc.Global =>
*/
private def setChildrenPos(pos: Position, trees: List[Tree]): Unit = try {
for (tree <- trees) {
- if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ if (!tree.isEmpty && tree.pos == NoPosition) {
val children = tree.children
if (children.isEmpty) {
tree setPos pos.focus
@@ -165,7 +165,7 @@ self: scala.tools.nsc.Global =>
*/
override def atPos[T <: Tree](pos: Position)(tree: T): T = {
if (pos.isOpaqueRange) {
- if (!tree.isEmpty && tree.canHaveAttrs && tree.pos == NoPosition) {
+ if (!tree.isEmpty && tree.pos == NoPosition) {
tree.setPos(pos)
val children = tree.children
if (children.nonEmpty) {
@@ -203,7 +203,7 @@ self: scala.tools.nsc.Global =>
def validate(tree: Tree, encltree: Tree): Unit = {
- if (!tree.isEmpty && tree.canHaveAttrs) {
+ if (!tree.isEmpty) {
if (settings.Yposdebug.value && (settings.verbose.value || settings.Yrangepos.value))
println("[%10s] %s".format("validate", treeStatus(tree, encltree)))
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
index 62d274bc70..597b9012ce 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/InteractiveTest.scala
@@ -55,7 +55,6 @@ abstract class InteractiveTest
with AskShutdown
with AskReload
with AskLoadedTyped
- with AskType
with PresentationCompilerInstance
with CoreTestDefs
with InteractiveTestSettings { self =>
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
index eb902e3e6c..8d446cbbf8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/AskCommand.scala
@@ -97,23 +97,6 @@ trait AskTypeAt extends AskCommand {
}
}
-
-trait AskType extends AskCommand {
- import compiler.Tree
-
- protected def askType(source: SourceFile, forceReload: Boolean)(implicit reporter: Reporter): Response[Tree] = {
- ask {
- compiler.askType(source, forceReload, _)
- }
- }
-
- protected def askType(sources: Seq[SourceFile], forceReload: Boolean)(implicit reporter: Reporter): Seq[Response[Tree]] = {
- for(source <- sources) yield
- askType(source, forceReload)
- }
-}
-
-
trait AskLoadedTyped extends AskCommand {
import compiler.Tree
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index ea2333a65b..7bb2b7b7c8 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -8,7 +8,8 @@ import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
private[tests] trait PresentationCompilerInstance extends TestSettings {
protected val settings = new Settings
- protected def docSettings: doc.Settings = new doc.Settings(_ => ())
+ protected val docSettings = new doc.Settings(_ => ())
+
protected val compilerReporter: CompilerReporter = new InteractiveReporter {
override def compiler = PresentationCompilerInstance.this.compiler
}
diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
index 4b6466c079..bed8570bd0 100644
--- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala
@@ -388,6 +388,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
oldReq <- definedNameMap get name.companionName
newSym <- req.definedSymbols get name
oldSym <- oldReq.definedSymbols get name.companionName
+ if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
} {
afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
@@ -972,7 +973,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends
// }
lazy val definedSymbols = (
termNames.map(x => x -> applyToResultMember(x, x => x)) ++
- typeNames.map(x => x -> compilerTypeOf(x).typeSymbol)
+ typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
).toMap[Name, Symbol] withDefaultValue NoSymbol
lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
diff --git a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
index 10f972452f..5fd5b41625 100644
--- a/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/JLineReader.scala
@@ -37,6 +37,9 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
}
class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+ if ((history: History) ne NoHistory)
+ this setHistory history
+
// working around protected/trait/java insufficiencies.
def goBack(num: Int): Unit = back(num)
def readOneKey(prompt: String) = {
@@ -51,8 +54,6 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
// A hook for running code after the repl is done initializing.
lazy val postInit: Unit = {
this setBellEnabled false
- if ((history: History) ne NoHistory)
- this setHistory history
if (completion ne NoCompletion) {
val argCompletor: ArgumentCompleter =
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 050f7a8f95..43a8402fc7 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -551,7 +551,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
atPos(pos) {
- New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
+ New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), ListOfNil)
}
mods1 = mods1 withAnnotations List(annot)
skipTo(SEMI)
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 9d01e73063..dbb9b7a003 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -15,6 +15,7 @@ import symtab.Flags
import mutable.ListBuffer
import scala.annotation.elidable
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
trait ParallelMatching extends ast.TreeDSL
with MatchSupport
@@ -821,7 +822,7 @@ trait ParallelMatching extends ast.TreeDSL
// match that's unimportant; so we add an instance check only if there
// is a binding.
def bindingWarning() = {
- if (isBound && settings.Xmigration28.value) {
+ if (isBound && settings.Xmigration.value < ScalaVersion.twoDotEight) {
cunit.warning(scrutTree.pos,
"A bound pattern such as 'x @ Pattern' now matches fewer cases than the same pattern with no binding.")
}
diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
index 06ebc20d3e..5c852ae07c 100644
--- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala
@@ -38,14 +38,25 @@ class FscSettings(error: String => Unit) extends Settings(error) {
private def holdsPath = Set[Settings#Setting](
d, dependencyfile, pluginsDir, Ygenjavap
)
+
+ override def processArguments(arguments: List[String], processAll: Boolean): (Boolean, List[String]) = {
+ val (r, args) = super.processArguments(arguments, processAll)
+ // we need to ensure the files specified with relative locations are absolutized based on the currentDir
+ (r, args map {a => absolutizePath(a)})
+ }
+
+ /**
+ * Takes an individual path and, if it is not absolute, turns it into an absolute path based on currentDir.
+ * If it is already absolute it is left alone.
+ */
+ private[this] def absolutizePath(p: String) = (Path(currentDir.value) resolve Path(p)).normalize.path
- /** All user set settings rewritten with absolute paths. */
- def absolutize(root: Path) {
- def rewrite(p: String) = (root resolve Path(p)).normalize.path
+ /** All user set settings rewritten with absolute paths based on currentDir */
+ def absolutize() {
userSetSettings foreach {
- case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(rewrite(p.value))
- case p: PathSetting => p.value = ClassPath.map(p.value, rewrite)
- case p: StringSetting => if (holdsPath(p)) p.value = rewrite(p.value)
+ case p: OutputSetting => p.outputDirs setSingleOutput AbstractFile.getDirectory(absolutizePath(p.value))
+ case p: PathSetting => p.value = ClassPath.map(p.value, absolutizePath)
+ case p: StringSetting => if (holdsPath(p)) p.value = absolutizePath(p.value)
case _ => ()
}
}
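
A small sketch of the path handling introduced above, assuming a hypothetical client
working directory; only Path#resolve/normalize from scala.tools.nsc.io are relied on,
exactly as in absolutizePath:

object FscPathSketch extends App {
  import scala.tools.nsc.io.Path

  def absolutize(currentDir: String, p: String): String =
    (Path(currentDir) resolve Path(p)).normalize.path

  println(absolutize("/home/alice/project", "src/Main.scala"))   // /home/alice/project/src/Main.scala
  println(absolutize("/home/alice/project", "/tmp/Other.scala")) // already absolute, left alone
}
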
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index f1f289ed4d..e4f99474e1 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -221,6 +221,7 @@ class MutableSettings(val errorFn: String => Unit)
def OutputSetting(outputDirs: OutputDirs, default: String) = add(new OutputSetting(outputDirs, default))
def PhasesSetting(name: String, descr: String, default: String = "") = add(new PhasesSetting(name, descr, default))
def StringSetting(name: String, arg: String, descr: String, default: String) = add(new StringSetting(name, arg, descr, default))
+ def ScalaVersionSetting(name: String, arg: String, descr: String, default: ScalaVersion) = add(new ScalaVersionSetting(name, arg, descr, default))
def PathSetting(name: String, descr: String, default: String): PathSetting = {
val prepend = StringSetting(name + "/p", "", "", "").internalOnly()
val append = StringSetting(name + "/a", "", "", "").internalOnly()
@@ -486,6 +487,35 @@ class MutableSettings(val errorFn: String => Unit)
withHelpSyntax(name + " <" + arg + ">")
}
+ /** A setting represented by a Scala version (`default` unless set). */
+ class ScalaVersionSetting private[nsc](
+ name: String,
+ val arg: String,
+ descr: String,
+ default: ScalaVersion)
+ extends Setting(name, descr) {
+ import ScalaVersion._
+
+ type T = ScalaVersion
+ protected var v: T = NoScalaVersion
+
+ override def tryToSet(args: List[String]) = {
+ value = default
+ Some(args)
+ }
+
+ override def tryToSetColon(args: List[String]) = args match {
+ case Nil => value = default; Some(Nil)
+ case x :: xs => value = ScalaVersion(x, errorFn) ; Some(xs)
+ }
+
+ override def tryToSetFromPropertyValue(s: String) = tryToSet(List(s))
+
+ def unparse: List[String] = if (value == NoScalaVersion) Nil else List(s"${name}:${value.unparse}")
+
+ withHelpSyntax(s"${name}:<${arg}>")
+ }
+
class PathSetting private[nsc](
name: String,
descr: String,
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index b820d10ddc..3df6334ec1 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -85,8 +85,7 @@ trait ScalaSettings extends AbsScalaSettings
val logFreeTerms = BooleanSetting ("-Xlog-free-terms", "Print a message when reification creates a free term.")
val logFreeTypes = BooleanSetting ("-Xlog-free-types", "Print a message when reification resorts to generating a free type.")
val maxClassfileName = IntSetting ("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, Some((72, 255)), _ => None)
- val Xmigration28 = BooleanSetting ("-Xmigration", "Warn about constructs whose behavior may have changed between 2.7 and 2.8.").
- withDeprecationMessage("This setting is no longer useful and will be removed. Please remove it from your build.")
+ val Xmigration = ScalaVersionSetting("-Xmigration", "version", "Warn about constructs whose behavior may have changed since version.", AnyScalaVersion)
val nouescape = BooleanSetting ("-Xno-uescape", "Disable handling of \\u unicode escapes.")
val Xnojline = BooleanSetting ("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting ("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
@@ -169,13 +168,13 @@ trait ScalaSettings extends AbsScalaSettings
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "manager", "Compile using the specified build manager.", List("none", "refined", "simple"), "none")
val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.")
+ val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.")
val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup")
val Ynotnull = BooleanSetting ("-Ynotnull", "Enable (experimental and incomplete) scala.NotNull.")
val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overriden methods.")
val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.")
val Yinvalidate = StringSetting ("-Yinvalidate", "classpath-entry", "Invalidate classpath entry before run", "")
val noSelfCheck = BooleanSetting ("-Yno-self-type-checks", "Suppress check for self-type conformance among inherited members.")
- val companionsInPkgObjs = BooleanSetting("-Ycompanions-in-pkg-objs", "Allow companion objects and case classes in package objects. See issue SI-5954.")
val YvirtClasses = false // too embryonic to even expose as a -Y //BooleanSetting ("-Yvirtual-classes", "Support virtual classes")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
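
A hedged sketch of how the reworked -Xmigration flag behaves end to end, using the
ScalaVersionSetting and ScalaVersion definitions elsewhere in this patch; the concrete
scala.tools.nsc.Settings class is assumed to mix in ScalaSettings as above.

import scala.tools.nsc.Settings
import scala.tools.nsc.settings.{AnyScalaVersion, ScalaVersion}

object XmigrationSketch extends App {
  val settings = new Settings(msg => Console.err.println(msg))

  // Bare -Xmigration keeps the default, AnyScalaVersion: every migration warning fires.
  settings.processArguments(List("-Xmigration"), processAll = true)
  assert(settings.Xmigration.value == AnyScalaVersion)

  // -Xmigration:<version> limits warnings to behavior that changed after that version,
  // e.g. the 2.8 pattern-matching change guarded in ParallelMatching above.
  settings.processArguments(List("-Xmigration:2.7.7"), processAll = true)
  assert(settings.Xmigration.value < ScalaVersion.twoDotEight)
}
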
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
new file mode 100644
index 0000000000..d6a0149411
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -0,0 +1,194 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author James Iry
+ */
+// $Id$
+
+package scala.tools.nsc.settings
+
+/**
+ * Represents a single Scala version in a manner that
+ * supports easy comparison and sorting.
+ */
+abstract class ScalaVersion extends Ordered[ScalaVersion] {
+ def unparse: String
+}
+
+/**
+ * A Scala version that sorts higher than all actual versions
+ */
+case object NoScalaVersion extends ScalaVersion {
+ def unparse = "none"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case NoScalaVersion => 0
+ case _ => 1
+ }
+}
+
+/**
+ * A specific Scala version, not one of the magic min/max versions. A SpecificScalaVersion
+ * may or may not be a released version - i.e. this same class is used to represent
+ * final, release candidate, milestone, and development builds. The build argument is used
+ * to distinguish between these kinds of build.
+ */
+case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion {
+ def unparse = s"${major}.${minor}.${rev}.${build.unparse}"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) =>
+ // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these
+ // comparisons a lot so I'm using brute force direct style code
+ if (major < thatMajor) -1
+ else if (major > thatMajor) 1
+ else if (minor < thatMinor) -1
+ else if (minor > thatMinor) 1
+ else if (rev < thatRev) -1
+ else if (rev > thatRev) 1
+ else build compare thatBuild
+ case AnyScalaVersion => 1
+ case NoScalaVersion => -1
+ }
+}
+
+/**
+ * A Scala version that sorts lower than all actual versions
+ */
+case object AnyScalaVersion extends ScalaVersion {
+ def unparse = "any"
+
+ def compare(that: ScalaVersion): Int = that match {
+ case AnyScalaVersion => 0
+ case _ => -1
+ }
+}
+
+/**
+ * Factory methods for producing ScalaVersions
+ */
+object ScalaVersion {
+ private val dot = "\\."
+ private val dash = "\\-"
+ private def not(s:String) = s"[^${s}]"
+ private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r
+
+ def apply(versionString : String, errorHandler: String => Unit): ScalaVersion = {
+ def errorAndValue() = {
+ errorHandler(
+ s"There was a problem parsing ${versionString}. " +
+ "Versions should be in the form major[.minor[.revision]] " +
+ "where each part is a positive number, as in 2.10.1. " +
+ "The minor and revision parts are optional."
+ )
+ AnyScalaVersion
+ }
+
+ def toInt(s: String) = s match {
+ case null | "" => 0
+ case _ => s.toInt
+ }
+
+ def isInt(s: String) = util.Try(toInt(s)).isSuccess
+
+ def toBuild(s: String) = s match {
+ case null | "FINAL" => Final
+ case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2)))
+ case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1)))
+ case _ => Development(s)
+ }
+
+ try versionString match {
+ case "none" => NoScalaVersion
+ case "any" => AnyScalaVersion
+ case R(_, majorS, _, minorS, _, revS, _, buildS) =>
+ SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))
+ case _ =>
+ errorAndValue()
+ } catch {
+ case e: NumberFormatException => errorAndValue()
+ }
+ }
+
+ def apply(versionString: String): ScalaVersion =
+ apply(versionString, msg => throw new NumberFormatException(msg))
+
+ /**
+ * The version of the compiler running now
+ */
+ val current = apply(util.Properties.versionNumberString)
+
+ /**
+ * The 2.8.0 version.
+ */
+ val twoDotEight = SpecificScalaVersion(2, 8, 0, Final)
+}
+
+/**
+ * Represents the data after the dash in major.minor.rev-build
+ */
+abstract class ScalaBuild extends Ordered[ScalaBuild] {
+ /**
+ * Return a version of this build information that can be parsed back into the
+ * same ScalaBuild
+ */
+ def unparse: String
+}
+/**
+ * A development, test, nightly, snapshot or other "unofficial" build
+ */
+case class Development(id: String) extends ScalaBuild {
+ def unparse = s"-${id}"
+
+ def compare(that: ScalaBuild) = that match {
+ // sorting two development builds based on id is reasonably valid for two versions created with the same schema
+ // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions
+ // this is a pragmatic compromise
+ case Development(thatId) => id compare thatId
+ // assume a development build is newer than anything else, that's not really true, but good luck
+ // mapping development build versions to other build types
+ case _ => 1
+ }
+}
+/**
+ * A final release build
+ */
+case object Final extends ScalaBuild {
+ def unparse = ""
+
+ def compare(that: ScalaBuild) = that match {
+ case Final => 0
+ // a final is newer than anything other than a development build or another final
+ case Development(_) => -1
+ case _ => 1
+ }
+}
+
+/**
+ * A candidate for final release
+ */
+case class RC(n: Int) extends ScalaBuild {
+ def unparse = s"-RC${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two rcs based on their RC numbers
+ case RC(thatN) => n - thatN
+ // an rc is older than anything other than a milestone or another rc
+ case Milestone(_) => 1
+ case _ => -1
+ }
+}
+
+/**
+ * An intermediate release
+ */
+case class Milestone(n: Int) extends ScalaBuild {
+ def unparse = s"-M${n}"
+
+ def compare(that: ScalaBuild) = that match {
+ // compare two milestones based on their milestone numbers
+ case Milestone(thatN) => n - thatN
+ // a milestone is older than anything other than another milestone
+ case _ => -1
+
+ }
+}
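
A short sketch of the parsing and ordering defined by the new file above; the expected
values follow from the regex and compare methods shown, not from separate tests.

import scala.tools.nsc.settings._

object ScalaVersionSketch extends App {
  val fin = ScalaVersion("2.10.1")      // SpecificScalaVersion(2, 10, 1, Final)
  val rc  = ScalaVersion("2.10.1-RC2")  // SpecificScalaVersion(2, 10, 1, RC(2))
  val m   = ScalaVersion("2.10.1-M3")   // SpecificScalaVersion(2, 10, 1, Milestone(3))

  assert(m < rc && rc < fin)                             // milestones < RCs < the final release
  assert(AnyScalaVersion < fin && fin < NoScalaVersion)  // the two magic endpoints
  assert(ScalaVersion.twoDotEight == SpecificScalaVersion(2, 8, 0, Final))
}
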
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a708a262e7..a517a33279 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -548,8 +548,8 @@ abstract class ClassfileParser {
skipMembers() // methods
if (!isScala) {
clazz setFlag sflags
- setPrivateWithin(clazz, jflags)
- setPrivateWithin(staticModule, jflags)
+ importPrivateWithinFromJavaFlags(clazz, jflags)
+ importPrivateWithinFromJavaFlags(staticModule, jflags)
clazz.setInfo(classInfo)
moduleClass setInfo staticInfo
staticModule.setInfo(moduleClass.tpe)
@@ -612,7 +612,7 @@ abstract class ClassfileParser {
if (isEnum) ConstantType(Constant(sym))
else info
}
- setPrivateWithin(sym, jflags)
+ importPrivateWithinFromJavaFlags(sym, jflags)
parseAttributes(sym, info)
getScope(jflags).enter(sym)
@@ -663,7 +663,7 @@ abstract class ClassfileParser {
info = MethodType(newParams, clazz.tpe)
}
sym.setInfo(info)
- setPrivateWithin(sym, jflags)
+ importPrivateWithinFromJavaFlags(sym, jflags)
parseAttributes(sym, info)
if ((jflags & JAVA_ACC_VARARGS) != 0) {
sym.setInfo(arrayToRepeated(sym.info))
@@ -1042,8 +1042,9 @@ abstract class ClassfileParser {
def parseExceptions(len: Int) {
val nClasses = in.nextChar
for (n <- 0 until nClasses) {
+ // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
val cls = pool.getClassSymbol(in.nextChar.toInt)
- sym.addAnnotation(definitions.ThrowsClass, Literal(Constant(cls.tpe)))
+ sym.addThrowsAnnotation(cls)
}
}
@@ -1263,19 +1264,6 @@ abstract class ClassfileParser {
protected def getScope(flags: Int): Scope =
if (isStatic(flags)) staticScope else instanceScope
- private def setPrivateWithin(sym: Symbol, jflags: Int) {
- if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
- // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
- // apparently occurs when processing v45.3 bytecode.
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
-
- // protected in java means package protected. #3946
- if ((jflags & JAVA_ACC_PROTECTED) != 0)
- if (sym.enclosingTopLevelClass != NoSymbol)
- sym.privateWithin = sym.enclosingTopLevelClass.owner
- }
-
private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0
private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0
private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 25b7813646..e8b0cd2696 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -71,8 +71,8 @@ abstract class Pickler extends SubComponent {
if (!t.isDef && t.hasSymbol && t.symbol.isTermMacro) {
unit.error(t.pos, t.symbol.typeParams.length match {
case 0 => "macro has not been expanded"
- case 1 => "type parameter not specified"
- case _ => "type parameters not specified"
+ case 1 => "this type parameter must be specified"
+ case _ => "these type parameters must be specified"
})
return
}
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index 0c9cb31d58..7a0b034fd0 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -15,6 +15,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
import global._
import definitions._
import CODE._
+ import treeInfo.StripCast
/** the following two members override abstract members in Transform */
val phaseName: String = "cleanup"
@@ -206,12 +207,17 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
var reflPoly$Cache: SoftReference[scala.runtime.MethodCache] = new SoftReference(new EmptyMethodCache())
def reflMethod$Method(forReceiver: JClass[_]): JMethod = {
- var method: JMethod = reflPoly$Cache.find(forReceiver)
- if (method != null)
+ var methodCache: MethodCache = reflPoly$Cache.find(forReceiver)
+ if (methodCache eq null) {
+ methodCache = new EmptyMethodCache
+ reflPoly$Cache = new SoftReference(methodCache)
+ }
+ var method: JMethod = methodCache.find(forReceiver)
+ if (method ne null)
return method
else {
method = ScalaRunTime.ensureAccessible(forReceiver.getMethod("xyz", reflParams$Cache))
- reflPoly$Cache = new SoftReference(reflPoly$Cache.get.add(forReceiver, method))
+ reflPoly$Cache = new SoftReference(methodCache.add(forReceiver, method))
return method
}
}
@@ -228,16 +234,22 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def getPolyCache = gen.mkCast(fn(REF(reflPolyCacheSym), nme.get), MethodCacheClass.tpe)
addStaticMethodToClass((reflMethodSym, forReceiverSym) => {
+ val methodCache = reflMethodSym.newVariable(mkTerm("methodCache"), ad.pos) setInfo MethodCacheClass.tpe
val methodSym = reflMethodSym.newVariable(mkTerm("method"), ad.pos) setInfo MethodClass.tpe
BLOCK(
- IF (getPolyCache OBJ_EQ NULL) THEN (REF(reflPolyCacheSym) === mkNewPolyCache) ENDIF,
- VAL(methodSym) === ((getPolyCache DOT methodCache_find)(REF(forReceiverSym))) ,
- IF (REF(methodSym) OBJ_!= NULL) .
+ VAR(methodCache) === getPolyCache,
+ IF (REF(methodCache) OBJ_EQ NULL) THEN BLOCK(
+ REF(methodCache) === NEW(TypeTree(EmptyMethodCacheClass.tpe)),
+ REF(reflPolyCacheSym) === gen.mkSoftRef(REF(methodCache))
+ ) ENDIF,
+
+ VAR(methodSym) === (REF(methodCache) DOT methodCache_find)(REF(forReceiverSym)),
+ IF (REF(methodSym) OBJ_NE NULL) .
THEN (Return(REF(methodSym)))
ELSE {
def methodSymRHS = ((REF(forReceiverSym) DOT Class_getMethod)(LIT(method), REF(reflParamsCacheSym)))
- def cacheRHS = ((getPolyCache DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
+ def cacheRHS = ((REF(methodCache) DOT methodCache_add)(REF(forReceiverSym), REF(methodSym)))
BLOCK(
REF(methodSym) === (REF(ensureAccessibleMethod) APPLY (methodSymRHS)),
REF(reflPolyCacheSym) === gen.mkSoftRef(cacheRHS),
@@ -246,6 +258,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
)
})
+
}
/* ### HANDLING METHODS NORMALLY COMPILED TO OPERATORS ### */
@@ -437,19 +450,31 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* is a value type (int et al.) in which case it must cast to the boxed version
* because invoke only returns object and erasure made sure the result is
* expected to be an AnyRef. */
- val t: Tree = ad.symbol.tpe match {
- case MethodType(mparams, resType) =>
- assert(params.length == mparams.length, mparams)
-
- typedPos {
- val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
- qual = REF(sym)
+ val t: Tree = {
+ val (mparams, resType) = ad.symbol.tpe match {
+ case MethodType(mparams, resType) =>
+ assert(params.length == mparams.length, ((params, mparams)))
+ (mparams, resType)
+ case tpe @ OverloadedType(pre, alts) =>
+ unit.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe))
+ alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match {
+ case mt @ MethodType(mparams, resType) :: Nil =>
+ unit.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt)
+ (mparams, resType)
+ case _ =>
+ unit.error(ad.pos, "Cannot resolve overload.")
+ (Nil, NoType)
+ }
+ }
+ typedPos {
+ val sym = currentOwner.newValue(mkTerm("qual"), ad.pos) setInfo qual0.tpe
+ qual = REF(sym)
- BLOCK(
- VAL(sym) === qual0,
- callAsReflective(mparams map (_.tpe), resType)
- )
- }
+ BLOCK(
+ VAL(sym) === qual0,
+ callAsReflective(mparams map (_.tpe), resType)
+ )
+ }
}
/* For testing purposes, the dynamic application's condition
@@ -606,14 +631,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
transformApply
- // This transform replaces Array(Predef.wrapArray(Array(...)), <tag>)
- // with just Array(...)
- case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(array)), _))
- if (wrapRefArrayMeth.symbol == Predef_wrapRefArray &&
- appMeth.symbol == ArrayModule_overloadedApply.suchThat {
- _.tpe.resultType.dealias.typeSymbol == ObjectClass
- }) =>
- super.transform(array)
+ // Replaces `Array(Predef.wrapArray(ArrayValue(...).$asInstanceOf[...]), <tag>)`
+ // with just `ArrayValue(...).$asInstanceOf[...]`
+ //
+ // See SI-6611; we must *only* do this for literal vararg arrays.
+ case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _))
+ if wrapRefArrayMeth.symbol == Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply =>
+ super.transform(arg)
+ case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _)))))
+ if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) =>
+ super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems))
case _ =>
super.transform(tree)
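For context, a hedged sketch of the source-level pattern the two new cases above rewrite; the commented trees are approximate, not actual compiler output:

    // Illustrative only (SI-6611): literal vararg Array construction.
    val xs = Array("a", "b", "c")
    // Before cleanup this is roughly
    //   Array.apply(Predef.wrapRefArray(ArrayValue("a", "b", "c").$asInstanceOf[...]), <tag>)
    // and the first case keeps only the literal ArrayValue, dropping the wrapper.
    val ys = Array(1, 2, 3)
    // The second case handles the primitive overloads of the form Array.apply(elem, rest: _*),
    // prepending elem onto the literal ArrayValue that backs rest.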
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index ec0797acb5..4891ef2fd1 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -511,6 +511,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
sym = closureClass,
constrMods = Modifiers(0),
vparamss = List(List(outerFieldDef)),
+ argss = ListOfNil,
body = List(applyMethodDef),
superPos = impl.pos)
}
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 41aada473a..889d309ba9 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -1057,17 +1057,17 @@ abstract class Erasure extends AddInterfaces
Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe)))
}
case RefinedType(parents, decls) if (parents.length >= 2) =>
- // Optimization: don't generate isInstanceOf tests if the static type
- // conforms, because it always succeeds. (Or at least it had better.)
- // At this writing the pattern matcher generates some instance tests
- // involving intersections where at least one parent is statically known true.
- // That needs fixing, but filtering the parents here adds an additional
- // level of robustness (in addition to the short term fix.)
- val parentTests = parents filterNot (qual.tpe <:< _)
-
- if (parentTests.isEmpty) Literal(Constant(true))
- else gen.evalOnce(qual, currentOwner, unit) { q =>
- atPos(tree.pos) {
+ gen.evalOnce(qual, currentOwner, unit) { q =>
+ // Optimization: don't generate isInstanceOf tests if the static type
+ // conforms, because it always succeeds. (Or at least it had better.)
+ // At this writing the pattern matcher generates some instance tests
+ // involving intersections where at least one parent is statically known true.
+ // That needs fixing, but filtering the parents here adds an additional
+ // level of robustness (in addition to the short term fix.)
+ val parentTests = parents filterNot (qual.tpe <:< _)
+
+ if (parentTests.isEmpty) Literal(Constant(true))
+ else atPos(tree.pos) {
parentTests map mkIsInstanceOf(q) reduceRight gen.mkAnd
}
}
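For context, a hedged sketch of what the relocated logic produces; the names are illustrative:

    // Illustrative only: an intersection-type instance test after erasure.
    trait A; trait B
    def check(x: Any): Boolean = x.isInstanceOf[A with B]
    // conceptually expands, with `x` evaluated once via gen.evalOnce, to
    //   { val q = x; q.isInstanceOf[A] && q.isInstanceOf[B] }
    // parents the static type of the qualifier already conforms to are filtered out first,
    // and if none remain the whole test collapses to Literal(Constant(true)).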
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 0575254c26..2f28a16416 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -11,6 +11,7 @@ import Flags.{ CASE => _, _ }
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import matching.{ Patterns, ParallelMatching }
+import scala.tools.nsc.settings.ScalaVersion
/** This class ...
*
@@ -583,7 +584,7 @@ abstract class ExplicitOuter extends InfoTransform
}
case _ =>
- if (settings.Xmigration28.value) tree match {
+ if (settings.Xmigration.value < ScalaVersion.twoDotEight) tree match {
case TypeApply(fn @ Select(qual, _), args) if fn.symbol == Object_isInstanceOf || fn.symbol == Any_isInstanceOf =>
if (isArraySeqTest(qual.tpe, args.head.tpe))
unit.warning(tree.pos, "An Array will no longer match as Seq[_].")
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 79e51d5daa..bc54054028 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -64,43 +64,69 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
}
}
- /** Return the extension method that corresponds to given instance method `meth`.
- */
+ private def companionModuleForce(sym: Symbol) = {
+ sym.andAlso(_.owner.initialize) // See SI-6976. `companionModule` only calls `rawInfo`. (Why?)
+ sym.companionModule
+ }
+
+ /** Return the extension method that corresponds to given instance method `meth`. */
def extensionMethod(imeth: Symbol): Symbol = atPhase(currentRun.refchecksPhase) {
- val companionInfo = imeth.owner.companionModule.info
+ val companionInfo = companionModuleForce(imeth.owner).info
val candidates = extensionNames(imeth) map (companionInfo.decl(_)) filter (_.exists)
val matching = candidates filter (alt => normalize(alt.tpe, imeth.owner) matches imeth.tpe)
assert(matching.nonEmpty,
- s"no extension method found for $imeth:${imeth.tpe} among ${candidates map (c => c.name+":"+c.tpe)} / ${extensionNames(imeth)}")
+ sm"""|no extension method found for:
+ |
+ | $imeth:${imeth.tpe}
+ |
+ | Candidates:
+ |
+ | ${candidates.map(c => c.name+":"+c.tpe).mkString("\n")}
+ |
+ | Candidates (signatures normalized):
+ |
+ | ${candidates.map(c => c.name+":"+normalize(c.tpe, imeth.owner)).mkString("\n")}
+ |
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}"""")
matching.head
}
+ /** Recognize a MethodType which represents an extension method.
+ *
+ * It may have a curried parameter list with the `$this` alone in the first
+ * parameter list, in which case that parameter list is dropped. Or, since
+ * the curried lists disappear during uncurry, it may have a single parameter
+ * list with `$this` as the first parameter, in which case that parameter is
+ * removed from the list.
+ */
+ object ExtensionMethodType {
+ def unapply(tp: Type) = tp match {
+ case MethodType(thiz :: rest, restpe) if thiz.name == nme.SELF =>
+ Some((thiz, if (rest.isEmpty) restpe else MethodType(rest, restpe) ))
+ case _ =>
+ None
+ }
+ }
+
/** This method removes the `$this` argument from the parameter list a method.
*
* A method may be a `PolyType`, in which case we tear out the `$this` and the class
- * type params from its nested `MethodType`.
- * It may be a `MethodType`, either with a curried parameter list in which the first argument
- * is a `$this` - we just return the rest of the list.
- * This means that the corresponding symbol was generated during `extmethods`.
- *
- * It may also be a `MethodType` in which the `$this` does not appear in a curried parameter list.
- * The curried lists disappear during `uncurry`, and the methods may be duplicated afterwards,
- * for instance, during `specialize`.
- * In this case, the first argument is `$this` and we just get rid of it.
+ * type params from its nested `MethodType`. Or it may be a MethodType, as
+ * described at the ExtensionMethodType extractor.
*/
private def normalize(stpe: Type, clazz: Symbol): Type = stpe match {
case PolyType(tparams, restpe) =>
- GenPolyType(tparams dropRight clazz.typeParams.length, normalize(restpe.substSym(tparams takeRight clazz.typeParams.length, clazz.typeParams), clazz))
- case MethodType(List(thiz), restpe) if thiz.name == nme.SELF =>
- restpe
- case MethodType(tparams, restpe) =>
- MethodType(tparams.drop(1), restpe)
+ // method type parameters, class type parameters
+ val (mtparams, ctparams) = tparams splitAt (tparams.length - clazz.typeParams.length)
+ GenPolyType(mtparams,
+ normalize(restpe.substSym(ctparams, clazz.typeParams), clazz))
+ case ExtensionMethodType(thiz, etpe) =>
+ etpe.substituteTypes(thiz :: Nil, clazz.thisType :: Nil)
case _ =>
stpe
}
class Extender(unit: CompilationUnit) extends TypingTransformer(unit) {
-
private val extensionDefs = mutable.Map[Symbol, mutable.ListBuffer[Tree]]()
def checkNonCyclic(pos: Position, seen: Set[Symbol], clazz: Symbol): Unit =
@@ -111,27 +137,54 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
+ /** We will need to clone the info of the original method (which obtains clones
+ * of the method type parameters), clone the type parameters of the value class,
+ * and create a new polymethod with the union of all those type parameters, with
+ * their infos adjusted to be consistent with their new home. Example:
+ *
+ * class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ * def baz[B >: A](x: B): List[B] = x :: xs
+ * // baz has to be transformed into this extension method, where
+ * // A is cloned from class Foo and B is cloned from method baz:
+ * // def extension$baz[B >: A <: Any, A >: Nothing <: AnyRef]($this: Foo[A])(x: B): List[B]
+ * }
+ *
+ * TODO: factor out the logic for consolidating type parameters from a class
+ * and a method for re-use elsewhere, because nobody will get this right without
+ * some higher level facilities.
+ */
def extensionMethInfo(extensionMeth: Symbol, origInfo: Type, clazz: Symbol): Type = {
- // No variance for method type parameters
- var newTypeParams = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
- val thisParamType = appliedType(clazz.typeConstructor, newTypeParams map (_.tpeHK))
+ val GenPolyType(tparamsFromMethod, methodResult) = origInfo cloneInfo extensionMeth
+ // Start with the class type parameters - clones will be method type parameters
+ // so must drop their variance.
+ val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT)
+
+ val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*)
val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType
- def transform(clonedType: Type): Type = clonedType match {
- case MethodType(params, restpe) =>
- // I assume it was a bug that this was dropping params... [Martin]: No, it wasn't; it's curried.
- MethodType(List(thisParam), clonedType)
- case NullaryMethodType(restpe) =>
- MethodType(List(thisParam), restpe)
- }
- val GenPolyType(tparams, restpe) = origInfo cloneInfo extensionMeth
- GenPolyType(tparams ::: newTypeParams, transform(restpe) substSym (clazz.typeParams, newTypeParams))
- }
+ val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult))
+ val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam)
- private def allParams(tpe: Type): List[Symbol] = tpe match {
- case MethodType(params, res) => params ::: allParams(res)
- case _ => List()
- }
+ def fixres(tp: Type) = tp substThisAndSym (clazz, selfParamType, clazz.typeParams, tparamsFromClass)
+ def fixtparam(tp: Type) = tp substSym (clazz.typeParams, tparamsFromClass)
+ // We can't substitute symbols on the entire polytype because we
+ // need to modify the bounds of the cloned type parameters, but we
+ // don't want to substitute for the cloned type parameters themselves.
+ val tparams = tparamsFromMethod ::: tparamsFromClass
+ GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType))
+
+ // For reference, calling fix on the GenPolyType plays out like this:
+ // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966]
+ // do not conform to method extension$baz#16148's type parameter bounds
+ //
+ // And the difference is visible here. See how B is bounded from below by A#16149
+ // in both cases, but in the failing case, the other type parameter has turned into
+ // a different A. (What is that A? It is a clone of the original A created in
+ // SubstMap during the call to substSym, but I am not clear on all the particulars.)
+ //
+ // bad: [B#16154 >: A#16149, A#16155 <: AnyRef#2189]($this#16156: Foo#6965[A#16155])(x#16157: B#16154)List#2457[B#16154]
+ // good: [B#16151 >: A#16149, A#16149 <: AnyRef#2189]($this#16150: Foo#6965[A#16149])(x#16153: B#16151)List#2457[B#16151]
+ }
override def transform(tree: Tree): Tree = {
tree match {
case Template(_, _, _) =>
@@ -140,42 +193,62 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
wrap over other value classes anyway.
checkNonCyclic(currentOwner.pos, Set(), currentOwner) */
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
} else tree
case DefDef(_, _, tparams, vparamss, _, rhs) if tree.symbol.isMethodWithExtension =>
- val companion = currentOwner.companionModule
- val origMeth = tree.symbol
- val extensionName = extensionNames(origMeth).head
- val extensionMeth = companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
- .setAnnotations(origMeth.annotations)
- companion.info.decls.enter(extensionMeth)
- val newInfo = extensionMethInfo(extensionMeth, origMeth.info, currentOwner)
+ val origMeth = tree.symbol
+ val origThis = currentOwner
+ val origTpeParams = tparams.map(_.symbol) ::: origThis.typeParams // method type params ++ class type params
+ val origParams = vparamss.flatten map (_.symbol)
+ val companion = origThis.companionModule
+
+ def makeExtensionMethodSymbol = {
+ val extensionName = extensionNames(origMeth).head
+ val extensionMeth = (
+ companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
+ setAnnotations origMeth.annotations
+ )
+ companion.info.decls.enter(extensionMeth)
+ }
+
+ val extensionMeth = makeExtensionMethodSymbol
+ val newInfo = extensionMethInfo(extensionMeth, origMeth.info, origThis)
extensionMeth setInfo newInfo
- log("Value class %s spawns extension method.\n Old: %s\n New: %s".format(
- currentOwner,
- origMeth.defString,
- extensionMeth.defString)) // extensionMeth.defStringSeenAs(origInfo
-
- def thisParamRef = gen.mkAttributedIdent(extensionMeth.info.params.head setPos extensionMeth.pos)
- val GenPolyType(extensionTpeParams, extensionMono) = extensionMeth.info
- val origTpeParams = (tparams map (_.symbol)) ::: currentOwner.typeParams
- val extensionBody = rhs
+
+ log(s"Value class $origThis spawns extension method.\n Old: ${origMeth.defString}\n New: ${extensionMeth.defString}")
+
+ val GenPolyType(extensionTpeParams, MethodType(thiz :: Nil, extensionMono)) = newInfo
+ val extensionParams = allParameters(extensionMono)
+ val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
+
+ val extensionBody = (
+ rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
- .substituteSymbols(vparamss.flatten map (_.symbol), allParams(extensionMono).tail)
- .substituteThis(currentOwner, thisParamRef)
- .changeOwner((origMeth, extensionMeth))
- extensionDefs(companion) += atPos(tree.pos) { DefDef(extensionMeth, extensionBody) }
- val extensionCallPrefix = Apply(
- gen.mkTypeApply(gen.mkAttributedRef(companion), extensionMeth, origTpeParams map (_.tpeHK)),
- List(This(currentOwner)))
- val extensionCall = atOwner(origMeth) {
- localTyper.typedPos(rhs.pos) {
- gen.mkForwarder(extensionCallPrefix, mmap(vparamss)(_.symbol))
- }
- }
- deriveDefDef(tree)(_ => extensionCall)
+ .substituteSymbols(origParams, extensionParams)
+ .substituteThis(origThis, extensionThis)
+ .changeOwner(origMeth -> extensionMeth)
+ )
+
+ // Record the extension method ( FIXME: because... ? )
+ extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody))
+
+ // These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
+ // which leaves the actual argument application for extensionCall.
+ val sel = Select(gen.mkAttributedRef(companion), extensionMeth)
+ val targs = origTpeParams map (_.tpeHK)
+ val callPrefix = gen.mkMethodCall(sel, targs, This(origThis) :: Nil)
+
+ // Apply all the argument lists.
+ deriveDefDef(tree)(_ =>
+ atOwner(origMeth)(
+ localTyper.typedPos(rhs.pos)(
+ gen.mkForwarder(callPrefix, mmap(vparamss)(_.symbol))
+ )
+ )
+ )
case _ =>
super.transform(tree)
}
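To make the rewiring concrete, a hedged sketch (class and names invented) of the shape the Extender produces for a value-class method; the generated code is shown in comments and is approximate:

    // Illustrative only: a value class whose method gets an extension-method twin.
    class Meter(val value: Double) extends AnyVal {
      def plus(other: Meter): Meter = new Meter(value + other.value)
    }
    // After extmethods, roughly:
    //   object Meter {
    //     final def plus$extension($this: Meter)(other: Meter): Meter =
    //       new Meter($this.value + other.value)
    //   }
    //   class Meter(val value: Double) extends AnyVal {
    //     def plus(other: Meter): Meter = Meter.plus$extension(this)(other)  // the forwarder built above
    //   }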
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index d912b76f68..631468dd0c 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -369,6 +369,7 @@ abstract class LambdaLift extends InfoTransform {
copyDefDef(tree)(vparamss = List(vparams ++ freeParams))
case ClassDef(_, _, _, _) =>
+ // SI-6231
// Disabled attempt to add getters to freeParams
// this does not work yet. Problem is that local symbols need local names
// and references to local symbols need to be transformed into
@@ -386,7 +387,7 @@ abstract class LambdaLift extends InfoTransform {
tree
}
-/* Something like this will be necessary to eliminate the implementation
+/* SI-6231: Something like this will be necessary to eliminate the implementation
 * restriction from paramGetter above:
* We need to pass getters to the interface of an implementation class.
private def fixTraitGetters(lifted: List[Tree]): List[Tree] =
@@ -447,28 +448,53 @@ abstract class LambdaLift extends InfoTransform {
/* Creating a constructor argument if one isn't present. */
val constructorArg = rhs match {
case EmptyTree =>
- sym.primaryConstructor.info.paramTypes match {
+ sym.tpe.typeSymbol.primaryConstructor.info.paramTypes match {
case List(tp) => gen.mkZero(tp)
case _ =>
- log("Couldn't determine how to properly construct " + sym)
+ debugwarn("Couldn't determine how to properly construct " + sym)
rhs
}
case arg => arg
}
- /** Wrap expr argument in new *Ref(..) constructor, but make
- * sure that Try expressions stay at toplevel.
+
+ /** Wrap expr argument in new *Ref(..) constructor. But try/catch
+ * is a problem because a throw will clear the stack and post catch
+ * we would expect the partially-constructed object to be on the stack
+ * for the call to init. So we recursively
+ * search for "leaf" result expressions where we know it's safe
+ * to put the new *Ref(..) constructor or, if all else fails, transform
+ * an expr to { val temp=expr; new *Ref(temp) }.
+ * The reason we narrowly look for try/catch in captured var definitions
+ * is because other try/catch expressions have already been lifted;
+ * see SI-6863
*/
- def refConstr(expr: Tree): Tree = expr match {
+ def refConstr(expr: Tree): Tree = typer.typedPos(expr.pos) {expr match {
+ // very simple expressions can be wrapped in a new *Ref(expr) because they can't have
+ // a try/catch in final expression position.
+ case Ident(_) | Apply(_, _) | Literal(_) | New(_) | Select(_, _) | Throw(_) | Assign(_, _) | ValDef(_, _, _, _) | Return(_) | EmptyTree =>
+ New(sym.tpe, expr)
case Try(block, catches, finalizer) =>
Try(refConstr(block), catches map refConstrCase, finalizer)
+ case Block(stats, expr) =>
+ Block(stats, refConstr(expr))
+ case If(cond, trueBranch, falseBranch) =>
+ If(cond, refConstr(trueBranch), refConstr(falseBranch))
+ case Match(selector, cases) =>
+ Match(selector, cases map refConstrCase)
+ // if we can't figure out what else to do, turn expr into {val temp1 = expr; new *Ref(temp1)} to avoid
+ // any possibility of try/catch in the *Ref constructor. This should be a safe transformation as a default
+ // though it potentially wastes a variable slot. In particular this case handles LabelDefs.
case _ =>
- New(sym.tpe, expr)
- }
+ debuglog("assigning expr to temp: " + (expr.pos))
+ val tempSym = currentOwner.newValue(unit.freshTermName("temp"), expr.pos) setInfo expr.tpe
+ val tempDef = ValDef(tempSym, expr) setPos expr.pos
+ val tempRef = Ident(tempSym) setPos expr.pos
+ Block(tempDef, New(sym.tpe, tempRef))
+ }}
def refConstrCase(cdef: CaseDef): CaseDef =
CaseDef(cdef.pat, cdef.guard, refConstr(cdef.body))
- treeCopy.ValDef(tree, mods, name, tpt1, typer.typedPos(rhs.pos) {
- refConstr(constructorArg)
- })
+
+ treeCopy.ValDef(tree, mods, name, tpt1, refConstr(constructorArg))
} else tree
case Return(Block(stats, value)) =>
Block(stats, treeCopy.Return(tree, value)) setType tree.tpe setPos tree.pos
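A hedged sketch of the capture pattern the new refConstr cases defend against (SI-6863); the names are illustrative:

    // Illustrative only: a captured var whose initializer is a try/catch.
    def example(compute: () => String): () => String = {
      var cached = try compute() catch { case _: Exception => "" }
      // `cached` is captured and mutated by the closure, so lambdalift boxes it in an ObjectRef.
      // Wrapping the try/catch directly as `new ObjectRef(try ... catch ...)` would leave the
      // half-constructed ref on the stack across a throw, hence the rewrite above to
      //   { val temp = try ... catch ...; new ObjectRef(temp) }
      () => { cached += "!"; cached }
    }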
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 57bdaea17a..c9c68d080d 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -867,7 +867,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
rhs match {
case Block(List(assign), returnTree) =>
val Assign(moduleVarRef, _) = assign
- val cond = Apply(Select(moduleVarRef, nme.eq), List(NULL))
+ val cond = Apply(Select(moduleVarRef, Object_eq), List(NULL))
mkFastPathBody(clazz, moduleSym, cond, List(assign), List(NULL), returnTree, attrThis, args)
case _ =>
abort("Invalid getter " + rhs + " for module in class " + clazz)
@@ -1215,9 +1215,24 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
- val getter = sym getter iface orElse abort("No getter for " + sym + " in " + iface)
+ val ifaceGetter = sym getter iface
+
+ def si6231Restriction() {
+ // See SI-6231 comments in LambdaLift for ideas on how to lift the restriction.
+ val msg = sm"""Implementation restriction: local ${iface.fullLocationString} is unable to automatically capture the
+ |free variable ${sym} on behalf of ${currentClass}. You can manually assign it to a val inside the trait,
+ |and refer to that val in ${currentClass}. For more details, see SI-6231."""
+ reporter.error(tree.pos, msg)
+ }
- typedPos(tree.pos)((qual DOT getter)())
+ if (ifaceGetter == NoSymbol) {
+ if (sym.isParamAccessor) {
+ si6231Restriction()
+ EmptyTree
+ }
+ else abort("No getter for " + sym + " in " + iface)
+ }
+ else typedPos(tree.pos)((qual DOT ifaceGetter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
// assign to fields in some implementation class via an abstract
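A hedged sketch of the kind of code that now gets the implementation-restriction error instead of a compiler abort; whether this exact snippet is rejected depends on the compiler version:

    // Illustrative only (SI-6231): a local trait capturing a free variable.
    def render(prefix: String): String = {
      trait Labelled {
        def label(s: String) = prefix + s   // `prefix` is free here; capturing it would
      }                                     // require the unsupported getter described above
      (new Labelled {}).label("x")
    }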
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index bbab545d9e..232148676c 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -178,6 +178,14 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
case class Overload(sym: Symbol, env: TypeEnv) {
override def toString = "specialized overload " + sym + " in " + env
+ def matchesSym(other: Symbol) = sym.tpe =:= other.tpe
+ def matchesEnv(env1: TypeEnv) = TypeEnv.includes(env, env1)
+ }
+ private def newOverload(method: Symbol, specializedMethod: Symbol, env: TypeEnv) = {
+ assert(!specializedMethod.isOverloaded, specializedMethod.defString)
+ val om = Overload(specializedMethod, env)
+ overloads(method) ::= om
+ om
}
/** Just to mark uncheckable */
@@ -289,10 +297,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
}
- /** Return the specialized overload of sym in the given env, if any. */
- def overload(sym: Symbol, env: TypeEnv) =
- overloads(sym).find(ov => TypeEnv.includes(ov.env, env))
-
/** Return the specialized name of 'sym' in the given environment. It
* guarantees the same result regardless of the map order by sorting
* type variables alphabetically.
@@ -628,7 +632,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
info(om) = if (original.isDeferred) Forward(original) else Implementation(original)
typeEnv(om) = env ++ typeEnv(m) // add the environment for any method tparams
- overloads(specMember) ::= Overload(om, typeEnv(om))
+ newOverload(specMember, om, typeEnv(om))
enterMember(om)
}
@@ -835,7 +839,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("%s expands to %s in %s".format(sym, specMember.name.decode, pp(env)))
info(specMember) = NormalizedMember(sym)
- overloads(sym) ::= Overload(specMember, env)
+ newOverload(sym, specMember, env)
owner.info.decls.enter(specMember)
specMember
}
@@ -877,9 +881,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
if (wasSpec.nonEmpty)
debuglog("specialized overload for %s in %s".format(specMember, pp(typeEnv(specMember))))
- overloads(sym) ::= Overload(specMember, spec)
+ newOverload(sym, specMember, spec)
info(specMember) = SpecialOverload(sym, typeEnv(specMember))
-
specMember
}
@@ -994,7 +997,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
SpecialOverride(impl)
}
)
- overloads(overriding) ::= Overload(om, env)
+ newOverload(overriding, om, env)
ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
"Could not find " + om.name + " in " + overridden.owner.info.decls))
@@ -1360,7 +1363,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("obtained env: " + e)
e.keySet == env.keySet
} catch {
- case _ =>
+ case _: Throwable =>
debuglog("Could not unify.")
false
}
@@ -1476,54 +1479,41 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
transformTypeApply
- case Select(qual, name) =>
- def transformSelect = {
- qual match {
- case _: Super if illegalSpecializedInheritance(currentClass) =>
- val pos = tree.pos
- debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.")
- debuglog(pos.lineContent)
- tree
- case _ =>
+ case Select(Super(_, _), _) if illegalSpecializedInheritance(currentClass) =>
+ val pos = tree.pos
+ debuglog(pos.source.file.name+":"+pos.line+": not specializing call to super inside illegal specialized inheritance class.\n" + pos.lineContent)
+ tree
+ case Select(qual, name) if name != nme.CONSTRUCTOR && specializedTypeVars(symbol.info).nonEmpty =>
debuglog("specializing Select %s [tree.tpe: %s]".format(symbol.defString, tree.tpe))
-
- //log("!!! select " + tree + " -> " + symbol.info + " specTypeVars: " + specializedTypeVars(symbol.info))
- if (specializedTypeVars(symbol.info).nonEmpty && name != nme.CONSTRUCTOR) {
- // log("!!! unifying " + (symbol, symbol.tpe) + " and " + (tree, tree.tpe))
- val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
- // log("!!! found env: " + env + "; overloads: " + overloads(symbol))
- if (!env.isEmpty) {
- // debuglog("checking for rerouting: " + tree + " with sym.tpe: " + symbol.tpe + " tree.tpe: " + tree.tpe + " env: " + env)
- val specMember = overload(symbol, env)
- if (specMember.isDefined) {
- localTyper.typedOperator(atPos(tree.pos)(Select(transform(qual), specMember.get.sym.name)))
- }
- else {
- val qual1 = transform(qual)
+ val env = unify(symbol.tpe, tree.tpe, emptyEnv, false)
+ if (env.isEmpty) super.transform(tree)
+ else {
+ val qual1 = transform(qual)
+ def reselect(member: Symbol) = {
+ val newSelect = atPos(tree.pos)(Select(qual1, member))
+ if (member.isMethod) localTyper typedOperator newSelect
+ else localTyper typed newSelect
+ }
+ overloads(symbol) find (_ matchesEnv env) match {
+ case Some(Overload(member, _)) => reselect(member)
+ case _ =>
val specMember = qual1.tpe.member(specializedName(symbol, env)).suchThat(_.tpe matches subst(env, symbol.tpe))
- if (specMember ne NoSymbol) {
- val tree1 = atPos(tree.pos)(Select(qual1, specMember))
- if (specMember.isMethod)
- localTyper.typedOperator(tree1)
- else
- localTyper.typed(tree1)
- } else
+ if (specMember ne NoSymbol)
+ reselect(specMember)
+ else
treeCopy.Select(tree, qual1, name)
- }
- } else
- super.transform(tree)
- } else overloads(symbol).find(_.sym.info =:= symbol.info) match {
- case Some(specMember) =>
- val qual1 = transform(qual)
- debuglog("** routing " + tree + " to " + specMember.sym.fullName + " tree: " + Select(qual1, specMember.sym))
- localTyper.typedOperator(atPos(tree.pos)(Select(qual1, specMember.sym)))
- case None =>
- super.transform(tree)
- }
+ }
}
+ case Select(qual, _) =>
+ overloads(symbol) find (_ matchesSym symbol) match {
+ case Some(Overload(member, _)) =>
+ val newTree = Select(transform(qual), member)
+ debuglog(s"** routing $tree to ${member.fullName} tree: $newTree")
+ localTyper.typedOperator(atPos(tree.pos)(newTree))
+ case None =>
+ super.transform(tree)
}
- transformSelect
case PackageDef(pid, stats) =>
tree.symbol.info // make sure specializations have been performed
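To ground the rerouting above, a hedged sketch of the user-level situation; the specialized member name in the comment follows the usual $mc...$sp naming scheme but is illustrative:

    // Illustrative only: a call that the Select cases above may reroute.
    class Box[@specialized(Int) T](val value: T) {
      def get: T = value
    }
    def readInt(b: Box[Int]): Int = b.get
    // After specialization, `b.get` is intended to resolve to the Int-specialized member
    // (named along the lines of `get$mcI$sp`), found either among the recorded Overloads
    // or by looking up specializedName in the qualifier's type, as in the code above.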
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 9908bd689e..e9f403aea0 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -231,7 +231,17 @@ abstract class UnCurry extends InfoTransform
* If `settings.XoldPatmat.value`, also synthesized AbstractPartialFunction subclasses (see synthPartialFunction).
*
*/
- def transformFunction(fun: Function): Tree =
+ def transformFunction(fun: Function): Tree = {
+ fun.tpe match {
+ // can happen when analyzer plugins assign refined types to functions, e.g.
+ // (() => Int) { def apply(): Int @typeConstraint }
+ case RefinedType(List(funTp), decls) =>
+ debuglog(s"eliminate refinement from function type ${fun.tpe}")
+ fun.tpe = funTp
+ case _ =>
+ ()
+ }
+
deEta(fun) match {
// nullary or parameterless
case fun1 if fun1 ne fun => fun1
@@ -239,10 +249,7 @@ abstract class UnCurry extends InfoTransform
// only get here when running under -Xoldpatmat
synthPartialFunction(fun)
case _ =>
- val parents = (
- if (isFunctionType(fun.tpe)) addSerializable(abstractFunctionForFunctionType(fun.tpe))
- else addSerializable(ObjectClass.tpe, fun.tpe)
- )
+ val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe))
val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation serialVersionUIDAnnotation
anonClass setInfo ClassInfoType(parents, newScope, anonClass)
@@ -270,11 +277,12 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
+ }
/** Transform a function node (x => body) of type PartialFunction[T, R] where
* body = expr match { case P_i if G_i => E_i }_i=1..n
@@ -395,7 +403,7 @@ abstract class UnCurry extends InfoTransform
localTyper.typedPos(fun.pos) {
Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
+ List(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, List(applyOrElseMethodDef, isDefinedAtMethodDef), fun.pos)),
Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
}
}
@@ -603,8 +611,6 @@ abstract class UnCurry extends InfoTransform
}
case ValDef(_, _, _, rhs) =>
if (sym eq NoSymbol) throw new IllegalStateException("Encountered Valdef without symbol: "+ tree + " in "+ unit)
- // a local variable that is mutable and free somewhere later should be lifted
- // as lambda lifting (coming later) will wrap 'rhs' in an Ref object.
if (!sym.owner.isSourceMethod)
withNeedLift(true) { super.transform(tree) }
else
@@ -629,7 +635,7 @@ abstract class UnCurry extends InfoTransform
}
}
- case Assign(Select(_, _), _) =>
+ case Assign(_: RefTree, _) =>
withNeedLift(true) { super.transform(tree) }
case Assign(lhs, _) if lhs.symbol.owner != currentMethod || lhs.symbol.hasFlag(LAZY | ACCESSOR) =>
@@ -748,15 +754,22 @@ abstract class UnCurry extends InfoTransform
}
case dd @ DefDef(_, _, _, vparamss0, _, rhs0) =>
- val vparamss1 = vparamss0 match {
- case _ :: Nil => vparamss0
- case _ => vparamss0.flatten :: Nil
- }
+ val (newParamss, newRhs): (List[List[ValDef]], Tree) =
+ if (dependentParamTypeErasure isDependent dd)
+ dependentParamTypeErasure erase dd
+ else {
+ val vparamss1 = vparamss0 match {
+ case _ :: Nil => vparamss0
+ case _ => vparamss0.flatten :: Nil
+ }
+ (vparamss1, rhs0)
+ }
+
val flatdd = copyDefDef(dd)(
- vparamss = vparamss1,
+ vparamss = newParamss,
rhs = nonLocalReturnKeys get dd.symbol match {
- case Some(k) => atPos(rhs0.pos)(nonLocalReturnTry(rhs0, k, dd.symbol))
- case None => rhs0
+ case Some(k) => atPos(newRhs.pos)(nonLocalReturnTry(newRhs, k, dd.symbol))
+ case None => newRhs
}
)
addJavaVarargsForwarders(dd, flatdd)
@@ -782,6 +795,104 @@ abstract class UnCurry extends InfoTransform
}
}
+ /**
+ * When we concatenate parameter lists, formal parameter types that were dependent
+ * on prior parameter values will no longer be correctly scoped.
+ *
+ * For example:
+ *
+ * {{{
+ * def foo(a: A)(b: a.B): a.type = {b; b}
+ * // after uncurry
+ * def foo(a: A, b: a/* NOT IN SCOPE! */.B): a.B = {b; b}
+ * }}}
+ *
+ * This violates the principle that each compiler phase should produce trees that
+ * can be retyped (see [[scala.tools.nsc.typechecker.TreeCheckers]]), and causes
+ * a practical problem in `erasure`: it is not able to correctly determine if
+ * such a signature overrides a corresponding signature in a parent. (SI-6443).
+ *
+ * This transformation erases the dependent method types by:
+ * - Widening the formal parameter type to existentially abstract
+ * over the prior parameters (using `packSymbols`)
+ * - Inserting casts in the method body to cast to the original,
+ * precise type.
+ *
+ * For the example above, this results in:
+ *
+ * {{{
+ * def foo(a: A, b: a.B forSome { val a: A }): a.B = { val b$1 = b.asInstanceOf[a.B]; b$1; b$1 }
+ * }}}
+ */
+ private object dependentParamTypeErasure {
+ sealed abstract class ParamTransform {
+ def param: ValDef
+ }
+ final case class Identity(param: ValDef) extends ParamTransform
+ final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform
+
+ def isDependent(dd: DefDef): Boolean =
+ beforeUncurry {
+ val methType = dd.symbol.info
+ methType.isDependentMethodType && mexists(methType.paramss)(_.info exists (_.isImmediatelyDependent))
+ }
+
+ /**
+ * @return (newVparamss, newRhs)
+ */
+ def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
+ import dd.{ vparamss, rhs }
+ val vparamSyms = vparamss flatMap (_ map (_.symbol))
+
+ val paramTransforms: List[ParamTransform] =
+ vparamss.flatten.map { p =>
+ val declaredType = p.symbol.info
+ // existentially abstract over value parameters
+ val packedType = typer.packSymbols(vparamSyms, declaredType)
+ if (packedType =:= declaredType) Identity(p)
+ else {
+ // Change the type of the param symbol
+ p.symbol updateInfo packedType
+
+ // Create a new param tree
+ val newParam: ValDef = copyValDef(p)(tpt = TypeTree(packedType))
+
+ // Within the method body, we'll cast the parameter to the originally
+ // declared type and assign this to a synthetic val. Later, we'll patch
+ // the method body to refer to this, rather than the parameter.
+ val tempVal: ValDef = {
+ val tempValName = unit freshTermName (p.name + "$")
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ }
+ Packed(newParam, tempVal)
+ }
+ }
+
+ val allParams = paramTransforms map (_.param)
+ val (packedParams, tempVals) = paramTransforms.collect {
+ case Packed(param, tempVal) => (param, tempVal)
+ }.unzip
+
+ val rhs1 = localTyper.typedPos(rhs.pos) {
+ // Patch the method body to refer to the temp vals
+ val rhsSubstituted = rhs.substituteSymbols(packedParams map (_.symbol), tempVals map (_.symbol))
+ // The new method body: { val p$1 = p.asInstanceOf[<dependent type>]; ...; <rhsSubstituted> }
+ Block(tempVals, rhsSubstituted)
+ }
+
+ // update the type of the method after uncurry.
+ dd.symbol updateInfo {
+ val GenPolyType(tparams, tp) = dd.symbol.info
+ logResult(s"erased dependent param types for ${dd.symbol.info}") {
+ GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
+ }
+ }
+ (allParams :: Nil, rhs1)
+ }
+ }
+
+
/* Analyzes repeated params if method is annotated as `varargs`.
* If the repeated params exist, it saves them into the `repeatedParams` map,
* which is used later.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
index 78175f393a..b50486306d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala
@@ -25,6 +25,7 @@ trait Analyzer extends AnyRef
with TypeDiagnostics
with ContextErrors
with StdAttachments
+ with AnalyzerPlugins
{
val global : Global
import global._
diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
new file mode 100644
index 0000000000..28f620dbb5
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala
@@ -0,0 +1,225 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.tools.nsc
+package typechecker
+
+/**
+ * @author Lukas Rytz
+ * @version 1.0
+ */
+trait AnalyzerPlugins { self: Analyzer =>
+ import global._
+
+
+ trait AnalyzerPlugin {
+ /**
+ * Selectively activate this analyzer plugin, e.g. according to the compiler phase.
+ *
+ * Note that the current phase can differ from the global compiler phase (look for `enteringPhase`
+ * invocations in the compiler). For instance, lazy types created by the UnPickler are completed
+ * at the phase in which their symbol is created. Observations show that this can even be the
+ * parser phase. Since symbol completion can trigger subtyping, typing etc, your plugin might
+ * need to be active also in phases other than namer and typer.
+ *
+ * Typically, this method can be implemented as
+ *
+ * global.phase.id < global.currentRun.picklerPhase.id
+ */
+ def isActive(): Boolean = true
+
+ /**
+ * Let analyzer plugins change the expected type before type checking a tree.
+ */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = pt
+
+ /**
+ * Let analyzer plugins modify the type that has been computed for a tree.
+ *
+ * @param tpe The type inferred by the type checker, initially (for first plugin) `tree.tpe`
+ * @param typer The typer that type checked `tree`
+ * @param tree The type-checked tree
+ * @param mode Mode that was used for typing `tree`
+ * @param pt Expected type that was used for typing `tree`
+ */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = tpe
+
+ /**
+ * Let analyzer plugins change the types assigned to definitions. For definitions that have
+ * an annotated type, the assigned type is obtained by typing that type tree. Otherwise, the
+ * type is inferred by typing the definition's righthand side.
+ *
+ * In order to know if the type was inferred, you can query the `wasEmpty` field in the `tpt`
+ * TypeTree of the definition (for DefDef and ValDef).
+ *
+ * (*) If the type of a method or value is inferred, the type-checked tree is stored in the
+ * `analyzer.transformed` hash map, indexed by the definition's rhs tree.
+ *
+ * NOTE: Invoking the type checker can lead to cyclic reference errors. For instance, if this
+ * method is called from the type completer of a recursive method, type checking the method
+ * rhs will invoke the same completer again. It might be possible to avoid this situation by
+ * assigning `tpe` to `defTree.symbol` (untested) - the final type computed by this method
+ * will then be assigned to the definition's symbol by monoTypeCompleter (in Namers).
+ *
+ * The hooks into `typeSig` allow analyzer plugins to add annotations to (or change the types
+ * of) definition symbols. This cannot be achieved by using `pluginsTyped`: this method
+ * is only called during type checking, so changing the type of a symbol at this point is too
+ * late: references to the symbol might already be typed and therefore obtain the original
+ * type assigned during naming.
+ *
+ * @param defTree is the definition for which the type was computed. The different cases are
+ * outlined below. Note that this type is untyped (for methods and values with inferred type,
+ * the typed rhs trees are available in analyzer.transformed).
+ *
+ * Case defTree: Template
+ * - tpe : A ClassInfoType for the template
+ * - typer: The typer for template members, i.e. expressions and definitions of defTree.body
+ * - pt : WildcardType
+ * - the class symbol is accessible through typer.context.owner
+ *
+ * Case defTree: ClassDef
+ * - tpe : A ClassInfoType, or a PolyType(params, ClassInfoType) for polymorphic classes.
+ * The class type is the one computed by templateSig, i.e. through the above case
+ * - typer: The typer for the class. Note that this typer has a different context than the
+ * typer for the template.
+ * - pt : WildcardType
+ *
+ * Case defTree: ModuleDef
+ * - tpe : A ClassInfoType computed by templateSig
+ * - typer: The typer for the module. context.owner of this typer is the module class symbol
+ * - pt : WildcardType
+ *
+ * Case defTree: DefDef
+ * - tpe : The type of the method (MethodType, PolyType or NullaryMethodType). (*)
+ * - typer: The typer for the rhs of this method
+ * - pt : If tpt.isEmpty, either the result type from the overridden method, or WildcardType.
+ * Otherwise the type obtained from typing tpt.
+ * - Note that for constructors, pt is the class type which the constructor creates. To type
+ * check the rhs of the constructor however, the expected type has to be WildcardType (see
+ * Typers.typedDefDef)
+ *
+ * Case defTree: ValDef
+ * - tpe : The type of this value. (*)
+ * - typer: The typer for the rhs of this value
+ * - pt : If tpt.isEmpty, WildcardType. Otherwise the type obtained from typing tpt.
+ * - Note that pluginsTypeSig might be called multiple times for the same ValDef since it is
+ * used to compute the types of the accessor methods (see `pluginsTypeSigAccessor`)
+ *
+ * Case defTree: TypeDef
+ * - tpe : The type obtained from typing rhs (PolyType if the TypeDef defines a polymorphic type)
+ * - typer: The typer for the rhs of this type
+ * - pt : WildcardType
+ */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = tpe
+
+ /**
+ * Modify the types of field accessors. The namer phase creates method types for getters and
+ * setters based on the type of the corresponding field.
+ *
+ * Note: in order to compute the method type of an accessor, the namer calls `typeSig` on the
+ * `ValDef` tree of the corresponding field. This implies that the `pluginsTypeSig` method
+ * is potentially called multiple times for the same ValDef tree.
+ *
+ * @param tpe The method type created by the namer for the accessor
+ * @param typer The typer for the ValDef (not for the rhs)
+ * @param tree The ValDef corresponding to the accessor
+ * @param sym The accessor method symbol (getter, setter, beanGetter or beanSetter)
+ */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = tpe
+
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = false
+
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = tree
+
+ /**
+ * Modify the type of a return expression. By default, return expressions have type
+ * NothingClass.tpe.
+ *
+ * @param tpe The type of the return expression
+ * @param typer The typer that was used for typing the return tree
+ * @param tree The typed return expression tree
+ * @param pt The return type of the enclosing method
+ */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe
+ }
+
+
+
+ /** A list of registered analyzer plugins */
+ private var analyzerPlugins: List[AnalyzerPlugin] = Nil
+
+ /** Registers a new analyzer plugin */
+ def addAnalyzerPlugin(plugin: AnalyzerPlugin) {
+ if (!analyzerPlugins.contains(plugin))
+ analyzerPlugins = plugin :: analyzerPlugins
+ }
+
+
+ /** @see AnalyzerPlugin.pluginsPt */
+ def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type =
+ if (analyzerPlugins.isEmpty) pt
+ else analyzerPlugins.foldLeft(pt)((pt, plugin) =>
+ if (!plugin.isActive()) pt else plugin.pluginsPt(pt, typer, tree, mode))
+
+ /** @see AnalyzerPlugin.pluginsTyped */
+ def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTpe = addAnnotations(tree, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersTpe
+ else analyzerPlugins.foldLeft(annotCheckersTpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTyped(tpe, typer, tree, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypeSig */
+ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSig(tpe, typer, defTree, pt))
+
+ /** @see AnalyzerPlugin.pluginsTypeSigAccessor */
+ def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type =
+ if (analyzerPlugins.isEmpty) tpe
+ else analyzerPlugins.foldLeft(tpe)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypeSigAccessor(tpe, typer, tree, sym))
+
+ /** @see AnalyzerPlugin.canAdaptAnnotations */
+ def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersExists = global.canAdaptAnnotations(tree, mode, pt)
+ annotCheckersExists || {
+ if (analyzerPlugins.isEmpty) false
+ else analyzerPlugins.exists(plugin =>
+ plugin.isActive() && plugin.canAdaptAnnotations(tree, typer, mode, pt))
+ }
+ }
+
+ /** @see AnalyzerPlugin.adaptAnnotations */
+ def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
+ // support deprecated methods in annotation checkers
+ val annotCheckersTree = global.adaptAnnotations(tree, mode, pt)
+ if (analyzerPlugins.isEmpty) annotCheckersTree
+ else analyzerPlugins.foldLeft(annotCheckersTree)((tree, plugin) =>
+ if (!plugin.isActive()) tree else plugin.adaptAnnotations(tree, typer, mode, pt))
+ }
+
+ /** @see AnalyzerPlugin.pluginsTypedReturn */
+ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val annotCheckersType = adaptTypeOfReturn(tree.expr, pt, tpe)
+ if (analyzerPlugins.isEmpty) annotCheckersType
+ else analyzerPlugins.foldLeft(annotCheckersType)((tpe, plugin) =>
+ if (!plugin.isActive()) tpe else plugin.pluginsTypedReturn(tpe, typer, tree, pt))
+ }
+}
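For orientation, a minimal sketch of how client code might register one of these hooks; the surrounding component is invented, and only addAnalyzerPlugin, isActive and pluginsTyped come from the trait above:

    // Minimal sketch, assuming a scala.tools.nsc.Global instance (e.g. inside a compiler plugin).
    import scala.tools.nsc.Global

    class TypeLoggingComponent(val global: Global) {
      import global._

      object typeLogger extends analyzer.AnalyzerPlugin {
        // stay active only up to pickler, as suggested by the isActive doc above
        override def isActive(): Boolean = global.phase.id < global.currentRun.picklerPhase.id

        override def pluginsTyped(tpe: Type, typer: analyzer.Typer, tree: Tree, mode: Int, pt: Type): Type = {
          // purely observational: report the type and return it unchanged
          if (tree.isInstanceOf[Apply]) inform(s"typed ${tree.symbol}: $tpe")
          tpe
        }
      }

      analyzer.addAnalyzerPlugin(typeLogger)
    }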
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4268398081..4bf7f78167 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -184,18 +184,14 @@ trait ContextErrors {
}
def ParentTypesError(templ: Template, ex: TypeError) = {
- templ.tpe = null
- issueNormalTypeError(templ, ex.getMessage())
- setError(templ)
+ templ.tpe = null
+ issueNormalTypeError(templ, ex.getMessage())
}
// additional parentTypes errors
- def ConstrArgsInParentWhichIsTraitError(arg: Tree, parent: Symbol) =
+ def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) =
issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments")
- def ConstrArgsInParentOfTraitError(arg: Tree, parent: Symbol) =
- issueNormalTypeError(arg, "parents of traits may not have parameters")
-
def MissingTypeArgumentsParentTpeError(supertpt: Tree) =
issueNormalTypeError(supertpt, "missing type arguments")
@@ -273,9 +269,6 @@ trait ContextErrors {
def VolatileValueError(vdef: Tree) =
issueNormalTypeError(vdef, "values cannot be volatile")
- def FinalVolatileVarError(vdef: Tree) =
- issueNormalTypeError(vdef, "final vars cannot be volatile")
-
def LocalVarUninitializedError(vdef: Tree) =
issueNormalTypeError(vdef, "local variables must be initialized")
@@ -730,7 +723,7 @@ trait ContextErrors {
} catch {
// the code above tries various tricks to detect the relevant portion of the stack trace
// if these tricks fail, just fall back to uninformative, but better than nothing, getMessage
- case NonFatal(ex) =>
+ case NonFatal(ex) => // currently giving a spurious warning, see SI-6994
macroLogVerbose("got an exception when processing a macro generated exception\n" +
"offender = " + stackTraceString(realex) + "\n" +
"error = " + stackTraceString(ex))
@@ -763,10 +756,14 @@ trait ContextErrors {
else " of " + expanded.getClass
))
- def MacroImplementationNotFoundError(expandee: Tree) =
- macroExpansionError(expandee,
+ def MacroImplementationNotFoundError(expandee: Tree) = {
+ val message =
"macro implementation not found: " + expandee.symbol.name + " " +
- "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)")
+ "(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)" +
+ (if (forScaladoc) ". When generating scaladocs for multiple projects at once, consider using -Ymacro-no-expand to disable macro expansions altogether."
+ else "")
+ macroExpansionError(expandee, message)
+ }
}
}
@@ -1047,6 +1044,9 @@ trait ContextErrors {
def MaxParametersCaseClassError(tree: Tree) =
issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.")
+ def InheritsItselfError(tree: Tree) =
+ issueNormalTypeError(tree, tree.tpe.typeSymbol+" inherits itself")
+
def MissingParameterOrValTypeError(vparam: Tree) =
issueNormalTypeError(vparam, "missing parameter type")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index af2aeefecd..620665126e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -35,7 +35,7 @@ trait Contexts { self: Analyzer =>
val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil
}
- private val startContext = {
+ private lazy val startContext = {
NoContext.make(
Template(List(), emptyValDef, List()) setSymbol global.NoSymbol setType global.NoType,
rootMirror.RootClass,
@@ -342,6 +342,16 @@ trait Contexts { self: Analyzer =>
c
}
+ /**
+ * A context for typing constructor parameter ValDefs, super or self invocation arguments and default getters
+ * of constructors. These expressions need to be type checked in a scope outside the class, cf. spec 5.3.1.
+ *
+ * This method is called by namer / typer where `this` is the context for the constructor DefDef. The
+ * owner of the resulting (new) context is the outer context for the Template, i.e. the context for the
+ * ClassDef. This means that class type parameters will be in scope. The value parameters of the current
+ * constructor are also entered into the new constructor scope. Members of the class however will not be
+ * accessible.
+ */
def makeConstructorContext = {
var baseContext = enclClass.outer
while (baseContext.tree.isInstanceOf[Template])
@@ -361,6 +371,8 @@ trait Contexts { self: Analyzer =>
enterLocalElems(c.scope.elems)
}
}
+ // Enter the scope elements of this (the scope for the constructor DefDef) into the new constructor scope.
+ // Concretely, this will enter the value parameters of constructor.
enterElems(this)
argContext
}
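
A small user-level illustration of the scoping rule documented above, assuming nothing beyond spec 5.3.1: constructor parameter defaults and super-constructor arguments see the class's type parameters and the other constructor parameters, but not members of the class body.

    class Base(n: Int)

    class Ok[T](a: Int, b: Int = a + 1)   // default sees the earlier parameter `a`
        extends Base(a * 2) {             // super-constructor argument sees `a` as well
      val member = 42
    }

    // By contrast, a default cannot refer to a member of the class being defined:
    //   class Bad(a: Int, b: Int = member) { val member = 42 }   // error: not found: value member
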
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index df753ba53c..f6142a81be 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -317,15 +317,33 @@ abstract class Duplicators extends Analyzer {
super.typed(tree, mode, pt)
case Select(th @ This(_), sel) if (oldClassOwner ne null) && (th.symbol == oldClassOwner) =>
- // log("selection on this, no type ascription required")
- // we use the symbol name instead of the tree name because the symbol may have been
- // name mangled, rendering the tree name obsolete
- // log(tree)
- val t = super.typedPos(tree.pos, mode, pt) {
- Select(This(newClassOwner), tree.symbol.name)
- }
- // log("typed to: " + t + "; tpe = " + t.tpe + "; " + inspectTpe(t.tpe))
- t
+ // We use the symbol name instead of the tree name because the symbol
+ // may have been name mangled, rendering the tree name obsolete.
+ // ...but you can't just do a Select on a name because if the symbol is
+ // overloaded, you will crash in the backend.
+ val memberByName = newClassOwner.thisType.member(tree.symbol.name)
+ def nameSelection = Select(This(newClassOwner), tree.symbol.name)
+ val newTree = (
+ if (memberByName.isOverloaded) {
+ // Find the types of the overload alternatives as seen in the new class,
+ // and filter the list down to those which match the old type (after
+ // fixing the old type so it is seen as if from the new class.)
+ val typeInNewClass = fixType(oldClassOwner.info memberType tree.symbol)
+ val alts = memberByName.alternatives
+ val memberTypes = alts map (newClassOwner.info memberType _)
+ val memberString = memberByName.defString
+ alts zip memberTypes filter (_._2 =:= typeInNewClass) match {
+ case ((alt, tpe)) :: Nil =>
+ log(s"Arrested overloaded type in Duplicators, narrowing to ${alt.defStringSeenAs(tpe)}\n Overload was: $memberString")
+ Select(This(newClassOwner), alt)
+ case _ =>
+ log(s"Could not disambiguate $memberString in Duplicators. Attempting name-based selection, but this may not end well...")
+ nameSelection
+ }
+ }
+ else nameSelection
+ )
+ super.typed(atPos(tree.pos)(newTree), mode, pt)
case This(_) if (oldClassOwner ne null) && (tree.symbol == oldClassOwner) =>
// val tree1 = Typed(This(newClassOwner), TypeTree(fixType(tree.tpe.widen)))
@@ -370,7 +388,7 @@ abstract class Duplicators extends Analyzer {
cases
}
- super.typedPos(tree.pos, mode, pt)(Match(scrut, cases1))
+ super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt)
case EmptyTree =>
// no need to do anything, in particular, don't set the type to null, EmptyTree.tpe_= asserts
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index ec3a0a0ef7..d1cf9b1904 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -1319,12 +1319,17 @@ trait Implicits {
// `materializeImplicit` does some preprocessing for `pt`
// is it only meant for manifests/tags or we need to do the same for `implicitsOfExpectedType`?
- if (result.isFailure && !wasAmbigious) result = searchImplicit(implicitsOfExpectedType, false)
+ if (result.isFailure) result = searchImplicit(implicitsOfExpectedType, false)
if (result.isFailure) {
context.updateBuffer(previousErrs)
if (Statistics.canEnable) Statistics.stopTimer(oftypeFailNanos, failstart)
} else {
+ if (wasAmbigious && settings.lint.value)
+ reporter.warning(tree.pos,
+ "Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667. \n" +
+ previousErrs.map(_.errMsg).mkString("\n"))
+
if (Statistics.canEnable) Statistics.stopTimer(oftypeSucceedNanos, succstart)
if (Statistics.canEnable) Statistics.incCounter(oftypeImplicitHits)
}
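
A hedged sketch of the SI-6667 shape the new -Xlint warning targets: the in-scope search is ambiguous, and the search then falls through to the implicit scope (here, the companion object). Names are illustrative.

    class Box
    object Box { implicit val fromCompanion: Box = new Box }

    object Test {
      implicit val a: Box = new Box
      implicit val b: Box = new Box          // `a` and `b` are ambiguous in scope

      def needsBox(implicit box: Box) = box
      needsBox                               // 2.10 falls back to Box.fromCompanion;
                                             // with -Xlint this now warns, and 2.11 will reject it
    }
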
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 2c2aa03d24..7161043dcf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -58,20 +58,31 @@ trait Infer extends Checkable {
* @throws TypeError when the unapply[Seq] definition is ill-typed
* @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
*
- * From the spec:
+ * This is the spec currently implemented -- TODO: update it.
+ *
* 8.1.8 ExtractorPatterns
*
* An extractor pattern x(p1, ..., pn) where n ≥ 0 is of the same syntactic form as a constructor pattern.
* However, instead of a case class, the stable identifier x denotes an object which has a member method named unapply or unapplySeq that matches the pattern.
- * An unapply method in an object x matches the pattern x(p1, ..., pn) if it takes exactly one argument and one of the following applies:
*
- * n = 0 and unapply’s result type is Boolean.
+ * An `unapply` method with result type `R` in an object `x` matches the
+ * pattern `x(p_1, ..., p_n)` if it takes exactly one argument and, either:
+ * - `n = 0` and `R =:= Boolean`, or
+ * - `n = 1` and `R <:< Option[T]`, for some type `T`.
+ * The argument pattern `p_1` is typed in turn with expected type `T`.
+ * - Or, `n > 1` and `R <:< Option[Product_n[T_1, ..., T_n]]`, for some
+ * types `T_1, ..., T_n`. The argument patterns `p_1, ..., p_n` are
+ * typed with expected types `T_1, ..., T_n`.
+ *
+ * An `unapplySeq` method in an object `x` matches the pattern `x(p_1, ..., p_n)`
+ * if it takes exactly one argument and its result type is of the form `Option[S]`,
+ * where either:
+ * - `S` is a subtype of `Seq[U]` for some element type `U` (set `m = 0`),
+ * - or `S` is a `ProductX[T_1, ..., T_m]` and `T_m <: Seq[U]` (`m <= n`).
*
- * n = 1 and unapply’s result type is Option[T], for some type T.
- * the (only) argument pattern p1 is typed in turn with expected type T
+ * The argument patterns `p_1, ..., p_n` are typed with expected types
+ * `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
*
- * n > 1 and unapply’s result type is Option[(T1, ..., Tn)], for some types T1, ..., Tn.
- * the argument patterns p1, ..., pn are typed in turn with expected types T1, ..., Tn
*/
def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
val isUnapplySeq = unappSym.name == nme.unapplySeq
@@ -83,31 +94,34 @@ trait Infer extends Checkable {
else toRepeated
}
+ // empty list --> error, otherwise length == 1
+ lazy val optionArgs = resTp.baseType(OptionClass).typeArgs
+ // empty list --> not a ProductN, otherwise product element types
+ def productArgs = getProductArgs(optionArgs.head)
+
val formals =
- if (nbSubPats == 0 && booleanExtractor && !isUnapplySeq) Nil
- else resTp.baseType(OptionClass).typeArgs match {
- case optionTArg :: Nil =>
- def productArgs = getProductArgs(optionTArg)
+ // convert Seq[T] to the special repeated argument type
+ // so below we can use formalTypes to expand formals to correspond to the number of actuals
+ if (isUnapplySeq) {
+ if (optionArgs.nonEmpty)
+ productArgs match {
+ case Nil => List(seqToRepeatedChecked(optionArgs.head))
+ case normalTps :+ seqTp => normalTps :+ seqToRepeatedChecked(seqTp)
+ }
+ else throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.fullLocationString} does not conform to Option[_]")
+ } else {
+ if (booleanExtractor && nbSubPats == 0) Nil
+ else if (optionArgs.nonEmpty)
if (nbSubPats == 1) {
- if (isUnapplySeq) List(seqToRepeatedChecked(optionTArg))
- else {
- val productArity = productArgs.size
- if (productArity > 1 && settings.lint.value)
- global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionTArg}")
- List(optionTArg)
- }
+ val productArity = productArgs.size
+ if (productArity > 1 && settings.lint.value)
+ global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
+ optionArgs
}
// TODO: update spec to reflect we allow any ProductN, not just TupleN
- else productArgs match {
- case Nil if isUnapplySeq => List(seqToRepeatedChecked(optionTArg))
- case tps if isUnapplySeq => tps.init :+ seqToRepeatedChecked(tps.last)
- case tps => tps
- }
- case _ =>
- if (isUnapplySeq)
- throw new TypeError(s"result type $resTp of unapplySeq defined in ${unappSym.owner+unappSym.owner.locationString} not in {Option[_], Some[_]}")
- else
- throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.owner+unappSym.owner.locationString} not in {Boolean, Option[_], Some[_]}")
+ else productArgs
+ else
+ throw new TypeError(s"result type $resTp of unapply defined in ${unappSym.fullLocationString} does not conform to Option[_] or Boolean")
}
// for unapplySeq, replace last vararg by as many instances as required by nbSubPats
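
As a quick reference for the spec text in the first hunk above, the extractor shapes it covers look like this in user code (definitions and names are illustrative only):

    object IsEmpty {
      def unapply(s: String): Boolean = s.isEmpty              // n = 0, R =:= Boolean
    }
    object NameAndLength {
      def unapply(s: String): Option[(String, Int)] =          // n = 2, R <:< Option[Product2[..]]
        Some((s, s.length))
    }
    object Words {
      def unapplySeq(s: String): Option[Seq[String]] =         // unapplySeq, S <: Seq[U]
        Some(s.split(" ").toSeq)
    }

    "" match { case IsEmpty() => () }
    "scala" match { case NameAndLength(str, n) => println(s"$str has length $n") }
    "a b c" match { case Words(first, rest @ _*) => println(s"$first, then ${rest.size} more") }
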
@@ -257,8 +271,8 @@ trait Infer extends Checkable {
tp1 // @MAT aliases already handled by subtyping
}
- private val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
- private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
+ private lazy val stdErrorClass = rootMirror.RootClass.newErrorClass(tpnme.ERROR)
+ private lazy val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR)
/** The context-dependent inferencer part */
class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
@@ -411,8 +425,19 @@ trait Infer extends Checkable {
/** Like weakly compatible but don't apply any implicit conversions yet.
* Used when comparing the result type of a method with its prototype.
+ *
* [Martin] I think Infer is also created by Erasure, with the default
* implementation of isCoercible
+ * [Paulp] (Assuming the above must refer to my comment on isCoercible)
+ * Nope, I examined every occurrence of Inferencer in trunk. It
+ * appears twice as a self-type, once at its definition, and once
+ * where it is instantiated in Typers. There are no others.
+ *
+ % ack -A0 -B0 --no-filename '\bInferencer\b' src
+ self: Inferencer =>
+ self: Inferencer =>
+ class Inferencer(context: Context) extends InferencerContextErrors with InferCheckable {
+ val infer = new Inferencer(context0) {
*/
def isConservativelyCompatible(tp: Type, pt: Type): Boolean =
context.withImplicitsDisabled(isWeaklyCompatible(tp, pt))
@@ -1578,10 +1603,10 @@ trait Infer extends Checkable {
}
// Drop those that use a default; keep those that use vararg/tupling conversion.
mtypes exists (t =>
- !t.typeSymbol.hasDefaultFlag && {
- compareLengths(t.params, argtpes) < 0 || // tupling (*)
- hasExactlyNumParams(t, argtpes.length) // same nb or vararg
- }
+ !t.typeSymbol.hasDefaultFlag && (
+ compareLengths(t.params, argtpes) < 0 // tupling (*)
+ || hasExactlyNumParams(t, argtpes.length) // same nb or vararg
+ )
)
// (*) more arguments than parameters, but still applicable: tupling conversion works.
// todo: should not return "false" when paramTypes = (Unit) no argument is given
@@ -1608,15 +1633,18 @@ trait Infer extends Checkable {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
tryTwice { isSecondTry =>
- debuglog("infer method alt "+ tree.symbol +" with alternatives "+
- (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt)
+ debuglog(s"infer method alt ${tree.symbol} with alternatives ${alts map pre.memberType} argtpes=$argtpes pt=$pt")
- val applicable = resolveOverloadedMethod(argtpes, {
- alts filter { alt =>
- inSilentMode(context)(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) &&
- (!varArgsOnly || isVarArgsList(alt.tpe.params))
- }
- })
+ def varargsApplicableCheck(alt: Symbol) = !varArgsOnly || (
+ isVarArgsList(alt.tpe.params)
+ && (argtpes.size >= alt.tpe.params.size) // must be checked now due to SI-5859
+ )
+ val applicable = resolveOverloadedMethod(argtpes,
+ alts filter (alt =>
+ varargsApplicableCheck(alt)
+ && inSilentMode(context)(isApplicable(undetparams, followApply(pre memberType alt), argtpes, pt))
+ )
+ )
def improves(sym1: Symbol, sym2: Symbol) = {
// util.trace("improve "+sym1+sym1.locationString+" on "+sym2+sym2.locationString)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index b20a9ea626..245656e2d7 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -452,7 +452,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
if (aparam.name != rparam.name && !rparam.isSynthetic) MacroImplParamNameMismatchError(aparam, rparam)
if (isRepeated(aparam) ^ isRepeated(rparam)) MacroImplVarargMismatchError(aparam, rparam)
val aparamtpe = aparam.tpe.dealias match {
- case RefinedType(List(tpe), Scope(sym)) if tpe == MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
+ case RefinedType(List(tpe), Scope(sym)) if tpe =:= MacroContextClass.tpe && sym.allOverriddenSymbols.contains(MacroContextPrefixType) => tpe
case tpe => tpe
}
checkMacroImplParamTypeMismatch(atpeToRtpe(aparamtpe), rparam)
@@ -684,6 +684,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
* the expandee with an error marker set if there has been an error
*/
def macroExpand(typer: Typer, expandee: Tree, mode: Int = EXPRmode, pt: Type = WildcardType): Tree = {
+ if (settings.Ymacronoexpand.value) return expandee // SI-6812
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
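
Assuming the user-facing spelling mentioned in the ContextErrors hint earlier (-Ymacro-no-expand), the early return above lets a Scaladoc run skip expansion entirely; a hypothetical invocation (paths are placeholders):

    scaladoc -Ymacro-no-expand -d target/docs src/main/scala/Example.scala

With the flag set, macroExpand hands back the expandee unchanged, so macro-heavy code can be documented without the macro implementations having been compiled in an earlier run.
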
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index acc4f7ff67..99557d1527 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -235,8 +235,8 @@ trait MethodSynthesis {
context.unit.synthetics get meth match {
case Some(mdef) =>
context.unit.synthetics -= meth
- meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, false)
- cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, true)
+ meth setAnnotations deriveAnnotations(annotations, MethodTargetClass, keepClean = false)
+ cd.symbol setAnnotations deriveAnnotations(annotations, ClassTargetClass, keepClean = true)
List(cd, mdef)
case _ =>
// Shouldn't happen, but let's give ourselves a reasonable error when it does
@@ -329,6 +329,7 @@ trait MethodSynthesis {
*/
def category: Symbol
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
@@ -389,7 +390,7 @@ trait MethodSynthesis {
result
}
def derivedTree: DefDef =
- factoryMeth(mods & flagsMask | flagsExtra, name, tree, symbolic = false)
+ factoryMeth(mods & flagsMask | flagsExtra, name, tree)
def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC
def flagsMask: Long = AccessFlags
def name: TermName = tree.name.toTermName
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 967a3214f2..341dbfbe1f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -65,7 +65,18 @@ trait Namers extends MethodSynthesis {
case ModuleDef(_, _, _) => tree.symbol.moduleClass
case _ => tree.symbol
}
- newNamer(context.makeNewScope(tree, sym))
+ def isConstrParam(vd: ValDef) = {
+ (sym hasFlag PARAM | PRESUPER) &&
+ !vd.mods.isJavaDefined &&
+ sym.owner.isConstructor
+ }
+ val ownerCtx = tree match {
+ case vd: ValDef if isConstrParam(vd) =>
+ context.makeConstructorContext
+ case _ =>
+ context
+ }
+ newNamer(ownerCtx.makeNewScope(tree, sym))
}
def createInnerNamer() = {
newNamer(context.make(context.tree, owner, newScope))
@@ -426,6 +437,7 @@ trait Namers extends MethodSynthesis {
def enterSyms(trees: List[Tree]): Namer = {
trees.foldLeft(this: Namer) { (namer, t) =>
val ctx = namer enterSym t
+ // for Import trees, enterSym returns a changed context, so we need a new namer
if (ctx eq namer.context) namer
else newNamer(ctx)
}
@@ -524,20 +536,19 @@ trait Namers extends MethodSynthesis {
noDuplicates(selectors map (_.rename), AppearsTwice)
}
- def enterCopyMethod(copyDefDef: Tree, tparams: List[TypeDef]): Symbol = {
- val sym = copyDefDef.symbol
- val lazyType = completerOf(copyDefDef, tparams)
+ def enterCopyMethod(copyDef: DefDef): Symbol = {
+ val sym = copyDef.symbol
+ val lazyType = completerOf(copyDef)
/** Assign the types of the class parameters to the parameters of the
* copy method. See comment in `Unapplies.caseClassCopyMeth` */
def assignParamTypes() {
val clazz = sym.owner
val constructorType = clazz.primaryConstructor.tpe
- val subst = new SubstSymMap(clazz.typeParams, tparams map (_.symbol))
+ val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol))
val classParamss = constructorType.paramss
- val DefDef(_, _, _, copyParamss, _, _) = copyDefDef
- map2(copyParamss, classParamss)((copyParams, classParams) =>
+ map2(copyDef.vparamss, classParamss)((copyParams, classParams) =>
map2(copyParams, classParams)((copyP, classP) =>
copyP.tpt setType subst(classP.tpe)
)
@@ -545,24 +556,28 @@ trait Namers extends MethodSynthesis {
}
sym setInfo {
- mkTypeCompleter(copyDefDef) { sym =>
+ mkTypeCompleter(copyDef) { sym =>
assignParamTypes()
lazyType complete sym
}
}
}
- def completerOf(tree: Tree): TypeCompleter = completerOf(tree, treeInfo.typeParameters(tree))
- def completerOf(tree: Tree, tparams: List[TypeDef]): TypeCompleter = {
+
+ def completerOf(tree: Tree): TypeCompleter = {
val mono = namerOf(tree.symbol) monoTypeCompleter tree
+ val tparams = treeInfo.typeParameters(tree)
if (tparams.isEmpty) mono
else {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
+ /* @M! TypeDef's type params are handled differently, e.g., in `type T[A[x <: B], B]`, A and B are entered
+ * first as both are in scope in the definition of x. x is only in scope in `A[x <: B]`.
+ * No symbols are created for the abstract type's params at this point, i.e. the following assertion holds:
+ * !tree.symbol.isAbstractType || tparams.forall(_.symbol == NoSymbol)
+ * (tested with the above example, `trait C { type T[A[X <: B], B] }`). See also comment in PolyTypeCompleter.
+ */
if (!tree.symbol.isAbstractType) //@M TODO: change to isTypeMember ?
createNamer(tree) enterSyms tparams
- new PolyTypeCompleter(tparams, mono, tree, context) //@M
+ new PolyTypeCompleter(tparams, mono, context) //@M
}
}
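
For context, the synthetic member whose parameter types assignParamTypes copies over is the case-class copy method; roughly, a sketch of what the compiler generates, not its literal output:

    case class Person[T](name: String, tag: T)
    // generated, approximately:
    //   def copy[T](name: String = this.name, tag: T = this.tag): Person[T] =
    //     new Person(name, tag)

    val p = Person("ann", 1)
    val q = p.copy(tag = 2.0)    // Person[Double]: the omitted field keeps its default
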
@@ -624,9 +639,9 @@ trait Namers extends MethodSynthesis {
val sym = assignAndEnterSymbol(tree) setFlag bridgeFlag
if (name == nme.copy && sym.isSynthetic)
- enterCopyMethod(tree, tparams)
+ enterCopyMethod(tree)
else
- sym setInfo completerOf(tree, tparams)
+ sym setInfo completerOf(tree)
}
def enterClassDef(tree: ClassDef) {
@@ -739,13 +754,13 @@ trait Namers extends MethodSynthesis {
}
}
- def accessorTypeCompleter(tree: ValDef, isSetter: Boolean = false) = mkTypeCompleter(tree) { sym =>
+ /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */
+ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym =>
logAndValidate(sym) {
sym setInfo {
- if (isSetter)
- MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
- else
- NullaryMethodType(typeSig(tree))
+ val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitClass.tpe)
+ else NullaryMethodType(typeSig(tree))
+ pluginsTypeSigAccessor(tp, typer, tree, sym)
}
}
}
@@ -808,17 +823,12 @@ trait Namers extends MethodSynthesis {
* assigns the type to the tpt's node. Returns the type.
*/
private def assignTypeToTree(tree: ValOrDefDef, defnTyper: Typer, pt: Type): Type = {
- // compute result type from rhs
- val typedBody =
+ val rhsTpe =
if (tree.symbol.isTermMacro) defnTyper.computeMacroDefType(tree, pt)
else defnTyper.computeType(tree.rhs, pt)
- val typedDefn = widenIfNecessary(tree.symbol, typedBody, pt)
- assignTypeToTree(tree, typedDefn)
- }
-
- private def assignTypeToTree(tree: ValOrDefDef, tpe: Type): Type = {
- tree.tpt defineType tpe setPos tree.pos.focus
+ val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt)
+ tree.tpt defineType defnTpe setPos tree.pos.focus
tree.tpt.tpe
}
@@ -850,8 +860,13 @@ trait Namers extends MethodSynthesis {
private def templateSig(templ: Template): Type = {
val clazz = context.owner
def checkParent(tpt: Tree): Type = {
- if (tpt.tpe.isError) AnyRefClass.tpe
- else tpt.tpe
+ val tp = tpt.tpe
+ val inheritsSelf = tp.typeSymbol == owner
+ if (inheritsSelf)
+ InheritsItselfError(tpt)
+
+ if (inheritsSelf || tp.isError) AnyRefClass.tpe
+ else tp
}
val parents = typer.parentTypes(templ) map checkParent
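
The inputs the new InheritsItselfError reports on are as small as these; both deliberately do not compile, and now produce an "inherits itself" error with recovery to AnyRef instead of cycling:

    trait T extends T     // error: trait T inherits itself
    class C extends C     // error: class C inherits itself
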
@@ -895,163 +910,257 @@ trait Namers extends MethodSynthesis {
for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) {
cda.companionModuleClassNamer = templateNamer
}
- ClassInfoType(parents, decls, clazz)
+ val classTp = ClassInfoType(parents, decls, clazz)
+ pluginsTypeSig(classTp, templateNamer.typer, templ, WildcardType)
}
- private def classSig(tparams: List[TypeDef], impl: Template): Type = {
+ private def classSig(cdef: ClassDef): Type = {
+ val clazz = cdef.symbol
+ val ClassDef(_, _, tparams, impl) = cdef
val tparams0 = typer.reenterTypeParams(tparams)
val resultType = templateSig(impl)
- GenPolyType(tparams0, resultType)
+ val res = GenPolyType(tparams0, resultType)
+ val pluginsTp = pluginsTypeSig(res, typer, cdef, WildcardType)
+
+ // Already assign the type to the class symbol (monoTypeCompleter will do it again).
+ // Allows isDerivedValueClass to look at the info.
+ clazz setInfo pluginsTp
+ if (clazz.isDerivedValueClass) {
+ log("Ensuring companion for derived value class " + cdef.name + " at " + cdef.pos.show)
+ clazz setFlag FINAL
+ // Don't force the owner's info lest we create cycles as in SI-6357.
+ enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
+ }
+ pluginsTp
}
- private def methodSig(ddef: DefDef, mods: Modifiers, tparams: List[TypeDef],
- vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): Type = {
- val meth = owner
- val clazz = meth.owner
- // enters the skolemized version into scope, returns the deSkolemized symbols
- val tparamSyms = typer.reenterTypeParams(tparams)
- // since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams
- var vparamSymss = enterValueParams(vparamss)
+ private def moduleSig(mdef: ModuleDef): Type = {
+ val moduleSym = mdef.symbol
+ // The infos of both the module and the moduleClass symbols need to be assigned. monoTypeCompleter assigns
+ // the result of typeSig to the module symbol. The module class info is assigned here as a side-effect.
+ val result = templateSig(mdef.impl)
+ val pluginsTp = pluginsTypeSig(result, typer, mdef, WildcardType)
+ // Assign the moduleClass info (templateSig returns a ClassInfoType)
+ val clazz = moduleSym.moduleClass
+ clazz setInfo pluginsTp
+ // clazz.tpe returns a `ModuleTypeRef(clazz)`, a typeRef that links to the module class `clazz`
+ // (clazz.info would be the ClassInfoType, which is not what should be assigned to the module symbol)
+ clazz.tpe
+ }
+
+ /**
+ * The method type for `ddef`.
+ *
+ * If a PolyType(tparams, restp) is returned, `tparams` are the external symbols (not type skolems),
+ * i.e. instances of AbstractTypeSymbol. All references in `restp` to the type parameters are TypeRefs
+ * to these non-skolems.
+ *
+ * For type-checking the rhs (in case the result type is inferred), the type skolems of the type parameters
+ * are entered in scope. Equally, the parameter symbols entered into scope have types which refer to those
+ * skolems: when type-checking the rhs, references to parameters need to have types that refer to the skolems.
+ * In summary, typing an rhs happens with respect to the skolems.
+ *
+ * This means that the method's result type computed by the typer refers to skolems. In order to put it
+ * into the method type (the result of methodSig), typeRefs to skolems have to be replaced by references
+ * to the non-skolems.
+ */
+ private def methodSig(ddef: DefDef): Type = {
// DEPMETTODO: do we need to skolemize value parameter symbols?
- if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt defineType context.enclClass.owner.tpe
- tpt setPos meth.pos.focus
- }
- var resultPt = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
- val site = clazz.thisType
- /** Called for all value parameter lists, right to left
- * @param vparams the symbols of one parameter list
- * @param restpe the result type (possibly a MethodType)
+ val DefDef(_, _, tparams, vparamss, tpt, _) = ddef
+
+ val meth = owner
+ val methOwner = meth.owner
+ val site = methOwner.thisType
+
+ /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created
+ * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems
+ * into scope and returns the non-skolems.
*/
- def makeMethodType(vparams: List[Symbol], restpe: Type) = {
- // TODODEPMET: check that we actually don't need to do anything here
- // new dependent method types: probably OK already, since 'enterValueParams' above
- // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
- // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
- // so re-use / adapt that)
- if (owner.isJavaDefined)
- // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
- JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
- else
- MethodType(vparams, restpe)
- }
+ val tparamSyms = typer.reenterTypeParams(tparams)
+
+ val tparamSkolems = tparams.map(_.symbol)
+
+ /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems
+ * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter
+ * types from the overridden method).
+ */
+ var vparamSymss = enterValueParams(vparamss)
+
+ /**
+ * Creates a method type using tparamSyms and vparamSymss as argument symbols and `restpe` as result type.
+ * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter,
+ * so the resulting type is a valid external method type; it does not contain (references to) skolems.
+ */
def thisMethodType(restpe: Type) = {
val checkDependencies = new DependentTypeChecker(context)(this)
checkDependencies check vparamSymss
// DEPMETTODO: check not needed when they become on by default
checkDependencies(restpe)
- GenPolyType(
+ val makeMethodType = (vparams: List[Symbol], restpe: Type) => {
+ // TODODEPMET: check that we actually don't need to do anything here
+ // new dependent method types: probably OK already, since 'enterValueParams' above
+ // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to
+ // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies,
+ // so re-use / adapt that)
+ if (meth.isJavaDefined)
+ // TODODEPMET necessary?? new dependent types: replace symbols in restpe with the ones in vparams
+ JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe)
+ else
+ MethodType(vparams, restpe)
+ }
+
+
+ val res = GenPolyType(
tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args?
if (vparamSymss.isEmpty) NullaryMethodType(restpe)
// vparamss refer (if they do) to skolemized tparams
else (vparamSymss :\ restpe) (makeMethodType)
)
+ res.substSym(tparamSkolems, tparamSyms)
}
- def transformedResult =
- thisMethodType(resultPt).substSym(tparams map (_.symbol), tparamSyms)
+ /**
+ * Creates a schematic method type which has WildcardTypes for non specified
+ * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the
+ * type schema is
+ *
+ * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType))
+ *
+ * where T is the non-skolem type parameter.
+ */
+ def methodTypeSchema(resTp: Type) = {
+ // for all params without an explicit type, set the type to WildcardType
+ mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
+ thisMethodType(resTp)
+ }
- // luc: added .substSym from skolemized to deSkolemized
- // site.memberType(sym): PolyType(tparams, MethodType(..., ...))
- // ==> all references to tparams are deSkolemized
- // thisMethodType: tparams in PolyType are deSkolemized, the references in the MethodTypes are skolemized.
- // ==> the two didn't match
- //
- // for instance, B.foo would not override A.foo, and the default on parameter b would not be inherited
- // class A { def foo[T](a: T)(b: T = a) = a }
- // class B extends A { override def foo[U](a: U)(b: U) = b }
- def overriddenSymbol =
- intersectionType(clazz.info.parents).nonPrivateMember(meth.name).filter { sym =>
- sym != NoSymbol && (site.memberType(sym) matches transformedResult)
+ def overriddenSymbol(resTp: Type) = {
+ intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym =>
+ sym != NoSymbol && (site.memberType(sym) matches methodTypeSchema(resTp))
}
- // TODO: see whether this or something similar would work instead.
- //
+ }
+ // TODO: see whether this or something similar would work instead:
// def overriddenSymbol = meth.nextOverriddenSymbol
- // fill in result type and parameter types from overridden symbol if there is a unique one.
- if (clazz.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
- // try to complete from matching definition in base type
- mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType)
- val overridden = overriddenSymbol
- if (overridden != NoSymbol && !overridden.isOverloaded) {
- overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
- resultPt = site.memberType(overridden) match {
- case PolyType(tparams, rt) => rt.substSym(tparams, tparamSyms)
- case mt => mt
- }
+ /**
+ * If `meth` doesn't have an explicit return type, extracts the return type from the method
+ * overridden by `meth` (if there is a unique one). This type is later used as the expected
+ * type for computing the type of the rhs. The resulting type references type skolems for
+ * type parameters (consistent with the result of `typer.typedType(tpt).tpe`).
+ *
+ * As a first side effect, this method assigns a MethodType constructed using this
+ * return type to `meth`. This allows omitting the result type for recursive methods.
+ *
+ * As another side effect, this method also assigns parameter types from the overridden
+ * method to parameters of `meth` that have missing types (the parser accepts missing
+ * parameter types under -Yinfer-argument-types).
+ */
+ def typesFromOverridden(methResTp: Type): Type = {
+ val overridden = overriddenSymbol(methResTp)
+ if (overridden == NoSymbol || overridden.isOverloaded) {
+ methResTp
+ } else {
+ overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials
+ var overriddenTp = site.memberType(overridden) match {
+ case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems)
+ case mt => mt
+ }
for (vparams <- vparamss) {
- var pps = resultPt.params
+ var overriddenParams = overriddenTp.params
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- val paramtpe = pps.head.tpe
- vparam.symbol setInfo paramtpe
- vparam.tpt defineType paramtpe setPos vparam.pos.focus
+ val overriddenParamTp = overriddenParams.head.tpe
+ // references to type parameters in overriddenParamTp link to the type skolems, so the
+ // assigned type is consistent with the other / existing parameter types in vparamSymss.
+ vparam.symbol setInfo overriddenParamTp
+ vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus
}
- pps = pps.tail
+ overriddenParams = overriddenParams.tail
}
- resultPt = resultPt.resultType
+ overriddenTp = overriddenTp.resultType
}
- resultPt match {
- case NullaryMethodType(rtpe) => resultPt = rtpe
- case MethodType(List(), rtpe) => resultPt = rtpe
+
+ overriddenTp match {
+ case NullaryMethodType(rtpe) => overriddenTp = rtpe
+ case MethodType(List(), rtpe) => overriddenTp = rtpe
case _ =>
}
+
if (tpt.isEmpty) {
// provisionally assign `meth` a method type with inherited result type
// that way, we can leave out the result type even if method is recursive.
- meth setInfo thisMethodType(resultPt)
+ meth setInfo thisMethodType(overriddenTp)
+ overriddenTp
+ } else {
+ methResTp
}
}
}
- // Add a () parameter section if this overrides some method with () parameters.
- if (clazz.isClass && vparamss.isEmpty && overriddenSymbol.alternatives.exists(
- _.info.isInstanceOf[MethodType])) {
+
+ if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
+ tpt defineType context.enclClass.owner.tpe
+ tpt setPos meth.pos.focus
+ }
+
+ val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe
+ val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) {
+ typesFromOverridden(methResTp)
+ } else {
+ methResTp
+ }
+
+ // Add a () parameter section if this overrides some method with () parameters
+ if (methOwner.isClass && vparamss.isEmpty &&
+ overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) {
vparamSymss = ListOfNil
}
+
+ // issue an error for missing parameter types
mforeach(vparamss) { vparam =>
if (vparam.tpt.isEmpty) {
MissingParameterOrValTypeError(vparam)
vparam.tpt defineType ErrorType
}
}
- addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
+
+ addDefaultGetters(meth, vparamss, tparams, overriddenSymbol(methResTp))
// fast track macros, i.e. macros defined inside the compiler, are hardcoded
// hence we make use of that and let them have whatever right-hand side they need
// (either "macro ???" as they used to or just "???" to maximally simplify their compilation)
- if (fastTrack contains ddef.symbol) ddef.symbol setFlag MACRO
+ if (fastTrack contains meth) meth setFlag MACRO
// macro defs need to be typechecked in advance
// because @macroImpl annotation only gets assigned during typechecking
// otherwise macro defs wouldn't be able to robustly coexist with their clients
// because a client could be typechecked before a macro def that it uses
- if (ddef.symbol.isTermMacro) {
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- typer.computeMacroDefType(ddef, pt)
+ if (meth.isTermMacro) {
+ typer.computeMacroDefType(ddef, resTpFromOverride)
}
- thisMethodType({
+ val res = thisMethodType({
val rt = (
if (!tpt.isEmpty) {
- typer.typedType(tpt).tpe
+ methResTp
} else {
- // replace deSkolemized symbols with skolemized ones
- // (for resultPt computed by looking at overridden symbol, right?)
- val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
- assignTypeToTree(ddef, typer, pt)
- }
- )
+ // if the return type is inferred, we don't just use resTpFromOverride. Here, C.f has type String:
+ //   trait T { def f: Object }; class C extends T { def f = "" }
+ // using resTpFromOverride as the expected type allows for the following (C.f has type A):
+ //   trait T { def f: A }; class C extends T { implicit def b2a(t: B): A = ???; def f = new B }
+ assignTypeToTree(ddef, typer, resTpFromOverride)
+ })
// #2382: return type of default getters are always @uncheckedVariance
if (meth.hasDefault)
rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List()))
else rt
})
+ pluginsTypeSig(res, typer, ddef, methResTp)
}
/**
@@ -1063,9 +1172,9 @@ trait Namers extends MethodSynthesis {
* flag.
*/
private def addDefaultGetters(meth: Symbol, vparamss: List[List[ValDef]], tparams: List[TypeDef], overriddenSymbol: => Symbol) {
- val clazz = meth.owner
+ val methOwner = meth.owner
val isConstr = meth.isConstructor
- val overridden = if (isConstr || !clazz.isClass) NoSymbol else overriddenSymbol
+ val overridden = if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol
val overrides = overridden != NoSymbol && !overridden.isOverloaded
// value parameters of the base class (whose defaults might be overridden)
var baseParamss = (vparamss, overridden.tpe.paramss) match {
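
As a reminder of what addDefaultGetters produces, a sketch of the synthetic getters; the f$default$N naming follows the usual scheme, and the exact trees are compiler-generated:

    class A {
      def f(x: Int, y: Int = x + 1) = x * y
      // generated, roughly:
      //   def f$default$2(x: Int): Int = x + 1
    }

    class B(val n: Int = 10)
    // constructor defaults end up in the companion object, roughly:
    //   object B { def <init>$default$1: Int = 10 }
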
@@ -1115,7 +1224,7 @@ trait Namers extends MethodSynthesis {
val parentNamer = if (isConstr) {
val (cdef, nmr) = moduleNamer.getOrElse {
- val module = companionSymbolOf(clazz, context)
+ val module = companionSymbolOf(methOwner, context)
module.initialize // call type completer (typedTemplate), adds the
// module's templateNamer to classAndNamerOfModule
module.attachments.get[ConstructorDefaultsAttachment] match {
@@ -1161,7 +1270,7 @@ trait Namers extends MethodSynthesis {
name, deftParams, defvParamss, defTpt, defRhs)
}
if (!isConstr)
- clazz.resetFlag(INTERFACE) // there's a concrete member now
+ methOwner.resetFlag(INTERFACE) // there's a concrete member now
val default = parentNamer.enterSyntheticSym(defaultTree)
if (forInteractive && default.owner.isTerm) {
// save the default getters as attachments in the method symbol. if compiling the
@@ -1186,15 +1295,31 @@ trait Namers extends MethodSynthesis {
}
}
+ private def valDefSig(vdef: ValDef) = {
+ val ValDef(_, _, tpt, rhs) = vdef
+ val result = if (tpt.isEmpty) {
+ if (rhs.isEmpty) {
+ MissingParameterOrValTypeError(tpt)
+ ErrorType
+ }
+ else assignTypeToTree(vdef, typer, WildcardType)
+ } else {
+ typer.typedType(tpt).tpe
+ }
+ pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result)
+
+ }
+
//@M! an abstract type definition (abstract type member/type parameter)
// may take type parameters, which are in scope in its bounds
- private def typeDefSig(tpsym: Symbol, tparams: List[TypeDef], rhs: Tree) = {
+ private def typeDefSig(tdef: TypeDef) = {
+ val TypeDef(_, _, tparams, rhs) = tdef
// log("typeDefSig(" + tpsym + ", " + tparams + ")")
val tparamSyms = typer.reenterTypeParams(tparams) //@M make tparams available in scope (just for this abstypedef)
val tp = typer.typedType(rhs).tpe match {
case TypeBounds(lt, rt) if (lt.isError || rt.isError) =>
TypeBounds.empty
- case tp @ TypeBounds(lt, rt) if (tpsym hasFlag JAVA) =>
+ case tp @ TypeBounds(lt, rt) if (tdef.symbol hasFlag JAVA) =>
TypeBounds(lt, objToAny(rt))
case tp =>
tp
@@ -1216,9 +1341,32 @@ trait Namers extends MethodSynthesis {
// However, separate compilation requires the symbol info to be
// loaded to do this check, but loading the info will probably
// lead to spurious cyclic errors. So omit the check.
- GenPolyType(tparamSyms, tp)
+ val res = GenPolyType(tparamSyms, tp)
+ pluginsTypeSig(res, typer, tdef, WildcardType)
}
+ private def importSig(imp: Import) = {
+ val Import(expr, selectors) = imp
+ val expr1 = typer.typedQualifier(expr)
+ typer checkStable expr1
+ if (expr1.symbol != null && expr1.symbol.isRootPackage)
+ RootImportError(imp)
+
+ if (expr1.isErrorTyped)
+ ErrorType
+ else {
+ val newImport = treeCopy.Import(imp, expr1, selectors).asInstanceOf[Import]
+ checkSelectors(newImport)
+ transformed(imp) = newImport
+ // copy symbol and type attributes back into old expression
+ // so that the structure builder will find it.
+ expr.symbol = expr1.symbol
+ expr.tpe = expr1.tpe
+ ImportType(expr1)
+ }
+ }
+
+
/** Given a case class
* case class C[Ts] (ps: Us)
* Add the following methods to toScope:
@@ -1242,6 +1390,11 @@ trait Namers extends MethodSynthesis {
caseClassCopyMeth(cdef) foreach namer.enterSyntheticSym
}
+ /**
+ * TypeSig is invoked by monoTypeCompleters. It returns the type of a definition which
+ * is then assigned to the corresponding symbol (typeSig itself does not need to assign
+ * the type to the symbol, but it can if necessary).
+ */
def typeSig(tree: Tree): Type = {
// log("typeSig " + tree)
/** For definitions, transform Annotation trees to AnnotationInfos, assign
@@ -1274,84 +1427,33 @@ trait Namers extends MethodSynthesis {
}
val sym: Symbol = tree.symbol
- // @Lukas: I am not sure this is the right way to do things.
- // We used to only decorate the module class with annotations, which is
- // clearly wrong. Now we decorate both the class and the object.
- // But maybe some annotations are only meant for one of these but not for the other?
- //
- // TODO: meta-annotations to indicate class vs. object.
+
+ // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass)
annotate(sym)
if (sym.isModule) annotate(sym.moduleClass)
def getSig = tree match {
- case cdef @ ClassDef(_, name, tparams, impl) =>
- val clazz = tree.symbol
- val result = createNamer(tree).classSig(tparams, impl)
- clazz setInfo result
- if (clazz.isDerivedValueClass) {
- log("Ensuring companion for derived value class " + name + " at " + cdef.pos.show)
- clazz setFlag FINAL
- // Don't force the owner's info lest we create cycles as in SI-6357.
- enclosingNamerWithScope(clazz.owner.rawInfo.decls).ensureCompanionObject(cdef)
- }
- result
-
- case ModuleDef(_, _, impl) =>
- val clazz = sym.moduleClass
- clazz setInfo createNamer(tree).templateSig(impl)
- clazz.tpe
-
- case ddef @ DefDef(mods, _, tparams, vparamss, tpt, rhs) =>
- // TODO: cleanup parameter list
- createNamer(tree).methodSig(ddef, mods, tparams, vparamss, tpt, rhs)
-
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val isBeforeSupercall = (
- (sym hasFlag PARAM | PRESUPER)
- && !mods.isJavaDefined
- && sym.owner.isConstructor
- )
- val typer1 = typer.constrTyperIf(isBeforeSupercall)
- if (tpt.isEmpty) {
- if (rhs.isEmpty) {
- MissingParameterOrValTypeError(tpt)
- ErrorType
- }
- else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType)
- }
- else typer1.typedType(tpt).tpe
-
- case TypeDef(_, _, tparams, rhs) =>
- createNamer(tree).typeDefSig(sym, tparams, rhs) //@M!
-
- case Import(expr, selectors) =>
- val expr1 = typer.typedQualifier(expr)
- typer checkStable expr1
- if (expr1.symbol != null && expr1.symbol.isRootPackage)
- RootImportError(tree)
-
- if (expr1.isErrorTyped)
- ErrorType
- else {
- val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import]
- checkSelectors(newImport)
- transformed(tree) = newImport
- // copy symbol and type attributes back into old expression
- // so that the structure builder will find it.
- expr.symbol = expr1.symbol
- expr.tpe = expr1.tpe
- ImportType(expr1)
- }
- }
+ case cdef: ClassDef =>
+ createNamer(tree).classSig(cdef)
+
+ case mdef: ModuleDef =>
+ createNamer(tree).moduleSig(mdef)
- val result =
- try getSig
- catch typeErrorHandler(tree, ErrorType)
+ case ddef: DefDef =>
+ createNamer(tree).methodSig(ddef)
- result match {
- case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => deskolemizeTypeParams(tparams)(result)
- case _ => result
+ case vdef: ValDef =>
+ createNamer(tree).valDefSig(vdef)
+
+ case tdef: TypeDef =>
+ createNamer(tree).typeDefSig(tdef) //@M!
+
+ case imp: Import =>
+ importSig(imp)
}
+
+ try getSig
+ catch typeErrorHandler(tree, ErrorType)
}
def includeParent(tpe: Type, parent: Symbol): Type = tpe match {
@@ -1511,14 +1613,25 @@ trait Namers extends MethodSynthesis {
}
}
- /** A class representing a lazy type with known type parameters.
+ /**
+ * A class representing a lazy type with known type parameters. `ctx` is the namer context in which the
+ * `owner` is defined.
+ *
+ * Constructing a PolyTypeCompleter for a DefDef creates type skolems for the type parameters and
+ * assigns them to the `tparams` trees.
*/
- class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, owner: Tree, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
- private val ownerSym = owner.symbol
- override val typeParams = tparams map (_.symbol) //@M
- override val tree = restp.tree
+ class PolyTypeCompleter(tparams: List[TypeDef], restp: TypeCompleter, ctx: Context) extends LockingTypeCompleter with FlagAgnosticCompleter {
+ // @M. If `owner` is an abstract type member, `typeParams` are all NoSymbol (see comment in `completerOf`),
+ // otherwise, the non-skolemized (external) type parameter symbols
+ override val typeParams = tparams map (_.symbol)
+
+ /* The definition tree (poly ClassDef, poly DefDef or HK TypeDef) */
+ override val tree = restp.tree
+
+ private val defnSym = tree.symbol
- if (ownerSym.isTerm) {
+ if (defnSym.isTerm) {
+ // for polymorphic DefDefs, create type skolems and assign them to the tparam trees.
val skolems = deriveFreshSkolems(tparams map (_.symbol))
map2(tparams, skolems)(_ setSymbol _)
}
@@ -1526,8 +1639,8 @@ trait Namers extends MethodSynthesis {
def completeImpl(sym: Symbol) = {
// @M an abstract type's type parameters are entered.
// TODO: change to isTypeMember ?
- if (ownerSym.isAbstractType)
- newNamerFor(ctx, owner) enterSyms tparams //@M
+ if (defnSym.isAbstractType)
+ newNamerFor(ctx, tree) enterSyms tparams //@M
restp complete sym
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index be218fcb02..2340c78f8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -268,26 +268,32 @@ trait NamesDefaults { self: Analyzer =>
*
* For by-name parameters, create a value
* x$n: () => T = () => arg
+ *
+ * For Ident(<unapply-selector>) arguments, no ValDef is created (SI-3353).
*/
- def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
+ def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => {
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
- val argTpe = (
- if (repeated) arg match {
- case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
- case _ => seqType(arg.tpe)
- }
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
- (context.scope.enter(s), byName, repeated)
+ val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ case Ident(nme.SELECTOR_DUMMY) =>
+ None // don't create a local ValDef if the argument is <unapply-selector>
+ case _ =>
+ val byName = isByNameParamType(tpe)
+ val repeated = isScalaRepeatedParamType(tpe)
+ val argTpe = (
+ if (repeated) arg match {
+ case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
+ case _ => seqType(arg.tpe)
+ }
+ else arg.tpe
+ ).widen // have to widen or types inferred from literal defaults will be singletons
+ val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
+ Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
- case ((sym, byName, repeated), arg) =>
+ case (None, _) => None
+ case (Some((sym, byName, repeated)), arg) =>
val body =
if (byName) {
val res = blockTyper.typed(Function(List(), arg))
@@ -303,7 +309,7 @@ trait NamesDefaults { self: Analyzer =>
blockTyper.typed(Apply(factory, List(resetLocalAttrs(arg))))
} else arg
}
- atPos(body.pos)(ValDef(sym, body).setType(NoType))
+ Some(atPos(body.pos)(ValDef(sym, body).setType(NoType)))
}
}
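
For orientation, the block these ValDefs end up in: a named-argument call is rewritten into temporaries plus a positional call, e.g. (a sketch of the generated shape; the temporary names are illustrative):

    def f(a: Int, b: String) = b * a

    f(b = "x", a = 3)
    // is expanded to something like:
    //   {
    //     val x$1: String = "x"
    //     val x$2: Int    = 3
    //     f(x$2, x$1)
    //   }
    // The SI-3353 change above skips creating a temporary for the synthetic
    // <unapply-selector> argument.
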
@@ -329,27 +335,29 @@ trait NamesDefaults { self: Analyzer =>
// ValDef's in the block), change the arguments to these local values.
case Apply(expr, typedArgs) =>
// typedArgs: definition-site order
- val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, false, false)
+ val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
val valDefs = argValDefs(reorderArgsInv(typedArgs, argPos),
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
- val ref = gen.mkAttributedRef(vDef.symbol)
- atPos(vDef.pos.focus) {
- // for by-name parameters, the local value is a nullary function returning the argument
- tpe.typeSymbol match {
- case ByNameParamClass => Apply(ref, Nil)
- case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
- case _ => ref
+ val refArgs = map3(reorderArgs(valDefs, argPos), formals, typedArgs)((vDefOpt, tpe, origArg) => vDefOpt match {
+ case None => origArg
+ case Some(vDef) =>
+ val ref = gen.mkAttributedRef(vDef.symbol)
+ atPos(vDef.pos.focus) {
+ // for by-name parameters, the local value is a nullary function returning the argument
+ tpe.typeSymbol match {
+ case ByNameParamClass => Apply(ref, Nil)
+ case RepeatedParamClass => Typed(ref, Ident(tpnme.WILDCARD_STAR))
+ case _ => ref
+ }
}
- }
})
// cannot call blockTyper.typedBlock here, because the method expr might be partially applied only
val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt)
res.setPos(res.pos.makeTransparent)
- val block = Block(stats ::: valDefs, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
+ val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent)
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
index f1c70f46d8..f7579ad249 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
@@ -109,9 +109,6 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
import definitions._
import analyzer._ //Typer
-
- case class DefaultOverrideMatchAttachment(default: Tree)
-
object vpmName {
val one = newTermName("one")
val drop = newTermName("drop")
@@ -222,11 +219,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// However this is a pain (at least the way I'm going about it)
// and I have to think these detailed errors are primarily useful
// for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(m: Match) {
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
// A string describing the first variable pattern
var vpat: String = null
// Using an iterator so we can recognize the last case
- val it = m.cases.iterator
+ val it = cases.iterator
def addendum(pat: Tree) = {
matchingSymbolInScope(pat) match {
@@ -269,7 +266,15 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
*/
def translateMatch(match_ : Match): Tree = {
val Match(selector, cases) = match_
- checkMatchVariablePatterns(match_)
+
+ val (nonSyntheticCases, defaultOverride) = cases match {
+ case init :+ last if treeInfo isSyntheticDefaultCase last =>
+ (init, Some(((scrut: Tree) => last.body)))
+ case _ =>
+ (cases, None)
+ }
+
+ checkMatchVariablePatterns(nonSyntheticCases)
// we don't transform after uncurry
// (that would require more sophistication when generating trees,
@@ -296,14 +301,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
- // the alternative to attaching the default case override would be to simply
- // append the default to the list of cases and suppress the unreachable case error that may arise (once we detect that...)
- val matchFailGenOverride = match_.attachments.get[DefaultOverrideMatchAttachment].map{case DefaultOverrideMatchAttachment(default) => ((scrut: Tree) => default)}
-
val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- val combined = combineCases(selector, selectorSym, cases map translateCase(selectorSym, pt), pt, matchOwner, matchFailGenOverride)
+ val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
combined
@@ -409,15 +410,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// example check: List[Int] <:< ::[Int]
// TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- val (typeTestTreeMaker, patBinderOrCasted) =
- if (needsTypeTest(patBinder.info.widen, extractor.paramType)) {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
- (List(treeMaker), treeMaker.nextBinder)
- } else {
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
// no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
// SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
// TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
@@ -426,10 +421,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
*/
- (Nil, patBinder setInfo extractor.paramType)
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+ // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
}
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, pos), extractor.subBindersAndPatterns: _*)
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
}
@@ -622,8 +628,13 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// to which type should the previous binder be casted?
def paramType : Type
- // binder has been casted to paramType if necessary
- def treeMaker(binder: Symbol, pos: Position): TreeMaker
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
// `subPatBinders` are the variables bound by this pattern in the following patterns
// subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
@@ -637,6 +648,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case bp => bp
}
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
def subPatTypes: List[Type] =
if(isSeq) {
val TypeRef(pre, SeqClass, args) = seqTp
@@ -731,41 +747,31 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
protected def rawSubPatTypes = constructorTp.paramTypes
- // binder has type paramType
- def treeMaker(binder: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
val paramAccessors = binder.constrParamAccessors
// binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
val mutableBinders =
- if (paramAccessors exists (_.isMutable))
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
else Nil
// checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders)
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
}
// reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- // caseFieldAccessors is messed up after typers (reversed, names mangled for non-public fields)
- // TODO: figure out why...
val accessors = binder.caseFieldAccessors
- // luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
- // (need to undo name-mangling, including the sneaky trailing whitespace)
- val constrParamAccessors = binder.constrParamAccessors
-
- def indexInCPA(acc: Symbol) =
- constrParamAccessors indexWhere { orig =>
- // patmatDebug("compare: "+ (orig, acc, orig.name, acc.name, (acc.name == orig.name), (acc.name startsWith (orig.name append "$"))))
- val origName = orig.name.toString.trim
- val accName = acc.name.toString.trim
- (accName == origName) || (accName startsWith (origName + "$"))
- }
-
- // patmatDebug("caseFieldAccessors: "+ (accessors, binder.caseFieldAccessors map indexInCPA))
- // patmatDebug("constrParamAccessors: "+ constrParamAccessors)
-
- val accessorsSorted = accessors sortBy indexInCPA
- if (accessorsSorted isDefinedAt (i-1)) REF(binder) DOT accessorsSorted(i-1)
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
}
@@ -781,11 +787,21 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def resultType = tpe.finalResultType
def isSeq = extractorCall.symbol.name == nme.unapplySeq
- def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = {
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been cast to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
// the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted)
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
}
override protected def seqTree(binder: Symbol): Tree =
@@ -818,7 +834,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
protected lazy val rawSubPatTypes =
if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(nbSubPats == 1) List(resultInMonad)
+ else if(!isSeq && nbSubPats == 1) List(resultInMonad)
else getProductArgs(resultInMonad) match {
case Nil => List(resultInMonad)
case x => x
@@ -842,6 +858,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
object Bound {
def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
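Aside (illustration, not part of the patch): `PatternBoundToUnderscore` above is a boolean extractor, i.e. its `unapply` returns `Boolean` instead of `Option[...]`, so it can be used as a nullary pattern. A minimal, self-contained sketch of the same idiom over a made-up mini-tree (`Node`, `Wild`, `Named`, `Alt` are hypothetical stand-ins for compiler trees):

    sealed trait Node
    case object Wild extends Node
    case class Named(name: String, body: Node) extends Node
    case class Alt(alternatives: List[Node]) extends Node

    object BoundToWildcard {
      // returning Boolean (rather than Option) makes `case BoundToWildcard() =>` legal
      def unapply(n: Node): Boolean = n match {
        case Wild             => true
        case Named("_", body) => BoundToWildcard.unapply(body)
        case Alt(ps)          => ps forall (BoundToWildcard.unapply(_))
        case _                => false
      }
    }

    object BoundToWildcardDemo extends App {
      val pat: Node = Alt(List(Wild, Named("_", Wild)))
      pat match {
        case BoundToWildcard() => println("binds only wildcards")
        case _                 => println("binds an interesting symbol")
      }
    }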
@@ -1009,10 +1035,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
// unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
// mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- def storedBinders: Set[Symbol] = if (debugInfoEmitVars) subPatBinders.toSet else Set.empty
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
def emitVars = storedBinders.nonEmpty
@@ -1033,10 +1066,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
import CODE._
- def bindSubPats(in: Tree): Tree = if (!emitVars) in
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
else {
- val (subPatBindersStored, subPatRefsStored) = stored.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new collection.mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
}
}
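Aside (illustration, not part of the patch): the `bindSubPats` change above walks the continuation tree once and emits a local val only for binders that are actually referenced in it. A self-contained sketch of that filtering step over a toy tree (`MiniTree`, `Ref`, `Branch` are made-up names):

    import scala.collection.mutable

    sealed trait MiniTree { def foreach(f: MiniTree => Unit): Unit }
    case class Ref(sym: String) extends MiniTree {
      def foreach(f: MiniTree => Unit) = f(this)
    }
    case class Branch(left: MiniTree, right: MiniTree) extends MiniTree {
      def foreach(f: MiniTree => Unit) = { f(this); left foreach f; right foreach f }
    }

    object UsedBindersDemo extends App {
      // (binder, the value it would be bound to)
      val stored = List("x" -> "fst", "y" -> "snd", "z" -> "third")
      val continuation: MiniTree = Branch(Ref("x"), Ref("z")) // never mentions y

      val potentiallyStored = stored.map(_._1).toSet
      val used = mutable.HashSet[String]()
      continuation foreach {
        case Ref(s) if potentiallyStored(s) => used += s
        case _ => ()
      }

      // emit definitions only for binders the continuation actually uses
      val emitted = stored filter { case (b, _) => used(b) }
      println(emitted) // List((x,fst), (z,third))
    }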
@@ -1056,7 +1101,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val subPatRefs: List[Tree],
extractorReturnsBoolean: Boolean,
val checkedLength: Option[Int],
- val prevBinder: Symbol) extends FunTreeMaker with PreserveSubPatBinders {
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val condAndNext = extraCond match {
@@ -1099,27 +1148,35 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
val subPatBinders: List[Symbol],
val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol]) extends FunTreeMaker with PreserveSubPatBinders {
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
import CODE._
val nextBinder = prevBinder // just passing through
// mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- // (the implementation could be optimized by duplicating code from `super.storedBinders`, but this seems more elegant)
- override def storedBinders: Set[Symbol] = super.storedBinders ++ mutableBinders.toSet
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
}
override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
}
- // typetag-based tests are inserted by the type checker
- def needsTypeTest(tp: Type, pt: Type): Boolean = !(tp <:< pt)
-
object TypeTestTreeMaker {
// factored out so that we can consistently generate other representations of the tree that implements the test
// (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
@@ -1133,12 +1190,14 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result
def eqTest(pat: Tree, testedBinder: Symbol): Result
def and(a: Result, b: Result): Result
+ def tru: Result
}
object treeCondStrategy extends TypeTestCondStrategy { import CODE._
type Result = Tree
def and(a: Result, b: Result): Result = a AND b
+ def tru = TRUE_typed
def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
@@ -1169,6 +1228,19 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
def eqTest(pat: Tree, testedBinder: Symbol): Result = false
def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
}
}
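Aside (illustration, not part of the patch): `TypeTestCondStrategy` is a single interface with an abstract `type Result`, so one description of the test can be rendered as trees, as propositions, or folded into a plain Boolean fact (as `nonNullImpliedByTestChecker` does). A toy version of the idiom, with hypothetical names:

    object CondStrategyDemo extends App {
      trait CondStrategy {
        type Result
        def nonNullTest(binder: String): Result
        def typeTest(binder: String, tp: String): Result
        def and(a: Result, b: Result): Result
        def tru: Result
        // the shape of the condition is written once, against the abstract Result
        def render(binder: String, tp: String): Result =
          and(nonNullTest(binder), typeTest(binder, tp))
      }

      // one interpretation: a human-readable condition
      object stringStrategy extends CondStrategy {
        type Result = String
        def nonNullTest(binder: String) = s"($binder ne null)"
        def typeTest(binder: String, tp: String) = s"$binder.isInstanceOf[$tp]"
        def and(a: String, b: String) = s"$a && $b"
        def tru = "true"
      }

      // another interpretation: does the condition imply that `binder` is non-null?
      def nonNullImpliedBy(binder: String) = new CondStrategy {
        type Result = Boolean
        def nonNullTest(b: String) = b == binder
        def typeTest(b: String, tp: String) = b == binder
        def and(a: Boolean, b: Boolean) = a || b
        def tru = false
      }

      println(stringStrategy.render("x", "Foo"))        // (x ne null) && x.isInstanceOf[Foo]
      println(nonNullImpliedBy("x").render("x", "Foo")) // true
    }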
@@ -1238,10 +1310,16 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// I think it's okay:
// - the isInstanceOf test includes a test for the element type
// - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if (expectedTp <:< AnyRefClass.tpe) && !needsTypeTest(testedBinder.info.widen, expectedTp) =>
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
case _ => default
}
@@ -1253,6 +1331,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
def isPureTypeTest = renderCondition(pureTypeTestChecker)
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
}
@@ -1751,6 +1831,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = TrueCond
}
ttm.renderCondition(condStrategy)
case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
@@ -1897,17 +1978,24 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case object False extends Prop
// symbols are propositions
- case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = nextSymId
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
override def toString = variable +"="+ const +"#"+ id
}
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
-
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: util.HashSet[Sym] = new util.HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
-
trait PropTraverser {
def apply(x: Prop): Unit = x match {
case And(a, b) => apply(a); apply(b)
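Aside (illustration, not part of the patch): making `Sym` an abstract case class whose companion interns instances means structurally equal `Sym`s are also reference-equal, which is what later lets `Lit.equals` compare symbols with `eq` instead of `==`. A standalone sketch of the interning pattern using a plain mutable map (the real code uses the compiler's `util.HashSet`):

    object InterningDemo extends App {
      import scala.collection.mutable

      // abstract case class: keeps the extractor/equals/hashCode, forbids `new Sym(...)`
      abstract case class Sym(variable: String, const: String)
      private class UniqueSym(variable: String, const: String) extends Sym(variable, const)

      object Sym {
        private val uniques = mutable.HashMap[Sym, Sym]()
        // always go through the factory, so equal Syms are also the same instance
        def apply(variable: String, const: String): Sym = {
          val fresh = new UniqueSym(variable, const)
          uniques.getOrElseUpdate(fresh, fresh)
        }
      }

      val a = Sym("x", "Nil")
      val b = Sym("x", "Nil")
      println(a == b) // true (case-class equality)
      println(a eq b) // true as well, thanks to interning
    }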
@@ -1954,7 +2042,8 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
//
// TODO: for V1 representing x1 and V2 standing for x1.head, encode that
// V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
- def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = {
+ // may throw an AnalysisBudget.Exception
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
val vars = new scala.collection.mutable.HashSet[Var]
@@ -1978,10 +2067,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
props foreach gatherEqualities.apply
if (modelNull) vars foreach (_.registerNull)
- val pure = props map rewriteEqualsToProp.apply
+ val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
- var eqAxioms: Prop = True
- def addAxiom(p: Prop) = eqAxioms = And(eqAxioms, p)
+ val eqAxioms = formulaBuilder
+ @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
patmatDebug("removeVarEq vars: "+ vars)
vars.foreach { v =>
@@ -2007,23 +2096,37 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
}
- patmatDebug("eqAxioms:\n"+ cnfString(eqFreePropToSolvable(eqAxioms)))
- patmatDebug("pure:"+ pure.map(p => cnfString(eqFreePropToSolvable(p))).mkString("\n"))
+ patmatDebug("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
+ patmatDebug("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
- (eqAxioms, pure)
+ (toFormula(eqAxioms), pure)
}
+ // an interface that should be suitable for feeding a SAT solver when the time comes
type Formula
+ type FormulaBuilder
+
+ // creates an empty formula builder to which more formulae can be added
+ def formulaBuilder: FormulaBuilder
+
+ // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
+ // toFormula(f) == andFormula(f1, andFormula(..., fN))
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit
+ def toFormula(buff: FormulaBuilder): Formula
+
+ // the conjunction of formulae `a` and `b`
def andFormula(a: Formula, b: Formula): Formula
+ // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
+ def simplifyFormula(a: Formula): Formula
// may throw an AnalysisBudget.Exception
def propToSolvable(p: Prop): Formula = {
val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
- eqFreePropToSolvable(And(eqAxioms, pure))
+ andFormula(eqAxioms, pure)
}
// may throw an AnalysisBudget.Exception
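Aside (illustration, not part of the patch): the comment above pins down the builder contract: adding f1..fN to a fresh builder and calling toFormula must be equivalent to folding andFormula over them. A toy CNF instantiation (clauses as sets of signed Ints) mirroring the ArrayBuffer-based one in the CNF trait below:

    object FormulaBuilderDemo extends App {
      import scala.collection.mutable.ArrayBuffer

      type Clause  = Set[Int]            // disjunction of literals (signed Ints)
      type Formula = ArrayBuffer[Clause] // conjunction of clauses

      def formula(cs: Clause*): Formula = ArrayBuffer(cs: _*)
      def andFormula(a: Formula, b: Formula): Formula = a ++ b

      type FormulaBuilder = ArrayBuffer[Clause]
      def formulaBuilder: FormulaBuilder = ArrayBuffer[Clause]()
      def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
      def toFormula(buff: FormulaBuilder): Formula = buff

      val f1 = formula(Set(1, 2))        //  x1 \/ x2
      val f2 = formula(Set(-1), Set(3))  // -x1 /\ x3

      val b = formulaBuilder
      addFormula(b, f1)
      addFormula(b, f2)

      // contract: toFormula(b) == andFormula(f1, f2)
      println(toFormula(b) == andFormula(f1, f2)) // true
    }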
@@ -2039,24 +2142,35 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
trait CNF extends Logic {
- // CNF: a formula is a conjunction of clauses
- type Formula = Array[Clause]
/** Override Array creation for efficiency (to not go through reflection). */
private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
}
- def formula(c: Clause*): Formula = c.toArray
- def andFormula(a: Formula, b: Formula): Formula = a ++ b
+ import scala.collection.mutable.ArrayBuffer
+ type FormulaBuilder = ArrayBuffer[Clause]
+ def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
+ def toFormula(buff: FormulaBuilder): Formula = buff
+
+ // CNF: a formula is a conjunction of clauses
+ type Formula = FormulaBuilder
+ def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
+
+ type Clause = Set[Lit]
// a clause is a disjunction of distinct literals
- type Clause = Set[Lit]
def clause(l: Lit*): Clause = l.toSet
- private def merge(a: Clause, b: Clause) = a ++ b
type Lit
def Lit(sym: Sym, pos: Boolean = true): Lit
+ def andFormula(a: Formula, b: Formula): Formula = a ++ b
+ def simplifyFormula(a: Formula): Formula = a.distinct
+
+ private def merge(a: Clause, b: Clause) = a ++ b
+
// throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
// TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
def eqFreePropToSolvable(p: Prop): Formula = {
@@ -2142,7 +2256,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
class Lit(val sym: Sym, val pos: Boolean) {
override def toString = if (!pos) "-"+ sym.toString else sym.toString
override def equals(o: Any) = o match {
- case o: Lit => (o.sym == sym) && (o.pos == pos)
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
case _ => false
}
override def hashCode = sym.hashCode + pos.hashCode
@@ -2191,13 +2305,18 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
}
private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit) = {
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
val negated = -unitLit
// drop entire clauses that are trivially true
// (i.e., disjunctions that contain the literal we're making true in the returned model),
// and simplify clauses by dropping the negation of the literal we're making true
// (since False \/ X == X)
- f.filterNot(_.contains(unitLit)).map(_ - negated)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
}
def findModelFor(f: Formula): Model = {
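Aside (illustration, not part of the patch): `dropUnit` is the unit-propagation step of the DPLL loop: once a unit literal is made true, clauses containing it are satisfied and dropped, and its negation is removed from the remaining clauses. A standalone sketch on Int literals:

    object UnitPropagationDemo extends App {
      type Lit     = Int // positive/negative Ints as literals
      type Clause  = Set[Lit]
      type Formula = Vector[Clause]

      def dropUnit(f: Formula, unitLit: Lit): Formula = {
        val negated = -unitLit
        // drop clauses satisfied by unitLit, strip its negation from the rest
        f collect { case clause if !(clause contains unitLit) => clause - negated }
      }

      // (x1 \/ x2) /\ (-x1 \/ x3) /\ (x4), propagating the unit literal x1
      val f: Formula = Vector(Set(1, 2), Set(-1, 3), Set(4))
      println(dropUnit(f, 1)) // Vector(Set(3), Set(4))
    }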
@@ -2621,23 +2740,22 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
val propsCasesOk = testCasesOk map (t => symbolicCase(t, modelNull = true))
val propsCasesFail = testCasesFail map (t => Not(symbolicCase(t, modelNull = true)))
- val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
try {
- // most of the time eqAxiomsFail == eqAxiomsOk, but the different approximations might cause different variables to disapper in general
- val eqAxiomsCNF =
- if (eqAxiomsFail == eqAxiomsOk) eqFreePropToSolvable(eqAxiomsFail)
- else eqFreePropToSolvable(And(eqAxiomsFail, eqAxiomsOk))
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
+ val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
+
+ val prefix = formulaBuilder
+ addFormula(prefix, eqAxioms)
- var prefix = eqAxiomsCNF
var prefixRest = symbolicCasesFail
var current = symbolicCasesOk
var reachable = true
var caseIndex = 0
patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
- patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsCNF))
+ patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsOk))
// invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
// termination: prefixRest.length decreases by 1
@@ -2647,11 +2765,11 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
prefixRest = prefixRest.tail
if (prefixRest.isEmpty) reachable = true
else {
- prefix = andFormula(eqFreePropToSolvable(prefHead), prefix)
+ addFormula(prefix, prefHead)
current = current.tail
- val model = findModelFor(andFormula(eqFreePropToSolvable(current.head), prefix))
+ val model = findModelFor(andFormula(current.head, toFormula(prefix)))
- // patmatDebug("trying to reach:\n"+ cnfString(eqFreePropToSolvable(current.head)) +"\nunder prefix:\n"+ cnfString(prefix))
+ // patmatDebug("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
// if (NoModel ne model) patmatDebug("reached: "+ modelString(model))
reachable = NoModel ne model
@@ -2701,7 +2819,9 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// compare to the fully known type `tp` (modulo abstract types),
// so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
// however, must approximate abstract types in
- val subTp = appliedType(pre.memberType(sym), sym.typeParams.map(_ => WildcardType))
+
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
// patmatDebug("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
@@ -3220,6 +3340,7 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// TODO: make more fine-grained, as we don't always need to jump
def canJump: Boolean
+ /** Should exhaustivity analysis be skipped? */
def unchecked: Boolean
@@ -3453,12 +3574,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
case Some(cds) => cds
}
- val allReachable = unchecked || {
- // a switch with duplicate cases yields a verify error,
- // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
- // (even though the verify error would disappear, the behaviour would change)
- unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
- }
+ // a switch with duplicate cases yields a verify error,
+ // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
+ // (even though the verify error would disappear, the behaviour would change)
+ val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
if (!allReachable) Nil
else if (noGuards(caseDefsWithGuards)) {
@@ -3676,11 +3795,17 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
// nextBinder: T
// next == MatchMonad[U]
// returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- VAL(nextBinder) === res,
- next
- ))
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
// guardTree: Boolean
// next: MatchMonad[T]
@@ -3707,10 +3832,10 @@ trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL
with SymbolicMatchAnalysis
with DPLLSolver { self: TreeMakers =>
override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, unchecked: Boolean): (List[List[TreeMaker]], List[Tree]) = {
+ unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
+ reportUnreachable(cases(caseIndex).last.pos)
+ }
if (!unchecked) {
- unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
- reportUnreachable(cases(caseIndex).last.pos)
- }
val counterExamples = exhaustive(prevBinder, cases, pt)
if (counterExamples.nonEmpty)
reportMissingCases(prevBinder.pos, counterExamples)
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 7118375b82..b9fdd7280e 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -11,6 +11,9 @@ import scala.collection.{ mutable, immutable }
import transform.InfoTransform
import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.nsc.settings.AnyScalaVersion
+import scala.tools.nsc.settings.NoScalaVersion
/** <p>
* Post-attribution checking and transformation.
@@ -60,23 +63,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
super.transformInfo(sym, tp)
}
- val toJavaRepeatedParam = new TypeMap {
- def apply(tp: Type) = tp match {
- case TypeRef(pre, RepeatedParamClass, args) =>
- typeRef(pre, JavaRepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
-
- val toScalaRepeatedParam = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, JavaRepeatedParamClass, args) =>
- typeRef(pre, RepeatedParamClass, args)
- case _ =>
- mapOver(tp)
- }
- }
+ val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass)
+ val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass)
def accessFlagsToString(sym: Symbol) = flagsToString(
sym getFlag (PRIVATE | PROTECTED),
@@ -156,27 +144,22 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// Override checking ------------------------------------------------------------
- def isJavaVarargsAncestor(clazz: Symbol) = (
- clazz.isClass
- && clazz.isJavaDefined
- && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
- )
-
/** Add bridges for vararg methods that extend Java vararg methods
*/
def addVarargBridges(clazz: Symbol): List[Tree] = {
// This is quite expensive, so attempt to skip it completely.
// Insist there at least be a java-defined ancestor which
// defines a varargs method. TODO: Find a cheaper way to exclude.
- if (clazz.thisType.baseClasses exists isJavaVarargsAncestor) {
+ if (inheritsJavaVarArgsMethod(clazz)) {
log("Found java varargs ancestor in " + clazz.fullLocationString + ".")
val self = clazz.thisType
val bridges = new ListBuffer[Tree]
def varargBridge(member: Symbol, bridgetpe: Type): Tree = {
- log("Generating varargs bridge for " + member.fullLocationString + " of type " + bridgetpe)
+ log(s"Generating varargs bridge for ${member.fullLocationString} of type $bridgetpe")
- val bridge = member.cloneSymbolImpl(clazz, member.flags | VBRIDGE) setPos clazz.pos
+ val newFlags = (member.flags | VBRIDGE | ARTIFACT) & ~PRIVATE
+ val bridge = member.cloneSymbolImpl(clazz, newFlags) setPos clazz.pos
bridge.setInfo(bridgetpe.cloneInfo(bridge))
clazz.info.decls enter bridge
@@ -189,26 +172,35 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
localTyper typed DefDef(bridge, body)
}
- // For all concrete non-private members that have a (Scala) repeated parameter:
- // compute the corresponding method type `jtpe` with a Java repeated parameter
+ // For all concrete non-private members (but: see below) that have a (Scala) repeated
+ // parameter: compute the corresponding method type `jtpe` with a Java repeated parameter
// if a method with type `jtpe` exists and that method is not a varargs bridge
// then create a varargs bridge of type `jtpe` that forwards to the
// member method with the Scala vararg type.
- for (member <- clazz.info.nonPrivateMembers) {
+ //
+ // @PP: Can't call nonPrivateMembers because we will miss refinement members,
+ // which have been marked private. See SI-4729.
+ for (member <- nonTrivialMembers(clazz)) {
+ log(s"Considering $member for java varargs bridge in $clazz")
if (!member.isDeferred && member.isMethod && hasRepeatedParam(member.info)) {
val inherited = clazz.info.nonPrivateMemberAdmitting(member.name, VBRIDGE)
+
// Delaying calling memberType as long as possible
if (inherited ne NoSymbol) {
- val jtpe = toJavaRepeatedParam(self.memberType(member))
+ val jtpe = toJavaRepeatedParam(self memberType member)
// this is a bit tortuous: we look for non-private members or bridges
// if we find a bridge everything is OK. If we find another member,
// we need to create a bridge
- if (inherited filter (sym => (self.memberType(sym) matches jtpe) && !(sym hasFlag VBRIDGE)) exists)
+ val inherited1 = inherited filter (sym => !(sym hasFlag VBRIDGE) && (self memberType sym matches jtpe))
+ if (inherited1.exists)
bridges += varargBridge(member, jtpe)
}
}
}
+ if (bridges.size > 0)
+ log(s"Adding ${bridges.size} bridges for methods extending java varargs.")
+
bridges.toList
}
else Nil
@@ -905,13 +897,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* the type occurs itself at variance position given by `variance`
*/
def validateVariance(tp: Type, variance: Int): Unit = tp match {
- case ErrorType => ;
- case WildcardType => ;
- case NoType => ;
- case NoPrefix => ;
- case ThisType(_) => ;
- case ConstantType(_) => ;
- // case DeBruijnIndex(_, _) => ;
+ case ErrorType =>
+ case WildcardType =>
+ case BoundedWildcardType(bounds) =>
+ validateVariance(bounds, variance)
+ case NoType =>
+ case NoPrefix =>
+ case ThisType(_) =>
+ case ConstantType(_) =>
+ // case DeBruijnIndex(_, _) =>
case SingleType(pre, sym) =>
validateVariance(pre, variance)
case TypeRef(pre, sym, args) =>
@@ -1062,6 +1056,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def apply(tp: Type) = mapOver(tp).normalize
}
+ def checkImplicitViewOptionApply(pos: Position, fn: Tree, args: List[Tree]): Unit = if (settings.lint.value) (fn, args) match {
+ case (tap@TypeApply(fun, targs), List(view: ApplyImplicitView)) if fun.symbol == Option_apply =>
+ unit.warning(pos, s"Suspicious application of an implicit view (${view.fun}) in the argument to Option.apply.") // SI-6567
+ case _ =>
+ }
+
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 =>
def isReferenceOp = name == nme.eq || name == nme.ne
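Aside (illustration, not part of the patch): the kind of code the new lint check is meant to flag (SI-6567) looks roughly like the sketch below; the implicit view fires inside the argument to Option.apply, so the null check guards the already-converted value and the Option can never be None. All names here are hypothetical:

    import scala.language.implicitConversions

    object OptionApplyLintDemo extends App {
      class Name(val value: String)
      implicit def stringToName(s: String): Name = new Name(s)

      val s: String = null
      // Looks like "wrap a possibly-null String, then convert", but actually
      // elaborates to Option(stringToName(s)): the view runs before the null check.
      val n: Option[Name] = Option(s)
      println(n.isDefined) // true, even though s was null
    }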
@@ -1372,10 +1372,18 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* indicating it has changed semantics between versions.
*/
private def checkMigration(sym: Symbol, pos: Position) = {
- if (sym.hasMigrationAnnotation)
- unit.warning(pos, "%s has changed semantics in version %s:\n%s".format(
- sym.fullLocationString, sym.migrationVersion.get, sym.migrationMessage.get)
- )
+ if (sym.hasMigrationAnnotation) {
+ val changed = try
+ settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
+ catch {
+ case e : NumberFormatException =>
+ unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
+ true
+ }
+ if (changed)
+ unit.warning(pos, s"${sym.fullLocationString} has changed semantics in version ${sym.migrationVersion.get}:\n${sym.migrationMessage.get}")
+ }
}
private def checkCompileTimeOnly(sym: Symbol, pos: Position) = {
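Aside (illustration, not part of the patch): with this change the migration warning only fires when the version being migrated from (given via -Xmigration:<version>) is older than the version recorded in the @migration annotation. A toy stand-in for that comparison (the real parsing and ordering live in scala.tools.nsc.settings.ScalaVersion):

    object MigrationCheckDemo extends App {
      // crude stand-in for ScalaVersion: compare dotted numeric versions part by part
      def parse(v: String): List[Int] = v.split("\\.").toList.map(_.toInt)
      def olderThan(a: String, b: String): Boolean = {
        import scala.math.Ordering.Implicits._
        parse(a) < parse(b)
      }

      val migratingFrom    = "2.9.2"  // what the user passed to -Xmigration:
      val changedInVersion = "2.10.0" // from the @migration annotation

      // warn only if the code base comes from a version older than the change
      if (olderThan(migratingFrom, changedInVersion))
        println("warning: semantics changed in " + changedInVersion)
    }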
@@ -1467,8 +1475,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
private def isRepeatedParamArg(tree: Tree) = currentApplication match {
case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.params.length == args.length && isRepeatedParamType(fn.tpe.params.last.tpe)
+ ( args.nonEmpty
+ && (args.last eq tree)
+ && (fn.tpe.params.length == args.length)
+ && isRepeatedParamType(fn.tpe.params.last.tpe)
+ )
case _ =>
false
}
@@ -1563,7 +1574,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case Apply(fn, args) =>
// sensicality should be subsumed by the unreachability/exhaustivity/irrefutability analyses in the pattern matcher
- if (!inPattern) checkSensible(tree.pos, fn, args)
+ if (!inPattern) {
+ checkImplicitViewOptionApply(tree.pos, fn, args)
+ checkSensible(tree.pos, fn, args)
+ }
currentApplication = tree
tree
}
@@ -1578,7 +1592,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
* arbitrarily choose one as more important than the other.
*/
checkDeprecated(sym, tree.pos)
- if (settings.Xmigration28.value)
+ if (settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
@@ -1677,8 +1691,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
checkAnyValSubclass(currentOwner)
- if (currentOwner.isDerivedValueClass)
- currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler!
if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree
case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc")
diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
index 20db479463..64c5b41638 100644
--- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala
@@ -4,31 +4,7 @@ package typechecker
trait StdAttachments {
self: Analyzer =>
- import global._
-
- /** Carries information necessary to expand the host tree.
- * At times we need to store this info, because macro expansion can be delayed until its targs are inferred.
- * After a macro application has been successfully expanded, this attachment is destroyed.
- */
type UnaffiliatedMacroContext = scala.reflect.macros.runtime.Context
type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type }
case class MacroRuntimeAttachment(delayed: Boolean, typerContext: Context, macroContext: Option[MacroContext])
-
- /** After being synthesized by the parser, primary constructors aren't fully baked yet.
- * A call to super in such constructors is just a fill-me-in-later dummy resolved later
- * by `parentTypes`. This attachment coordinates `parentTypes` and `typedTemplate` and
- * allows them to complete the synthesis.
- */
- case class SuperArgsAttachment(argss: List[List[Tree]])
-
- /** Convenience method for `SuperArgsAttachment`.
- * Compared with `MacroRuntimeAttachment` this attachment has different a usage pattern,
- * so it really benefits from a dedicated extractor.
- */
- def superArgs(tree: Tree): Option[List[List[Tree]]] =
- tree.attachments.get[SuperArgsAttachment] collect { case SuperArgsAttachment(argss) => argss }
-
- /** Determines whether the given tree has an associated SuperArgsAttachment.
- */
- def hasSuperArgs(tree: Tree): Boolean = superArgs(tree).nonEmpty
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index a907ab6c66..242eb9c9fe 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -50,6 +50,10 @@ trait SyntheticMethods extends ast.TreeDSL {
else if (clazz.isDerivedValueClass) valueSymbols
else Nil
}
+ private lazy val renamedCaseAccessors = perRunCaches.newMap[Symbol, mutable.Map[TermName, TermName]]()
+ /** Does not force the info of `caseclazz` */
+ final def caseAccessorName(caseclazz: Symbol, paramName: TermName) =
+ (renamedCaseAccessors get caseclazz).fold(paramName)(_(paramName))
/** Add the synthetic methods to case classes.
*/
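Aside (illustration, not part of the patch): `caseAccessorName` folds over the per-run cache: no entry for the class means the parameter name comes back untouched, otherwise it is looked up in a map that defaults to the identity. The same lookup shape in miniature (all names hypothetical):

    object CaseAccessorNameDemo extends App {
      import scala.collection.mutable

      // class name -> (original param name -> renamed accessor), defaulting to identity
      val renamed = mutable.Map[String, mutable.Map[String, String]]()
      renamed("Foo") = mutable.Map("x" -> "x$1") withDefault (name => name)

      def accessorName(clazz: String, param: String): String =
        (renamed get clazz).fold(param)(_(param))

      println(accessorName("Foo", "x")) // x$1 (renamed private accessor)
      println(accessorName("Foo", "y")) // y   (map default: identity)
      println(accessorName("Bar", "x")) // x   (no entry for Bar at all)
    }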
@@ -78,14 +82,7 @@ trait SyntheticMethods extends ast.TreeDSL {
else templ
}
- val originalAccessors = clazz.caseFieldAccessors
- // private ones will have been renamed -- make sure they are entered
- // in the original order.
- def accessors = clazz.caseFieldAccessors sortBy { acc =>
- originalAccessors indexWhere { orig =>
- (acc.name == orig.name) || (acc.name startsWith (orig.name append "$"))
- }
- }
+ def accessors = clazz.caseFieldAccessors
val arity = accessors.size
// If this is ProductN[T1, T2, ...], accessorLub is the lub of T1, T2, ..., .
// !!! Hidden behind -Xexperimental due to bummer type inference bugs.
@@ -391,6 +388,8 @@ trait SyntheticMethods extends ast.TreeDSL {
// TODO: shouldn't the next line be: `original resetFlag CASEACCESSOR`?
ddef.symbol resetFlag CASEACCESSOR
lb += logResult("case accessor new")(newAcc)
+ val renamedInClassMap = renamedCaseAccessors.getOrElseUpdate(clazz, mutable.Map() withDefault(x => x))
+ renamedInClassMap(original.name.toTermName) = newAcc.symbol.name.toTermName
}
(lb ++= templ.body ++= synthesize()).toList
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 48a5a36b00..c5c3c560ea 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -117,7 +117,8 @@ abstract class TreeCheckers extends Analyzer {
try p.source.path + ":" + p.line
catch { case _: UnsupportedOperationException => p.toString }
- def errorFn(msg: Any): Unit = println("[check: %s] %s".format(phase.prev, msg))
+ private var hasError: Boolean = false
+ def errorFn(msg: Any): Unit = {hasError = true; println("[check: %s] %s".format(phase.prev, msg))}
def errorFn(pos: Position, msg: Any): Unit = errorFn(posstr(pos) + ": " + msg)
def informFn(msg: Any) {
if (settings.verbose.value || settings.debug.value)
@@ -151,6 +152,7 @@ abstract class TreeCheckers extends Analyzer {
result
}
def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = {
+ hasError = false
val unit0 = currentUnit
currentRun.currentUnit = unit
body
@@ -169,6 +171,7 @@ abstract class TreeCheckers extends Analyzer {
checker.precheck.traverse(unit.body)
checker.typed(unit.body)
checker.postcheck.traverse(unit.body)
+ if (hasError) unit.warning(NoPosition, "TreeCheckers detected non-compliant trees in " + unit)
}
}
@@ -217,8 +220,11 @@ abstract class TreeCheckers extends Analyzer {
case _ => ()
}
- object precheck extends Traverser {
+ object precheck extends TreeStackTraverser {
override def traverse(tree: Tree) {
+ checkSymbolRefsRespectScope(tree)
+ checkReturnReferencesDirectlyEnclosingDef(tree)
+
val sym = tree.symbol
def accessed = sym.accessed
def fail(msg: String) = errorFn(tree.pos, msg + classstr(tree) + " / " + tree)
@@ -289,6 +295,41 @@ abstract class TreeCheckers extends Analyzer {
}
super.traverse(tree)
}
+
+ private def checkSymbolRefsRespectScope(tree: Tree) {
+ def symbolOf(t: Tree): Symbol = Option(t.symbol).getOrElse(NoSymbol)
+ def definedSymbolOf(t: Tree): Symbol = if (t.isDef) symbolOf(t) else NoSymbol
+ val info = Option(symbolOf(tree).info).getOrElse(NoType)
+ val referencedSymbols: List[Symbol] = {
+ val directRef = tree match {
+ case _: RefTree => symbolOf(tree).toOption
+ case _ => None
+ }
+ def referencedSyms(tp: Type) = (tp collect {
+ case TypeRef(_, sym, _) => sym
+ }).toList
+ val indirectRefs = referencedSyms(info)
+ (indirectRefs ++ directRef).distinct
+ }
+ for {
+ sym <- referencedSymbols
+ if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
+ } errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
+ }
+
+ private def checkReturnReferencesDirectlyEnclosingDef(tree: Tree) {
+ tree match {
+ case _: Return =>
+ path.collectFirst {
+ case dd: DefDef => dd
+ } match {
+ case None => errorFn(s"Return node ($tree) must be enclosed in a DefDef")
+ case Some(dd) =>
+ if (tree.symbol != dd.symbol) errorFn(s"Return symbol (${tree.symbol}) does not reference directly enclosing DefDef (${dd.symbol})")
+ }
+ case _ =>
+ }
+ }
}
object postcheck extends Traverser {
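Aside (illustration, not part of the patch): the Return check relies on TreeStackTraverser maintaining the path of enclosing trees, and collectFirst picks the nearest enclosing DefDef from it. The same lookup in miniature, with made-up tree stand-ins:

    object NearestEnclosingDemo extends App {
      sealed trait Enclosing
      case class Def(name: String) extends Enclosing
      case class Block(depth: Int) extends Enclosing

      // innermost-first path of enclosing trees, as the traverser would maintain it
      val path: List[Enclosing] = List(Block(2), Block(1), Def("foo"), Def("outer"))

      // nearest enclosing definition, if any
      val nearestDef = path collectFirst { case d: Def => d }
      println(nearestDef) // Some(Def(foo))
    }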
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index cbcddba487..dc5491a509 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -30,7 +30,6 @@ trait Typers extends Modes with Adaptations with Tags {
import global._
import definitions._
import TypersStats._
- import patmat.DefaultOverrideMatchAttachment
final def forArgMode(fun: Tree, mode: Int) =
if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode
@@ -53,10 +52,8 @@ trait Typers extends Modes with Adaptations with Tags {
object UnTyper extends Traverser {
override def traverse(tree: Tree) = {
- if (tree.canHaveAttrs) {
- tree.tpe = null
- if (tree.hasSymbol) tree.symbol = NoSymbol
- }
+ if (tree != EmptyTree) tree.tpe = null
+ if (tree.hasSymbol) tree.symbol = NoSymbol
super.traverse(tree)
}
}
@@ -453,12 +450,12 @@ trait Typers extends Modes with Adaptations with Tags {
def reenterValueParams(vparamss: List[List[ValDef]]) {
for (vparams <- vparamss)
for (vparam <- vparams)
- vparam.symbol = context.scope enter vparam.symbol
+ context.scope enter vparam.symbol
}
def reenterTypeParams(tparams: List[TypeDef]): List[Symbol] =
for (tparam <- tparams) yield {
- tparam.symbol = context.scope enter tparam.symbol
+ context.scope enter tparam.symbol
tparam.symbol.deSkolemize
}
@@ -872,7 +869,9 @@ trait Typers extends Modes with Adaptations with Tags {
case _ =>
debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original))
val tree1 = typed(resetAllAttrs(original), mode, WildcardType)
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
+ // Q: `typed` already calls `pluginsTyped` and `adapt`. The only difference here is that
+ // we pass `EmptyTree` as the `original`. Intended? Added in 2009 (53d98e7d42) by martin.
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, pt)
if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree)
}
else
@@ -1052,15 +1051,21 @@ trait Typers extends Modes with Adaptations with Tags {
def insertApply(): Tree = {
assert(!inHKMode(mode), modeString(mode)) //@M
- val qual = adaptToName(tree, nme.apply) match {
- case id @ Ident(_) =>
- val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType
- else if (id.symbol.owner.isClass)
- context.enclosingSubClassContext(id.symbol.owner).prefix
- else NoPrefix
- stabilize(id, pre, EXPRmode | QUALmode, WildcardType)
- case sel @ Select(qualqual, _) =>
- stabilize(sel, qualqual.tpe, EXPRmode | QUALmode, WildcardType)
+ val adapted = adaptToName(tree, nme.apply)
+ def stabilize0(pre: Type): Tree = stabilize(adapted, pre, EXPRmode | QUALmode, WildcardType)
+ // TODO reconcile the overlap between Typers#stabilize and TreeGen.stabilize
+ val qual = adapted match {
+ case This(_) =>
+ gen.stabilize(adapted)
+ case Ident(_) =>
+ val owner = adapted.symbol.owner
+ val pre =
+ if (owner.isPackageClass) owner.thisType
+ else if (owner.isClass) context.enclosingSubClassContext(owner).prefix
+ else NoPrefix
+ stabilize0(pre)
+ case Select(qualqual, _) =>
+ stabilize0(qualqual.tpe)
case other =>
other
}
@@ -1071,8 +1076,8 @@ trait Typers extends Modes with Adaptations with Tags {
// begin adapt
tree.tpe match {
- case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (-1)
- adaptAnnotations(tree, mode, pt)
+ case atp @ AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (-1)
+ adaptAnnotations(tree, this, mode, pt)
case ct @ ConstantType(value) if inNoModes(mode, TYPEmode | FUNmode) && (ct <:< pt) && !forScaladoc && !forInteractive => // (0)
val sym = tree.symbol
if (sym != null && sym.isDeprecated) {
@@ -1176,8 +1181,8 @@ trait Typers extends Modes with Adaptations with Tags {
Select(tree, "to" + sym.name)
}
}
- case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, mode, pt) => // (13)
- return typed(adaptAnnotations(tree, mode, pt), mode, pt)
+ case AnnotatedType(_, _, _) if canAdaptAnnotations(tree, this, mode, pt) => // (13)
+ return typed(adaptAnnotations(tree, this, mode, pt), mode, pt)
case _ =>
}
if (!context.undetparams.isEmpty) {
@@ -1393,6 +1398,13 @@ trait Typers extends Modes with Adaptations with Tags {
if (member(qual, name) != NoSymbol) qual
else adaptToMember(qual, HasMember(name))
+ private def typePrimaryConstrBody(clazz : Symbol, cbody: Tree, tparams: List[Symbol], enclTparams: List[Symbol], vparamss: List[List[ValDef]]): Tree = {
+ // XXX: see about using the class's symbol....
+ enclTparams foreach (sym => context.scope.enter(sym))
+ namer.enterValueParams(vparamss)
+ typed(cbody)
+ }
+
private def validateNoCaseAncestor(clazz: Symbol) = {
if (!phase.erasedTypes) {
for (ancestor <- clazz.ancestors find (_.isCase)) {
@@ -1452,7 +1464,7 @@ trait Typers extends Modes with Adaptations with Tags {
case DefDef(_, name, _, _, _, rhs) =>
if (stat.symbol.isAuxiliaryConstructor)
notAllowed("secondary constructor")
- else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_))
+ else if (isValueClass && (name == nme.equals_ || name == nme.hashCode_) && !stat.symbol.isSynthetic)
notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.")
else if (stat.symbol != null && stat.symbol.isParamAccessor)
notAllowed("additional parameter")
@@ -1494,243 +1506,126 @@ trait Typers extends Modes with Adaptations with Tags {
unit.error(tparam.pos, "type parameter of value class may not be specialized")
}
- /** Typechecks a parent type reference.
- *
- * This typecheck is harder than it might look, because it should honor early
- * definitions and also perform type argument inference with the help of super call
- * arguments provided in `encodedtpt`.
- *
- * The method is called in batches (batch = 1 time per each parent type referenced),
- * two batches per definition: once from namer, when entering a ClassDef or a ModuleDef
- * and once from typer, when typechecking the definition.
- *
- * ***Arguments***
- *
- * `encodedtpt` represents the parent type reference wrapped in an `Apply` node
- * which indicates value arguments (i.e. type macro arguments or super constructor call arguments)
- * If no value arguments are provided by the user, the `Apply` node is still
- * there, but its `args` will be set to `Nil`.
- * This argument is synthesized by `tools.nsc.ast.Parsers.templateParents`.
- *
- * `templ` is an enclosing template, which contains a primary constructor synthesized by the parser.
- * Such a constructor is a DefDef which contains early initializers and maybe a super constructor call
- * (I wrote "maybe" because trait constructors don't call super constructors).
- * This argument is synthesized by `tools.nsc.ast.Trees.Template`.
- *
- * `inMixinPosition` indicates whether the reference is not the first in the
- * list of parents (and therefore cannot be a class) or the opposite.
- *
- * ***Return value and side effects***
- *
- * Returns a `TypeTree` representing a resolved parent type.
- * If the typechecked parent reference implies non-nullary and non-empty argument list,
- * this argument list is attached to the returned value in SuperArgsAttachment.
- * The attachment is necessary for the subsequent typecheck to fixup a super constructor call
- * in the body of the primary constructor (see `typedTemplate` for details).
- *
- * This method might invoke `typedPrimaryConstrBody`, hence it might cause the side effects
- * described in the docs of that method. It might also attribute the Super(_, _) reference
- * (if present) inside the primary constructor of `templ`.
- *
- * ***Example***
- *
- * For the following definition:
- *
- * class D extends {
- * val x = 2
- * val y = 4
- * } with B(x)(3) with C(y) with T
- *
- * this method will be called six times:
- *
- * (3 times from the namer)
- * typedParentType(Apply(Apply(Ident(B), List(Ident(x))), List(3)), templ, inMixinPosition = false)
- * typedParentType(Apply(Ident(C), List(Ident(y))), templ, inMixinPosition = true)
- * typedParentType(Apply(Ident(T), List()), templ, inMixinPosition = true)
- *
- * (3 times from the typer)
- * <the same three calls>
- */
- private def typedParentType(encodedtpt: Tree, templ: Template, inMixinPosition: Boolean): Tree = {
- val app = treeInfo.dissectApplied(encodedtpt)
- val (treeInfo.Applied(core, targs, argss), decodedtpt) = (app, app.callee)
- val argssAreTrivial = argss == Nil || argss == ListOfNil
-
- // we cannot avoid cyclic references with `initialize` here, because when type macros arrive,
- // we'll have to check the probe for isTypeMacro anyways.
- // therefore I think it's reasonable to trade a more specific "inherits itself" error
- // for a generic, yet understandable "cyclic reference" error
- var probe = typedTypeConstructor(core.duplicate).tpe.typeSymbol
- if (probe == null) probe = NoSymbol
- probe.initialize
-
- if (probe.isTrait || inMixinPosition) {
- if (!argssAreTrivial) {
- if (probe.isTrait) ConstrArgsInParentWhichIsTraitError(encodedtpt, probe)
- else () // a class in a mixin position - this warrants an error in `validateParentClasses`
- // therefore here we do nothing, e.g. don't check that the # of ctor arguments
- // matches the # of ctor parameters or stuff like that
- }
- typedType(decodedtpt)
- } else {
- var supertpt = typedTypeConstructor(decodedtpt)
- val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else Nil
- if (supertparams.nonEmpty) {
- typedPrimaryConstrBody(templ) {
- val supertpe = PolyType(supertparams, appliedType(supertpt.tpe, supertparams map (_.tpeHK)))
- val supercall = New(supertpe, mmap(argss)(_.duplicate))
- val treeInfo.Applied(Select(ctor, nme.CONSTRUCTOR), _, _) = supercall
- ctor setType supertpe // this is an essential hack, otherwise it will occasionally fail to typecheck
- atPos(supertpt.pos.focus)(supercall)
- } match {
- case EmptyTree => MissingTypeArgumentsParentTpeError(supertpt)
- case tpt => supertpt = TypeTree(tpt.tpe) setPos supertpt.pos.focus
+ def parentTypes(templ: Template): List[Tree] =
+ if (templ.parents.isEmpty) List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
+ else try {
+ val clazz = context.owner
+ // Normalize supertype and mixins so that supertype is always a class, not a trait.
+ var supertpt = typedTypeConstructor(templ.parents.head)
+ val firstParent = supertpt.tpe.typeSymbol
+ var mixins = templ.parents.tail map typedType
+ // If first parent is a trait, make it first mixin and add its superclass as first parent
+ while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) {
+ val supertpt1 = typedType(supertpt)
+ if (!supertpt1.isErrorTyped) {
+ mixins = supertpt1 :: mixins
+ supertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
}
}
- // this is the place where we tell the typer what argss should be used for the super call
- // if argss are nullary or empty, then (see the docs for `typedPrimaryConstrBody`)
- // the super call dummy is already good enough, so we don't need to do anything
- if (argssAreTrivial) supertpt else supertpt updateAttachment SuperArgsAttachment(argss)
- }
- }
-
- /** Typechecks the mishmash of trees that happen to be stuffed into the primary constructor of a given template.
- * Before commencing the typecheck, replaces the `pendingSuperCall` dummy with the result of `actualSuperCall`.
- * `actualSuperCall` can return `EmptyTree`, in which case the dummy is replaced with a literal unit.
- *
- * ***Return value and side effects***
- *
- * If a super call is present in the primary constructor and is not erased by the transform, returns it typechecked.
- * Otherwise (e.g. if the primary constructor is missing or the super call isn't there) returns `EmptyTree`.
- *
- * As a side effect, this method attributes the underlying fields of early vals.
- * Early vals aren't typechecked anywhere else, so it's essential to call `typedPrimaryConstrBody`
- * at least once per definition. It'd be great to disentangle this logic at some point.
- *
- * ***Example***
- *
- * For the following definition:
- *
- * class D extends {
- * val x = 2
- * val y = 4
- * } with B(x)(3) with C(y) with T
- *
- * the primary constructor of `templ` will be:
- *
- * Block(List(
- * ValDef(NoMods, x, TypeTree(), 2)
- * ValDef(NoMods, y, TypeTree(), 4)
- * global.pendingSuperCall,
- * Literal(Constant(())))
- *
- * Note the `pendingSuperCall` part. This is the representation of a fill-me-in-later supercall dummy,
- * which encodes the fact that supercall argss are unknown during parsing and need to be transplanted
- * from one of the parent types. Read more about why the argss are unknown in `tools.nsc.ast.Trees.Template`.
- */
- private def typedPrimaryConstrBody(templ: Template)(actualSuperCall: => Tree): Tree =
- treeInfo.firstConstructor(templ.body) match {
- case ctor @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
- val (preSuperStats, superCall) = {
- val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
- (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
- }
- val superCall1 = (superCall match {
- case global.pendingSuperCall => actualSuperCall
- case EmptyTree => EmptyTree
- }) orElse cunit
- val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall1)
-
- val clazz = context.owner
- assert(clazz != NoSymbol, templ)
- val cscope = context.outer.makeNewScope(ctor, context.outer.owner)
- val cbody2 = { // called both during completion AND typing.
- val typer1 = newTyper(cscope)
- // XXX: see about using the class's symbol....
- clazz.unsafeTypeParams foreach (sym => typer1.context.scope.enter(sym))
- typer1.namer.enterValueParams(vparamss map (_.map(_.duplicate)))
- typer1.typed(cbody1)
- }
+ if (supertpt.tpe.typeSymbol == AnyClass && firstParent.isTrait)
+ supertpt.tpe = AnyRefClass.tpe
+
+ // Determine
+ // - supertparams: Missing type parameters from supertype
+ // - supertpe: Given supertype, polymorphic in supertparams
+ val supertparams = if (supertpt.hasSymbol) supertpt.symbol.typeParams else List()
+ var supertpe = supertpt.tpe
+ if (!supertparams.isEmpty)
+ supertpe = PolyType(supertparams, appliedType(supertpe, supertparams map (_.tpeHK)))
+
+ // A method to replace a super reference by a New in a supercall
+ def transformSuperCall(scall: Tree): Tree = (scall: @unchecked) match {
+ case Apply(fn, args) =>
+ treeCopy.Apply(scall, transformSuperCall(fn), args map (_.duplicate))
+ case Select(Super(_, _), nme.CONSTRUCTOR) =>
+ treeCopy.Select(
+ scall,
+ atPos(supertpt.pos.focus)(New(TypeTree(supertpe)) setType supertpe),
+ nme.CONSTRUCTOR)
+ }
- val preSuperVals = treeInfo.preSuperFields(templ.body)
- if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
- debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
- else
- map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
+ treeInfo.firstConstructor(templ.body) match {
+ case constr @ DefDef(_, _, _, vparamss, _, cbody @ Block(cstats, cunit)) =>
+ // Convert constructor body to block in environment and typecheck it
+ val (preSuperStats, superCall) = {
+ val (stats, rest) = cstats span (x => !treeInfo.isSuperConstrCall(x))
+ (stats map (_.duplicate), if (rest.isEmpty) EmptyTree else rest.head.duplicate)
+ }
+ val cstats1 = if (superCall == EmptyTree) preSuperStats else preSuperStats :+ superCall
+ val cbody1 = treeCopy.Block(cbody, preSuperStats, superCall match {
+ case Apply(_, _) if supertparams.nonEmpty => transformSuperCall(superCall)
+ case _ => cunit.duplicate
+ })
+ val outercontext = context.outer
+
+ assert(clazz != NoSymbol, templ)
+ val cscope = outercontext.makeNewScope(constr, outercontext.owner)
+ val cbody2 = newTyper(cscope) // called both during completion AND typing.
+ .typePrimaryConstrBody(clazz,
+ cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate)))
+
+ superCall match {
+ case Apply(_, _) =>
+ val treeInfo.Applied(_, _, argss) = superCall
+ val sarg = argss.flatten.headOption.getOrElse(EmptyTree)
+ if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
+ ConstrArgsInTraitParentTpeError(sarg, firstParent)
+ if (!supertparams.isEmpty)
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ case _ =>
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
+ }
- if (superCall1 == cunit) EmptyTree else cbody2
- case _ =>
- EmptyTree
- }
+ val preSuperVals = treeInfo.preSuperFields(templ.body)
+ if (preSuperVals.isEmpty && preSuperStats.nonEmpty)
+ debugwarn("Wanted to zip empty presuper val list with " + preSuperStats)
+ else
+ map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe)
- /** Makes sure that the first type tree in the list of parent types is always a class.
- * If the first parent is a trait, prepend its supertype to the list until it's a class.
- */
- private def normalizeFirstParent(parents: List[Tree]): List[Tree] = parents match {
- case first :: rest if treeInfo.isTraitRef(first) =>
- def explode(supertpt: Tree, acc: List[Tree]): List[Tree] = {
- if (treeInfo.isTraitRef(supertpt)) {
- val supertpt1 = typedType(supertpt)
- if (!supertpt1.isErrorTyped) {
- val supersupertpt = TypeTree(supertpt1.tpe.firstParent) setPos supertpt.pos.focus
- return explode(supersupertpt, supertpt1 :: acc)
- }
- }
- if (supertpt.tpe.typeSymbol == AnyClass) supertpt.tpe = AnyRefClass.tpe
- supertpt :: acc
+ case _ =>
+ if (!supertparams.isEmpty)
+ MissingTypeArgumentsParentTpeError(supertpt)
}
- explode(first, Nil) ++ rest
- case _ => parents
- }
+/* experimental: early types as type arguments
+ val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef)
+ val earlyMap = new EarlyMap(clazz)
+ List.mapConserve(supertpt :: mixins){ tpt =>
+ val tpt1 = checkNoEscaping.privates(clazz, tpt)
+ if (hasEarlyTypes) tpt1 else tpt1 setType earlyMap(tpt1.tpe)
+ }
+*/
- /** Certain parents are added in the parser before it is known whether
- * that class also declared them as parents. For instance, this is an
- * error unless we take corrective action here:
- *
- * case class Foo() extends Serializable
- *
- * So we strip the duplicates before typer.
- */
- private def fixDuplicateSyntheticParents(parents: List[Tree]): List[Tree] = parents match {
- case Nil => Nil
- case x :: xs =>
- val sym = x.symbol
- x :: fixDuplicateSyntheticParents(
- if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
- else xs
- )
- }
+      //Console.println("parents("+clazz+") = "+(supertpt :: mixins));//DEBUG
- def parentTypes(templ: Template): List[Tree] = templ.parents match {
- case Nil => List(atPos(templ.pos)(TypeTree(AnyRefClass.tpe)))
- case first :: rest =>
- try {
- val supertpts = fixDuplicateSyntheticParents(normalizeFirstParent(
- typedParentType(first, templ, inMixinPosition = false) +:
- (rest map (typedParentType(_, templ, inMixinPosition = true)))))
-
- // if that is required to infer the targs of a super call
- // typedParentType calls typedPrimaryConstrBody to do the inferring typecheck
- // as a side effect, that typecheck also assigns types to the fields underlying early vals
- // however if inference is not required, the typecheck doesn't happen
- // and therefore early fields have their type trees not assigned
- // here we detect this situation and take preventive measures
- if (treeInfo.hasUntypedPreSuperFields(templ.body))
- typedPrimaryConstrBody(templ)(EmptyTree)
-
- supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt))
- } catch {
- case ex: TypeError =>
- // fallback in case of cyclic errors
- // @H none of the tests enter here but I couldn't rule it out
- // upd. @E when a definitions inherits itself, we end up here
- // because `typedParentType` triggers `initialize` for parent types symbols
- log("Type error calculating parents in template " + templ)
- log("Error: " + ex)
- ParentTypesError(templ, ex)
- List(TypeTree(AnyRefClass.tpe))
+ // Certain parents are added in the parser before it is known whether
+ // that class also declared them as parents. For instance, this is an
+ // error unless we take corrective action here:
+ //
+ // case class Foo() extends Serializable
+ //
+ // So we strip the duplicates before typer.
+ def fixDuplicates(remaining: List[Tree]): List[Tree] = remaining match {
+ case Nil => Nil
+ case x :: xs =>
+ val sym = x.symbol
+ x :: fixDuplicates(
+ if (isPossibleSyntheticParent(sym)) xs filterNot (_.symbol == sym)
+ else xs
+ )
}
- }
+
+ fixDuplicates(supertpt :: mixins) mapConserve (tpt => checkNoEscaping.privates(clazz, tpt))
+ }
+ catch {
+ case ex: TypeError =>
+ // fallback in case of cyclic errors
+ // @H none of the tests enter here but I couldn't rule it out
+ log("Type error calculating parents in template " + templ)
+ log("Error: " + ex)
+ ParentTypesError(templ, ex)
+ List(TypeTree(AnyRefClass.tpe))
+ }
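
For reference, the normalization above means that when the first declared parent is a trait, the typer walks up to that trait's own superclass (falling back to AnyRef) and installs it as the supertype, demoting the trait to a mixin. In source terms, a sketch with made-up names:

    trait Greeting { def hello = "hi" }
    trait Mixin

    // Written with a trait in the first parent position...
    class C extends Greeting with Mixin

    // ...is treated as if the class parent came first, roughly:
    class CNormalized extends AnyRef with Greeting with Mixin
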
/** <p>Check that</p>
* <ul>
@@ -1748,8 +1643,8 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def validateParentClasses(parents: List[Tree], selfType: Type) {
val pending = ListBuffer[AbsTypeError]()
- def validateDynamicParent(parent: Symbol) =
- if (parent == DynamicClass) checkFeature(parent.pos, DynamicsFeature)
+ def validateDynamicParent(parent: Symbol, parentPos: Position) =
+ if (parent == DynamicClass) checkFeature(parentPos, DynamicsFeature)
def validateParentClass(parent: Tree, superclazz: Symbol) =
if (!parent.isErrorTyped) {
@@ -1799,7 +1694,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError))
pending += ParentInheritedTwiceError(parent, psym)
- validateDynamicParent(psym)
+ validateDynamicParent(psym, parent.pos)
}
if (!parents.isEmpty && parents.forall(!_.isErrorTyped)) {
@@ -1903,33 +1798,33 @@ trait Typers extends Modes with Adaptations with Tags {
})
}
val impl2 = finishMethodSynthesis(impl1, clazz, context)
-
+
// SI-5954. On second compile of a companion class contained in a package object we end up
// with some confusion of names which leads to having two symbols with the same name in the
- // same owner. Until that can be straightened out we can't allow companion objects in package
+ // same owner. Until that can be straightened out we will warn on companion objects in package
// objects. But this code also tries to be friendly by distinguishing between case classes and
// user written companion pairs
- def restrictPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
+ def warnPackageObjectMembers(mdef : ModuleDef) = for (m <- mdef.symbol.info.members) {
// ignore synthetic objects, because the "companion" object to a case class is synthetic and
// we only want one error per case class
if (!m.isSynthetic) {
// can't handle case classes in package objects
- if (m.isCaseClass) pkgObjectRestriction(m, mdef, "case")
+ if (m.isCaseClass) pkgObjectWarning(m, mdef, "case")
// can't handle companion class/object pairs in package objects
- else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
- (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
- pkgObjectRestriction(m, mdef, "companion")
+ else if ((m.isClass && m.companionModule != NoSymbol && !m.companionModule.isSynthetic) ||
+ (m.isModule && m.companionClass != NoSymbol && !m.companionClass.isSynthetic))
+ pkgObjectWarning(m, mdef, "companion")
}
- def pkgObjectRestriction(m : Symbol, mdef : ModuleDef, restricted : String) = {
+ def pkgObjectWarning(m : Symbol, mdef : ModuleDef, restricted : String) = {
val pkgName = mdef.symbol.ownerChain find (_.isPackage) map (_.decodedName) getOrElse mdef.symbol.toString
- context.error(if (m.pos.isDefined) m.pos else mdef.pos, s"implementation restriction: package object ${pkgName} cannot contain ${restricted} ${m}. Instead, ${m} should be placed directly in package ${pkgName}.")
- }
+ context.warning(if (m.pos.isDefined) m.pos else mdef.pos, s"${m} should be placed directly in package ${pkgName} instead of package object ${pkgName}. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.")
+ }
}
- if (!settings.companionsInPkgObjs.value && mdef.symbol.isPackageObject)
- restrictPackageObjectMembers(mdef)
-
+ if (mdef.symbol.isPackageObject)
+ warnPackageObjectMembers(mdef)
+
treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType
}
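
The situation the new warning flags looks roughly like this (names are illustrative):

    package object store {
      // warns: Foo should be placed directly in package store instead of
      // package object store (see SI-5954)
      case class Foo(id: Int)

      // a user-written companion pair is warned as well
      class Bar
      object Bar
    }
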
/** In order to override this in the TreeCheckers Typer so synthetics aren't re-added
@@ -1995,12 +1890,9 @@ trait Typers extends Modes with Adaptations with Tags {
// the following is necessary for templates generated later
assert(clazz.info.decls != EmptyScope, clazz)
enterSyms(context.outer.make(templ, clazz, clazz.info.decls), templ.body)
- if (!templ.isErrorTyped) // if `parentTypes` has invalidated the template, don't validate it anymore
- validateParentClasses(parents1, selfType)
+ validateParentClasses(parents1, selfType)
if (clazz.isCase)
validateNoCaseAncestor(clazz)
- if (clazz.isTrait && hasSuperArgs(parents1.head))
- ConstrArgsInParentOfTraitError(parents1.head, clazz)
if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.owner.isPackageClass)
unit.error(clazz.pos, "inner classes cannot be classfile annotations")
@@ -2008,21 +1900,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members
checkFinitary(clazz.info.resultType.asInstanceOf[ClassInfoType])
- val body = {
- val body =
- if (isPastTyper || reporter.hasErrors) templ.body
- else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
- val primaryCtor = treeInfo.firstConstructor(body)
- val primaryCtor1 = primaryCtor match {
- case DefDef(_, _, _, _, _, Block(earlyVals :+ global.pendingSuperCall, unit)) =>
- val argss = superArgs(parents1.head) getOrElse Nil
- val pos = wrappingPos(parents1.head.pos, argss.flatten)
- val superCall = atPos(pos)(PrimarySuperCall(argss))
- deriveDefDef(primaryCtor)(block => Block(earlyVals :+ superCall, unit) setPos pos) setPos pos
- case _ => primaryCtor
- }
- body mapConserve { case `primaryCtor` => primaryCtor1; case stat => stat }
- }
+ val body =
+ if (isPastTyper || reporter.hasErrors) templ.body
+ else templ.body flatMap rewrappingWrapperTrees(namer.addDerivedTrees(Typer.this, _))
val body1 = typedStats(body, templ.symbol)
@@ -2058,21 +1938,28 @@ trait Typers extends Modes with Adaptations with Tags {
* @return ...
*/
def typedValDef(vdef: ValDef): ValDef = {
-// attributes(vdef)
+ val sym = vdef.symbol
+ val valDefTyper = {
+ val maybeConstrCtx =
+ if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext
+ else context
+ newTyper(maybeConstrCtx.makeNewScope(vdef, sym))
+ }
+ valDefTyper.typedValDefImpl(vdef)
+ }
+
+ // use typedValDef instead. this version is called after creating a new context for the ValDef
+ private def typedValDefImpl(vdef: ValDef) = {
val sym = vdef.symbol.initialize
- val typer1 = constrTyperIf(sym.isParameter && sym.owner.isConstructor)
val typedMods = typedModifiers(vdef.mods)
sym.annotations.map(_.completeInfo)
- var tpt1 = checkNoEscaping.privates(sym, typer1.typedType(vdef.tpt))
+ val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt))
checkNonCyclic(vdef, tpt1)
- if (sym.hasAnnotation(definitions.VolatileAttr)) {
- if (!sym.isMutable)
- VolatileValueError(vdef)
- else if (sym.isFinal)
- FinalVolatileVarError(vdef)
- }
+ if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable)
+ VolatileValueError(vdef)
+
val rhs1 =
if (vdef.rhs.isEmpty) {
if (sym.isVariable && sym.owner.isTerm && !sym.isLazy && !isPastTyper)
@@ -2095,7 +1982,7 @@ trait Typers extends Modes with Adaptations with Tags {
else subst(tpt1.tpe.typeArgs(0))
else subst(tpt1.tpe)
} else tpt1.tpe
- newTyper(typer1.context.make(vdef, sym)).transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
+ transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2)
}
treeCopy.ValDef(vdef, typedMods, vdef.name, tpt1, checkDead(rhs1)) setType NoType
}
@@ -2213,37 +2100,58 @@ trait Typers extends Modes with Adaptations with Tags {
*/
def checkMethodStructuralCompatible(ddef: DefDef): Unit = {
val meth = ddef.symbol
- def fail(pos: Position, msg: String) = unit.error(pos, msg)
- val tp: Type = meth.tpe match {
- case mt @ MethodType(_, _) => mt
- case NullaryMethodType(restpe) => restpe // TODO_NMT: drop NullaryMethodType from resultType?
- case PolyType(_, restpe) => restpe
- case _ => NoType
- }
- def nthParamPos(n: Int) = ddef.vparamss match {
- case xs :: _ if xs.length > n => xs(n).pos
- case _ => meth.pos
- }
- def failStruct(pos: Position, what: String, where: String = "Parameter") =
- fail(pos, s"$where type in structural refinement may not refer to $what")
-
- foreachWithIndex(tp.paramTypes) { (paramType, idx) =>
- val sym = paramType.typeSymbol
- def paramPos = nthParamPos(idx)
-
- if (sym.isAbstractType) {
- if (!sym.hasTransOwner(meth.owner))
- failStruct(paramPos, "an abstract type defined outside that refinement")
- else if (!sym.hasTransOwner(meth))
- failStruct(paramPos, "a type member of that refinement")
- }
- if (sym.isDerivedValueClass)
- failStruct(paramPos, "a user-defined value class")
- if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
- failStruct(paramPos, "the type of that refinement (self type)")
- }
- if (tp.resultType.typeSymbol.isDerivedValueClass)
- failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result")
+ def parentString = meth.owner.parentSymbols filterNot (_ == ObjectClass) match {
+ case Nil => ""
+ case xs => xs.map(_.nameString).mkString(" (of ", " with ", ")")
+ }
+ def fail(pos: Position, msg: String): Boolean = {
+ unit.error(pos, msg)
+ false
+ }
+ /** Have to examine all parameters in all lists.
+ */
+ def paramssTypes(tp: Type): List[List[Type]] = tp match {
+ case mt @ MethodType(_, restpe) => mt.paramTypes :: paramssTypes(restpe)
+ case PolyType(_, restpe) => paramssTypes(restpe)
+ case _ => Nil
+ }
+ def resultType = meth.tpe.finalResultType
+ def nthParamPos(n1: Int, n2: Int) =
+ try ddef.vparamss(n1)(n2).pos catch { case _: IndexOutOfBoundsException => meth.pos }
+
+ def failStruct(pos: Position, what: String, where: String = "Parameter type") =
+ fail(pos, s"$where in structural refinement may not refer to $what")
+
+ foreachWithIndex(paramssTypes(meth.tpe)) { (paramList, listIdx) =>
+ foreachWithIndex(paramList) { (paramType, paramIdx) =>
+ val sym = paramType.typeSymbol
+ def paramPos = nthParamPos(listIdx, paramIdx)
+
+ /** Not enough to look for abstract types; have to recursively check the bounds
+ * of each abstract type for more abstract types. Almost certainly there are other
+ * exploitable type soundness bugs which can be seen by bounding a type parameter
+ * by an abstract type which itself is bounded by an abstract type.
+ */
+ def checkAbstract(tp0: Type, what: String): Boolean = {
+ def check(sym: Symbol): Boolean = !sym.isAbstractType || {
+ log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""")
+ ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what))
+ || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what))
+ || checkAbstract(sym.info.bounds.hi, "Type bound")
+ )
+ }
+ tp0.dealiasWidenChain forall (t => check(t.typeSymbol))
+ }
+ checkAbstract(paramType, "Parameter type")
+
+ if (sym.isDerivedValueClass)
+ failStruct(paramPos, "a user-defined value class")
+ if (paramType.isInstanceOf[ThisType] && sym == meth.owner)
+ failStruct(paramPos, "the type of that refinement (self type)")
+ }
+ }
+ if (resultType.typeSymbol.isDerivedValueClass)
+ failStruct(ddef.tpt.pos, "a user-defined value class", where = "Result type")
}
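
As an example, the following refinement members are rejected by this check (the error wording approximates the messages constructed above; Meters is a made-up value class):

    class Meters(val value: Double) extends AnyVal

    class Holder[T] {
      // "Parameter type in structural refinement may not refer to an abstract
      //  type defined outside that refinement" -- T belongs to Holder, not to
      //  the refinement itself.
      def bad(obj: { def use(t: T): Int }) = obj

      // "Result type in structural refinement may not refer to a user-defined
      //  value class"
      def alsoBad(obj: { def size: Meters }) = obj
    }
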
def typedUseCase(useCase: UseCase) {
@@ -2373,13 +2281,12 @@ trait Typers extends Modes with Adaptations with Tags {
}
def typedTypeDef(tdef: TypeDef): TypeDef =
- typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){
- _.typedTypeDef0(tdef)
+ typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty) {
+ _.typedTypeDefImpl(tdef)
}
- // call typedTypeDef instead
- // a TypeDef with type parameters must always be type checked in a new scope
- private def typedTypeDef0(tdef: TypeDef): TypeDef = {
+ // use typedTypeDef instead. this version is called after creating a new context for the TypeDef
+ private def typedTypeDefImpl(tdef: TypeDef): TypeDef = {
tdef.symbol.initialize
reenterTypeParams(tdef.tparams)
val tparams1 = tdef.tparams mapConserve typedTypeDef
@@ -2696,8 +2603,13 @@ trait Typers extends Modes with Adaptations with Tags {
def mkParam(methodSym: Symbol, tp: Type = argTp) =
methodSym.newValueParameter(paramName, paramPos.focus, SYNTHETIC) setInfo tp
+ def mkDefaultCase(body: Tree) =
+ atPos(tree.pos.makeTransparent) {
+ CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), body)
+ }
+
// `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 =
- // ${`$selector match { $cases }` updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x)))}`
+ // ${`$selector match { $cases; case default$ => default(x) }`
def applyOrElseMethodDef = {
val methodSym = anonClass.newMethod(nme.applyOrElse, tree.pos, FINAL | OVERRIDE)
@@ -2706,7 +2618,7 @@ trait Typers extends Modes with Adaptations with Tags {
val x = mkParam(methodSym, A1.tpe)
// applyOrElse's default parameter:
- val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty //lower(resTp)
+ val B1 = methodSym newTypeParameter (newTypeName("B1")) setInfo TypeBounds.empty
val default = methodSym newValueParameter (newTermName("default"), tree.pos.focus, SYNTHETIC) setInfo functionType(List(A1.tpe), B1.tpe)
val paramSyms = List(x, default)
@@ -2716,19 +2628,72 @@ trait Typers extends Modes with Adaptations with Tags {
// should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it)
paramSyms foreach (methodBodyTyper.context.scope enter _)
- val match_ = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ // First, type without the default case; only the cases provided
+ // by the user are typed. The LUB of these becomes `B`, the lower
+ // bound of `B1`, which in turn is the result type of the default
+ // case
+ val match0 = methodBodyTyper.typedMatch(selector, cases, mode, resTp)
+ val matchResTp = match0.tpe
- val matchResTp = match_.tpe
B1 setInfo TypeBounds.lower(matchResTp) // patch info
+ // the default uses applyOrElse's first parameter since the scrut's type has been widened
+ val match_ = {
+ val defaultCase = methodBodyTyper.typedCase(
+ mkDefaultCase(methodBodyTyper.typed1(REF(default) APPLY (REF(x)), mode, B1.tpe).setType(B1.tpe)), argTp, B1.tpe)
+ treeCopy.Match(match0, match0.selector, match0.cases :+ defaultCase)
+ }
match_ setType B1.tpe
- // the default uses applyOrElse's first parameter since the scrut's type has been widened
- val matchWithDefault = match_ updateAttachment DefaultOverrideMatchAttachment(REF(default) APPLY (REF(x)))
- (DefDef(methodSym, methodBodyTyper.virtualizedMatch(matchWithDefault, mode, B1.tpe)), matchResTp)
+ // SI-6187 Do you really want to know? Okay, here's what's going on here.
+ //
+ // Well behaved trees satisfy the property:
+ //
+      //    typed(tree) == typed(resetLocalAttrs(typed(tree)))
+ //
+ // Trees constructed without low-level symbol manipulation get this for free;
+ // references to local symbols are cleared by `ResetAttrs`, but bind to the
+ // corresponding symbol in the re-typechecked tree. But PartialFunction synthesis
+ // doesn't play by these rules.
+ //
+ // During typechecking of method bodies, references to method type parameter from
+ // the declared types of the value parameters should bind to a fresh set of skolems,
+ // which have been entered into scope by `Namer#methodSig`. A comment therein:
+ //
+ // "since the skolemized tparams are in scope, the TypeRefs in vparamSymss refer to skolemized tparams"
+ //
+ // But, if we retypecheck the reset `applyOrElse`, the TypeTree of the `default`
+ // parameter contains no type. Somehow (where?!) it recovers a type that is _almost_ okay:
+ // `A1 => B1`. But it should really be `A1&0 => B1&0`. In the test, run/t6187.scala, this
+ // difference results in a type error, as `default.apply(x)` types as `B1`, which doesn't
+ // conform to the required `B1&0`
+ //
+ // I see three courses of action.
+ //
+ // 1) synthesize a `asInstanceOf[B1]` below (I tried this first. But... ewwww.)
+      //  2) install an 'original' TypeTree that will be used after ResetAttrs (the solution below)
+ // 3) Figure out how the almost-correct type is recovered on re-typechecking, and
+ // substitute in the skolems.
+ //
+ // For 2.11, we'll probably shift this transformation back a phase or two, so macros
+ // won't be affected. But in any case, we should satisfy retypecheckability.
+ //
+ val originals: Map[Symbol, Tree] = {
+ def typedIdent(sym: Symbol) = methodBodyTyper.typedType(Ident(sym), mode)
+ val A1Tpt = typedIdent(A1)
+ val B1Tpt = typedIdent(B1)
+ Map(
+ x -> A1Tpt,
+ default -> gen.scalaFunctionConstr(List(A1Tpt), B1Tpt)
+ )
+ }
+ val rhs = methodBodyTyper.virtualizedMatch(match_, mode, B1.tpe)
+ val defdef = DefDef(methodSym, Modifiers(methodSym.flags), originals, rhs)
+
+ (defdef, matchResTp)
}
- // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue ` updateAttachment DefaultOverrideMatchAttachment(FALSE_typed)}`
+ // `def isDefinedAt(x: $argTp): Boolean = ${`$selector match { $casesTrue; case default$ => false } }`
def isDefinedAtMethod = {
val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL)
val paramSym = mkParam(methodSym)
@@ -2737,10 +2702,10 @@ trait Typers extends Modes with Adaptations with Tags {
methodBodyTyper.context.scope enter paramSym
methodSym setInfo MethodType(List(paramSym), BooleanClass.tpe)
- val match_ = methodBodyTyper.typedMatch(selector, casesTrue, mode, BooleanClass.tpe)
+ val defaultCase = mkDefaultCase(FALSE_typed)
+ val match_ = methodBodyTyper.typedMatch(selector, casesTrue :+ defaultCase, mode, BooleanClass.tpe)
- val matchWithDefault = match_ updateAttachment DefaultOverrideMatchAttachment(FALSE_typed)
- DefDef(methodSym, methodBodyTyper.virtualizedMatch(matchWithDefault, mode, BooleanClass.tpe))
+ DefDef(methodSym, methodBodyTyper.virtualizedMatch(match_, mode, BooleanClass.tpe))
}
// only used for @cps annotated partial functions
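
Conceptually, for a user-written partial function literal the synthesized members now carry the default case inside the match itself rather than as an attachment; a hand-written approximation (not the literal trees the typer builds):

    val pf: PartialFunction[Int, String] = { case 1 => "one" }

    // roughly behaves like:
    val pfExpanded = new PartialFunction[Int, String] {
      override def applyOrElse[A1 <: Int, B1 >: String](x: A1, default: A1 => B1): B1 =
        x match {
          case 1 => "one"
          case _ => default(x)      // the synthesized default case
        }
      def isDefinedAt(x: Int): Boolean =
        x match {
          case 1 => true
          case _ => false           // synthesized `case default$ => false`
        }
      def apply(x: Int): String = applyOrElse(x, PartialFunction.empty)
    }
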
@@ -2785,7 +2750,9 @@ trait Typers extends Modes with Adaptations with Tags {
members foreach (m => anonClass.info.decls enter m.symbol)
val typedBlock = typedPos(tree.pos, mode, pt) {
- Block(ClassDef(anonClass, NoMods, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(New(anonClass.tpe)))
+ Block(ClassDef(anonClass, NoMods, ListOfNil, ListOfNil, members, tree.pos.focus), atPos(tree.pos.focus)(
+ Apply(Select(New(Ident(anonClass.name).setSymbol(anonClass)), nme.CONSTRUCTOR), List())
+ ))
}
if (typedBlock.isErrorTyped) typedBlock
@@ -4434,8 +4401,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (typed(expr).tpe.typeSymbol != UnitClass)
unit.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded")
}
- treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
- .setType(adaptTypeOfReturn(expr1, restpt.tpe, NothingClass.tpe))
+ val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner)
+ val tp = pluginsTypedReturn(NothingClass.tpe, this, res, restpt.tpe)
+ res.setType(tp)
}
}
}
@@ -5339,10 +5307,14 @@ trait Typers extends Modes with Adaptations with Tags {
typed(docdef.definition, mode, pt)
}
+ /**
+ * The typer with the correct context for a method definition. If the method is a default getter for
+ * a constructor default, the resulting typer has a constructor context (fixes SI-5543).
+ */
def defDefTyper(ddef: DefDef) = {
- val flag = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
+ val isConstrDefaultGetter = ddef.mods.hasDefaultFlag && sym.owner.isModuleClass &&
nme.defaultGetterToMethod(sym.name) == nme.CONSTRUCTOR
- newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(flag)
+ newTyper(context.makeNewScope(ddef, sym)).constrTyperIf(isConstrDefaultGetter)
}
def typedAlternative(alt: Alternative) = {
@@ -5367,7 +5339,7 @@ trait Typers extends Modes with Adaptations with Tags {
var block1 = typed(tree.block, pt)
var catches1 = typedCases(tree.catches, ThrowableClass.tpe, pt)
- for (cdef <- catches1 if cdef.guard.isEmpty) {
+ for (cdef <- catches1 if !isPastTyper && cdef.guard.isEmpty) {
def warn(name: Name) = context.warning(cdef.pat.pos, s"This catches all Throwables. If this is really intended, use `case ${name.decoded} : Throwable` to clear this warning.")
def unbound(t: Tree) = t.symbol == null || t.symbol == NoSymbol
cdef.pat match {
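
An example of the code this now warns about, and the explicit form that silences it:

    object CatchExample {
      def parse(s: String): Int =
        try s.toInt
        catch {
          // warns: This catches all Throwables. If this is really intended,
          // use `case e : Throwable` to clear this warning.
          case e => -1
        }

      def parseExplicit(s: String): Int =
        try s.toInt
        catch { case e: Throwable => -1 }   // intent is explicit, no warning
    }
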
@@ -5629,20 +5601,21 @@ trait Typers extends Modes with Adaptations with Tags {
lastTreeToTyper = tree
indentTyping()
- var alreadyTyped = false
+ val ptPlugins = pluginsPt(pt, this, tree, mode)
+
val startByType = if (Statistics.canEnable) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null
if (Statistics.canEnable) Statistics.incCounter(visitsByType, tree.getClass)
try {
if (context.retyping &&
- (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< pt))) {
+ (tree.tpe ne null) && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins))) {
tree.tpe = null
if (tree.hasSymbol) tree.symbol = NoSymbol
}
- alreadyTyped = tree.tpe ne null
+ val alreadyTyped = tree.tpe ne null
var tree1: Tree = if (alreadyTyped) tree else {
printTyping(
- ptLine("typing %s: pt = %s".format(ptTree(tree), pt),
+ ptLine("typing %s: pt = %s".format(ptTree(tree), ptPlugins),
"undetparams" -> context.undetparams,
"implicitsEnabled" -> context.implicitsEnabled,
"enrichmentEnabled" -> context.enrichmentEnabled,
@@ -5651,7 +5624,7 @@ trait Typers extends Modes with Adaptations with Tags {
"context.owner" -> context.owner
)
)
- typed1(tree, mode, dropExistential(pt))
+ typed1(tree, mode, dropExistential(ptPlugins))
}
// Can happen during erroneous compilation - error(s) have been
// reported, but we need to avoid causing an NPE with this tree
@@ -5665,12 +5638,12 @@ trait Typers extends Modes with Adaptations with Tags {
)
}
- tree1.tpe = addAnnotations(tree1, tree1.tpe)
- val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, tree)
+ tree1.tpe = pluginsTyped(tree1.tpe, this, tree1, mode, ptPlugins)
+ val result = if (tree1.isEmpty) tree1 else adapt(tree1, mode, ptPlugins, tree)
if (!alreadyTyped) {
printTyping("adapted %s: %s to %s, %s".format(
- tree1, tree1.tpe.widen, pt, context.undetparamsString)
+ tree1, tree1.tpe.widen, ptPlugins, context.undetparamsString)
) //DEBUG
}
if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result)
@@ -5685,7 +5658,7 @@ trait Typers extends Modes with Adaptations with Tags {
setError(tree)
case ex: Exception =>
if (settings.debug.value) // @M causes cyclic reference error
- Console.println("exception when typing "+tree+", pt = "+pt)
+ Console.println("exception when typing "+tree+", pt = "+ptPlugins)
if (context != null && context.unit.exists && tree != null)
logError("AT: " + (tree.pos).dbgString, ex)
throw ex
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index 5782d7bbca..b51dc0ccd5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -79,8 +79,9 @@ trait Unapplies extends ast.TreeDSL
private def toIdent(x: DefTree) = Ident(x.name) setPos x.pos.focus
- private def classType(cdef: ClassDef, tparams: List[TypeDef], symbolic: Boolean = true): Tree = {
- val tycon = if (symbolic) REF(cdef.symbol) else Ident(cdef.name)
+ private def classType(cdef: ClassDef, tparams: List[TypeDef]): Tree = {
+ // SI-7033 Unattributed to avoid forcing `cdef.symbol.info`.
+ val tycon = Ident(cdef.symbol)
if (tparams.isEmpty) tycon else AppliedTypeTree(tycon, tparams map toIdent)
}
@@ -93,12 +94,33 @@ trait Unapplies extends ast.TreeDSL
* @param param The name of the parameter of the unapply method, assumed to be of type C[Ts]
* @param caseclazz The case class C[Ts]
*/
- private def caseClassUnapplyReturnValue(param: Name, caseclazz: Symbol) = {
- def caseFieldAccessorValue(selector: Symbol): Tree = Ident(param) DOT selector
+ private def caseClassUnapplyReturnValue(param: Name, caseclazz: ClassDef) = {
+ def caseFieldAccessorValue(selector: ValDef): Tree = {
+ val accessorName = selector.name
+ val privateLocalParamAccessor = caseclazz.impl.body.collectFirst {
+ case dd: ValOrDefDef if dd.name == accessorName && dd.mods.isPrivateLocal => dd.symbol
+ }
+ privateLocalParamAccessor match {
+ case None =>
+          // Selecting by name seems to be the most straightforward way here to
+ // avoid forcing the symbol of the case class in order to list the accessors.
+ val maybeRenamedAccessorName = caseAccessorName(caseclazz.symbol, accessorName)
+ Ident(param) DOT maybeRenamedAccessorName
+ case Some(sym) =>
+ // But, that gives a misleading error message in neg/t1422.scala, where a case
+ // class has an illegal private[this] parameter. We can detect this by checking
+ // the modifiers on the param accessors.
+ //
+ // We just generate a call to that param accessor here, which gives us an inaccessible
+ // symbol error, as before.
+ Ident(param) DOT sym
+ }
+ }
- caseclazz.caseFieldAccessors match {
- case Nil => TRUE
- case xs => SOME(xs map caseFieldAccessorValue: _*)
+ // Working with trees, rather than symbols, to avoid cycles like SI-5082
+ constrParamss(caseclazz).take(1).flatten match {
+ case Nil => TRUE
+ case xs => SOME(xs map caseFieldAccessorValue: _*)
}
}
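
For a concrete case class, the generated extractor ends up behaving like the hand-written version below (the real implementation selects the param accessors by name or symbol, as the comments above describe):

    case class Person(name: String, age: Int)

    // approximate behaviour of the synthesized Person.unapply:
    object PersonExtractor {
      def unapply(p: Person): Option[(String, Int)] =
        if (p == null) None else Some((p.name, p.age))
    }
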
@@ -112,7 +134,7 @@ trait Unapplies extends ast.TreeDSL
def createFun = gen.scalaFunctionConstr(primaries, toIdent(cdef), abstractFun = true)
def parents = if (inheritFromFun) List(createFun) else Nil
def toString = DefDef(
- Modifiers(OVERRIDE | FINAL),
+ Modifiers(OVERRIDE | FINAL | SYNTHETIC),
nme.toString_,
Nil,
ListOfNil,
@@ -126,17 +148,17 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
+ Template(parents, emptyValDef, NoMods, Nil, ListOfNil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
/** The apply method corresponding to a case class
*/
- def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef, symbolic: Boolean): DefDef = {
+ def factoryMeth(mods: Modifiers, name: TermName, cdef: ClassDef): DefDef = {
val tparams = cdef.tparams map copyUntypedInvariant
val cparamss = constrParamss(cdef)
- def classtpe = classType(cdef, tparams, symbolic)
+ def classtpe = classType(cdef, tparams)
atPos(cdef.pos.focus)(
DefDef(mods, name, tparams, cparamss, classtpe,
New(classtpe, mmap(cparamss)(gen.paramToArg)))
@@ -145,7 +167,7 @@ trait Unapplies extends ast.TreeDSL
/** The apply method corresponding to a case class
*/
- def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef, symbolic = true)
+ def caseModuleApplyMeth(cdef: ClassDef): DefDef = factoryMeth(caseMods, nme.apply, cdef)
/** The unapply method corresponding to a case class
*/
@@ -157,7 +179,7 @@ trait Unapplies extends ast.TreeDSL
}
val cparams = List(ValDef(Modifiers(PARAM | SYNTHETIC), unapplyParamName, classType(cdef, tparams), EmptyTree))
val ifNull = if (constrParamss(cdef).head.isEmpty) FALSE else REF(NoneModule)
- val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef.symbol) }, ifNull)(Ident(unapplyParamName))
+ val body = nullSafe({ case Ident(x) => caseClassUnapplyReturnValue(x, cdef) }, ifNull)(Ident(unapplyParamName))
atPos(cdef.pos.focus)(
DefDef(caseMods, method, tparams, List(cparams), TypeTree(), body)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Variances.scala b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
index 7d97b0c782..ea436a71fb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Variances.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Variances.scala
@@ -67,6 +67,8 @@ trait Variances {
def varianceInType(tp: Type)(tparam: Symbol): Int = tp match {
case ErrorType | WildcardType | NoType | NoPrefix | ThisType(_) | ConstantType(_) =>
VARIANCES
+ case BoundedWildcardType(bounds) =>
+ varianceInType(bounds)(tparam)
case SingleType(pre, sym) =>
varianceInType(pre)(tparam)
case TypeRef(pre, sym, args) =>
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index 0125f1b189..95135b84e0 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -230,6 +230,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
emptyValDef,
NoMods,
List(),
+ List(List()),
List(methdef),
NoPosition))
trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value))
diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala
index d29a370c28..1b06ce2ff2 100644
--- a/src/compiler/scala/tools/util/SocketServer.scala
+++ b/src/compiler/scala/tools/util/SocketServer.scala
@@ -16,8 +16,8 @@ trait CompileOutputCommon {
def verbose: Boolean
def info(msg: String) = if (verbose) echo(msg)
- def echo(msg: String) = Console println msg
- def warn(msg: String) = System.err println msg
+ def echo(msg: String) = {Console println msg; Console.flush}
+ def warn(msg: String) = {Console.err println msg; Console.flush}
def fatal(msg: String) = { warn(msg) ; sys.exit(1) }
}
diff --git a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
index 15025f85e3..00c72cf423 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/CPSAnnotationChecker.scala
@@ -9,6 +9,7 @@ import scala.tools.nsc.MissingRequirementError
abstract class CPSAnnotationChecker extends CPSUtils with Modes {
val global: Global
import global._
+ import analyzer.{AnalyzerPlugin, Typer}
import definitions._
//override val verbose = true
@@ -18,12 +19,12 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* Checks whether @cps annotations conform
*/
object checker extends AnnotationChecker {
- private def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
- private def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
+ private[CPSAnnotationChecker] def addPlusMarker(tp: Type) = tp withAnnotation newPlusMarker()
+ private[CPSAnnotationChecker] def addMinusMarker(tp: Type) = tp withAnnotation newMinusMarker()
- private def cleanPlus(tp: Type) =
+ private[CPSAnnotationChecker] def cleanPlus(tp: Type) =
removeAttribs(tp, MarkerCPSAdaptPlus, MarkerCPSTypes)
- private def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
+ private[CPSAnnotationChecker] def cleanPlusWith(tp: Type)(newAnnots: AnnotationInfo*) =
cleanPlus(tp) withAnnotations newAnnots.toList
/** Check annotations to decide whether tpe1 <:< tpe2 */
@@ -116,8 +117,13 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else
bounds
}
+ }
+
+ object plugin extends AnalyzerPlugin {
+
+ import checker._
- override def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
if (!cpsEnabled) return false
vprintln("can adapt annotations? " + tree + " / " + tree.tpe + " / " + Integer.toHexString(mode) + " / " + pt)
@@ -183,7 +189,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
} else false
}
- override def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
+ override def adaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Tree = {
if (!cpsEnabled) return tree
vprintln("adapt annotations " + tree + " / " + tree.tpe + " / " + modeString(mode) + " / " + pt)
@@ -239,14 +245,15 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
* is in tail position. Therefore, we are making sure that only the types of return expressions
* are adapted which will either be removed, or lead to an error.
*/
- override def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = {
+ override def pluginsTypedReturn(default: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ val expr = tree.expr
// only adapt if method's result type (pt) is cps type
val annots = cpsParamAnnotation(pt)
if (annots.nonEmpty) {
- // return type of `tree` without plus marker, but only if it doesn't have other cps annots
- if (hasPlusMarker(tree.tpe) && !hasCpsParamTypes(tree.tpe))
- tree.setType(removeAttribs(tree.tpe, MarkerCPSAdaptPlus))
- tree.tpe
+ // return type of `expr` without plus marker, but only if it doesn't have other cps annots
+ if (hasPlusMarker(expr.tpe) && !hasCpsParamTypes(expr.tpe))
+ expr.setType(removeAttribs(expr.tpe, MarkerCPSAdaptPlus))
+ expr.tpe
} else default
}
@@ -393,7 +400,7 @@ abstract class CPSAnnotationChecker extends CPSUtils with Modes {
/** Modify the type that has thus far been inferred
* for a tree. All this should do is add annotations. */
- override def addAnnotations(tree: Tree, tpe: Type): Type = {
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
import scala.util.control._
if (!cpsEnabled) {
if (Exception.failAsValue(classOf[MissingRequirementError])(false)(hasCpsParamTypes(tpe)))
diff --git a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
index 8a500d6c4d..237159795a 100644
--- a/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
+++ b/src/continuations/plugin/scala/tools/selectivecps/SelectiveCPSPlugin.scala
@@ -33,6 +33,7 @@ class SelectiveCPSPlugin(val global: Global) extends Plugin {
val global: SelectiveCPSPlugin.this.global.type = SelectiveCPSPlugin.this.global
}
global.addAnnotationChecker(checker.checker)
+ global.analyzer.addAnalyzerPlugin(checker.plugin)
global.log("instantiated cps plugin: " + this)
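
A minimal sketch of a plugin using the new analyzer-plugin hooks, based only on the entry points visible in this diff (addAnalyzerPlugin, pluginsTyped); the plugin name and everything else here is illustrative:

    import scala.tools.nsc.Global
    import scala.tools.nsc.plugins.{Plugin, PluginComponent}

    class TypeTracer(val global: Global) extends Plugin {
      import global._

      val name = "type-tracer"
      val description = "logs every type the typer assigns"
      val components: List[PluginComponent] = Nil

      object tracer extends analyzer.AnalyzerPlugin {
        override def pluginsTyped(tpe: Type, typer: analyzer.Typer, tree: Tree,
                                  mode: Int, pt: Type): Type = {
          global.log("typed " + tree.getClass.getSimpleName + ": " + tpe)
          tpe // leave the inferred type unchanged
        }
      }

      global.analyzer.addAnalyzerPlugin(tracer)
    }
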
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index 90684b5fdd..b9f51803ec 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -115,6 +115,8 @@ object Array extends FallbackArrayBuilding {
* @param xs the elements to put in the array
* @return an array containing all elements from xs.
*/
+ // Subject to a compiler optimization in Cleanup.
+  // Array(e0, ..., en) is translated to { val a = new Array(n + 1); a(i) = ei; a }
def apply[T: ClassTag](xs: T*): Array[T] = {
val array = new Array[T](xs.length)
var i = 0
@@ -123,6 +125,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Boolean` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Boolean, xs: Boolean*): Array[Boolean] = {
val array = new Array[Boolean](xs.length + 1)
array(0) = x
@@ -132,6 +135,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Byte` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Byte, xs: Byte*): Array[Byte] = {
val array = new Array[Byte](xs.length + 1)
array(0) = x
@@ -141,6 +145,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Short` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Short, xs: Short*): Array[Short] = {
val array = new Array[Short](xs.length + 1)
array(0) = x
@@ -150,6 +155,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Char` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Char, xs: Char*): Array[Char] = {
val array = new Array[Char](xs.length + 1)
array(0) = x
@@ -159,6 +165,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Int` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Int, xs: Int*): Array[Int] = {
val array = new Array[Int](xs.length + 1)
array(0) = x
@@ -168,6 +175,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Long` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Long, xs: Long*): Array[Long] = {
val array = new Array[Long](xs.length + 1)
array(0) = x
@@ -177,6 +185,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Float` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Float, xs: Float*): Array[Float] = {
val array = new Array[Float](xs.length + 1)
array(0) = x
@@ -186,6 +195,7 @@ object Array extends FallbackArrayBuilding {
}
/** Creates an array of `Double` objects */
+ // Subject to a compiler optimization in Cleanup, see above.
def apply(x: Double, xs: Double*): Array[Double] = {
val array = new Array[Double](xs.length + 1)
array(0) = x
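
In source terms, the Cleanup optimization these comments refer to turns a varargs Array(...) call into direct element stores, roughly:

    val viaApply = Array(1, 2, 3)

    // is compiled down to code equivalent to:
    val viaStores = {
      val a = new Array[Int](3)
      a(0) = 1; a(1) = 2; a(2) = 3
      a
    }
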
diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala
index 49fea9434c..adb6de6afd 100644
--- a/src/library/scala/annotation/migration.scala
+++ b/src/library/scala/annotation/migration.scala
@@ -17,7 +17,8 @@ package scala.annotation
* order between Scala 2.7 and 2.8.
*
* @param message A message describing the change, which is emitted
- * by the compiler if the flag `-Xmigration` is set.
+ * by the compiler if the flag `-Xmigration` indicates a version
+ * prior to the changedIn version.
*
* @param changedIn The version, in which the behaviour change was
* introduced.
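
The gating this describes reduces to a version comparison between the -Xmigration argument and the annotation's changedIn field; a minimal standalone sketch of the rule (the types and names here are illustrative, not the compiler's):

    case class V(major: Int, minor: Int) extends Ordered[V] {
      def compare(that: V) =
        if (major != that.major) major compare that.major
        else minor compare that.minor
    }

    // warn only when migrating from a version older than the change
    def emitMigrationWarning(xmigration: Option[V], changedIn: V): Boolean =
      xmigration.exists(_ < changedIn)

    // emitMigrationWarning(Some(V(2, 7)), V(2, 8))  == true
    // emitMigrationWarning(Some(V(2, 9)), V(2, 8))  == false
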
diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala
index 63e5adf428..2de0043c96 100644
--- a/src/library/scala/collection/IndexedSeq.scala
+++ b/src/library/scala/collection/IndexedSeq.scala
@@ -28,10 +28,10 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
+object IndexedSeq extends SeqFactory[IndexedSeq] {
// A single CBF which can be checked against to identify
// an indexed collection type.
- override val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ override lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
def newBuilder[A]: Builder[A, IndexedSeq[A]] = immutable.IndexedSeq.newBuilder[A]
diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala
index a43862abaf..2d3f7e609b 100644
--- a/src/library/scala/collection/generic/GenTraversableFactory.scala
+++ b/src/library/scala/collection/generic/GenTraversableFactory.scala
@@ -38,10 +38,12 @@ import scala.language.higherKinds
abstract class GenTraversableFactory[CC[X] <: GenTraversable[X] with GenericTraversableTemplate[X, CC]]
extends GenericCompanion[CC] {
- private[this] val ReusableCBFInstance: GenericCanBuildFrom[Nothing] = new GenericCanBuildFrom[Nothing] {
+ // A default implementation of GenericCanBuildFrom which can be cast
+ // to whatever is desired.
+ private class ReusableCBF extends GenericCanBuildFrom[Nothing] {
override def apply() = newBuilder[Nothing]
}
- def ReusableCBF: GenericCanBuildFrom[Nothing] = ReusableCBFInstance
+ lazy val ReusableCBF: GenericCanBuildFrom[Nothing] = new ReusableCBF
/** A generic implementation of the `CanBuildFrom` trait, which forwards
* all calls to `apply(from)` to the `genericBuilder` method of
diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala
deleted file mode 100644
index 200d033c2d..0000000000
--- a/src/library/scala/collection/generic/IndexedSeqFactory.scala
+++ /dev/null
@@ -1,21 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2011, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.collection
-package generic
-
-import language.higherKinds
-
-/** A template for companion objects of IndexedSeq and subclasses thereof.
- *
- * @since 2.10
- */
-abstract class IndexedSeqFactory[CC[X] <: IndexedSeq[X] with GenericTraversableTemplate[X, CC]] extends SeqFactory[CC] {
- override def ReusableCBF: GenericCanBuildFrom[Nothing] =
- scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
-}
diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala
index bf4ba3a381..96414c07ef 100644
--- a/src/library/scala/collection/immutable/IndexedSeq.scala
+++ b/src/library/scala/collection/immutable/IndexedSeq.scala
@@ -31,7 +31,9 @@ trait IndexedSeq[+A] extends Seq[A]
* @define coll indexed sequence
* @define Coll `IndexedSeq`
*/
-object IndexedSeq extends IndexedSeqFactory[IndexedSeq] {
+object IndexedSeq extends SeqFactory[IndexedSeq] {
+ override lazy val ReusableCBF =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
class Impl[A](buf: ArrayBuffer[A]) extends AbstractSeq[A] with IndexedSeq[A] with Serializable {
def length = buf.length
def apply(idx: Int) = buf.apply(idx)
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 56e386ad67..9765e7c52f 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -55,6 +55,12 @@ import java.io._
* val shorter = mainList.tail // costs nothing as it uses the same 2::1::Nil instances as mainList
* }}}
*
+ *  @note The functional list is characterized by persistence and structural sharing, thus offering considerable
+ *        performance and space consumption benefits in some scenarios if used correctly.
+ *        However, note that objects having multiple references into the same functional list (that is,
+ *        objects that rely on structural sharing) will be serialized and deserialized with multiple lists, one for
+ *        each reference to it; that is, structural sharing is lost after serialization/deserialization.
+ *
* @author Martin Odersky and others
* @version 2.8
* @since 1.0
@@ -295,6 +301,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
}
result
}
+
+ override def foldRight[B](z: B)(op: (A, B) => B): B =
+ reverse.foldLeft(z)((right, left) => op(left, right))
override def stringPrefix = "List"
@@ -349,25 +358,8 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
override def tail : List[B] = tl
override def isEmpty: Boolean = false
- private def writeObject(out: ObjectOutputStream) {
- out.writeObject(ListSerializeStart) // needed to differentiate with the legacy `::` serialization
- out.writeObject(this.hd)
- out.writeObject(this.tl)
- }
-
private def readObject(in: ObjectInputStream) {
- val obj = in.readObject()
- if (obj == ListSerializeStart) {
- this.hd = in.readObject().asInstanceOf[B]
- this.tl = in.readObject().asInstanceOf[List[B]]
- } else oldReadObject(in, obj)
- }
-
- /* The oldReadObject method exists here for compatibility reasons.
- * :: objects used to be serialized by serializing all the elements to
- * the output stream directly, but this was broken (see SI-5374).
- */
- private def oldReadObject(in: ObjectInputStream, firstObject: AnyRef) {
+ val firstObject = in.readObject()
hd = firstObject.asInstanceOf[B]
assert(hd != ListSerializeEnd)
var current: ::[B] = this
@@ -375,14 +367,14 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend
case ListSerializeEnd =>
current.tl = Nil
return
- case a : Any =>
+ case a =>
val list : ::[B] = new ::(a.asInstanceOf[B], Nil)
current.tl = list
current = list
}
}
- private def oldWriteObject(out: ObjectOutputStream) {
+ private def writeObject(out: ObjectOutputStream) {
var xs: List[B] = this
while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail }
out.writeObject(ListSerializeEnd)
@@ -651,10 +643,6 @@ object List extends SeqFactory[List] {
}
/** Only used for list serialization */
-@SerialVersionUID(0L - 8287891243975527522L)
-private[scala] case object ListSerializeStart
-
-/** Only used for list serialization */
@SerialVersionUID(0L - 8476791151975527571L)
private[scala] case object ListSerializeEnd
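
The new foldRight override trades a reversed O(n) copy for stack safety; the result is the same right-associated fold, e.g.:

    val xs = List(1, 2, 3, 4)

    // right-associated: 1 - (2 - (3 - (4 - 0))) == -2
    val direct  = xs.foldRight(0)(_ - _)
    // what the override does internally
    val viaLeft = xs.reverse.foldLeft(0)((acc, x) => x - acc)

    assert(direct == viaLeft && direct == -2)
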
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 1c461973e4..5bb4ef5f21 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -841,9 +841,16 @@ self =>
* // produces: "1, 2, 3, 4, 5, 6"
* }}}
*/
- override def distinct: Stream[A] =
- if (isEmpty) this
- else cons(head, tail.filter(head != _).distinct)
+ override def distinct: Stream[A] = {
+ // This should use max memory proportional to N, whereas
+ // recursively calling distinct on the tail is N^2.
+ def loop(seen: Set[A], rest: Stream[A]): Stream[A] = {
+ if (rest.isEmpty) rest
+ else if (seen(rest.head)) loop(seen, rest.tail)
+ else cons(rest.head, loop(seen + rest.head, rest.tail))
+ }
+ loop(Set(), this)
+ }
/** Returns a new sequence of given length containing the elements of this
* sequence followed by zero or more occurrences of given elements.
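
The rewritten distinct remains lazy, so it still works on infinite streams while carrying a single Set of the elements seen so far; for example:

    val cyclic = Stream.from(1).map(_ % 3)   // 1, 2, 0, 1, 2, 0, ...

    // only the first occurrence of each value is kept, and laziness lets us
    // take a finite prefix from the infinite input
    assert(cyclic.distinct.take(3).toList == List(1, 2, 0))
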
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index f083e80175..bcce4a99bd 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -18,7 +18,16 @@ import scala.collection.parallel.immutable.ParVector
/** Companion object to the Vector class
*/
-object Vector extends IndexedSeqFactory[Vector] {
+object Vector extends SeqFactory[Vector] {
+ // left lying around for binary compatibility check
+ private[collection] class VectorReusableCBF extends GenericCanBuildFrom[Nothing] {
+ override def apply() = newBuilder[Nothing]
+ }
+ // left lying around for binary compatibility check
+ private val VectorReusableCBF: GenericCanBuildFrom[Nothing] = new VectorReusableCBF
+
+ override lazy val ReusableCBF =
+ scala.collection.IndexedSeq.ReusableCBF.asInstanceOf[GenericCanBuildFrom[Nothing]]
def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A]
implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] =
ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]]
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index 6b778b26f5..bb938a7aeb 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -52,20 +52,6 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
super.toArray[U]
}
- def :+[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
- val result = Array.ofDim[B](repr.length + 1)
- Array.copy(repr, 0, result, 0, repr.length)
- result(repr.length) = elem
- result
- }
-
- def +:[B >: T: scala.reflect.ClassTag](elem: B): Array[B] = {
- val result = Array.ofDim[B](repr.length + 1)
- result(0) = elem
- Array.copy(repr, 0, result, 1, repr.length)
- result
- }
-
override def par = ParArray.handoff(repr)
/** Flattens a two-dimensional array by concatenating all its rows
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index 74f2a6c762..c60e363f8f 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -88,7 +88,7 @@ extends AbstractSet[A]
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ init(in, x => ())
}
/** Toggles whether a size map is used to track hash map statistics.
diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala
index 212ee917c5..7f05deffc8 100644
--- a/src/library/scala/collection/mutable/ListMap.scala
+++ b/src/library/scala/collection/mutable/ListMap.scala
@@ -12,6 +12,7 @@ package scala.collection
package mutable
import generic._
+import annotation.tailrec
/** A simple mutable map backed by a list.
*
@@ -47,13 +48,17 @@ extends AbstractMap[A, B]
def get(key: A): Option[B] = elems find (_._1 == key) map (_._2)
def iterator: Iterator[(A, B)] = elems.iterator
- def += (kv: (A, B)) = { elems = remove(kv._1, elems); elems = kv :: elems; siz += 1; this }
- def -= (key: A) = { elems = remove(key, elems); this }
- private def remove(key: A, elems: List[(A, B)]): List[(A, B)] =
- if (elems.isEmpty) elems
- else if (elems.head._1 == key) { siz -= 1; elems.tail }
- else elems.head :: remove(key, elems.tail)
+ def += (kv: (A, B)) = { elems = remove(kv._1, elems, List()); elems = kv :: elems; siz += 1; this }
+ def -= (key: A) = { elems = remove(key, elems, List()); this }
+
+ @tailrec
+ private def remove(key: A, elems: List[(A, B)], acc: List[(A, B)]): List[(A, B)] = {
+ if (elems.isEmpty) acc
+ else if (elems.head._1 == key) { siz -= 1; acc ::: elems.tail }
+ else remove(key, elems.tail, elems.head :: acc)
+ }
+
override def clear() = { elems = List(); siz = 0 }
override def size: Int = siz
diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala
index e4c8e5fae2..0a4f30131f 100644
--- a/src/library/scala/collection/parallel/mutable/ParArray.scala
+++ b/src/library/scala/collection/parallel/mutable/ParArray.scala
@@ -469,7 +469,6 @@ self =>
Array.copy(arr, i, targetarr, 0, until - i)
pac.buff.size = pac.buff.size + until - i
pac.buff.lastPtr.size = until - i
- pac
} otherwise {
copy2builder_quick(cb, arr, until, i)
i = until
@@ -531,7 +530,6 @@ self =>
val targetarr: Array[Any] = pac.lastbuff.internalArray.asInstanceOf[Array[Any]]
reverse2combiner_quick(targetarr, arr, 0, i, until)
pac.lastbuff.setInternalSize(sz)
- pac
} otherwise {
cb.ifIs[UnrolledParArrayCombiner[T]] {
pac =>
@@ -542,7 +540,6 @@ self =>
reverse2combiner_quick(targetarr, arr, 0, i, until)
pac.buff.size = pac.buff.size + sz
pac.buff.lastPtr.size = sz
- pac
} otherwise super.reverse2combiner(cb)
}
cb
diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
index 3b1278f3be..57fab57348 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala
@@ -85,7 +85,7 @@ extends ParSet[T]
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, x => x)
+ init(in, x => ())
}
import scala.collection.DebugUtils._
diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala
new file mode 100644
index 0000000000..a0d7aaea47
--- /dev/null
+++ b/src/library/scala/concurrent/BatchingExecutor.scala
@@ -0,0 +1,117 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.concurrent
+
+import java.util.concurrent.Executor
+import scala.annotation.tailrec
+
+/**
+ * Mixin trait for an Executor
+ * which groups multiple nested `Runnable.run()` calls
+ * into a single Runnable passed to the original
+ * Executor. This can be a useful optimization
+ * because it bypasses the original context's task
+ * queue and keeps related (nested) code on a single
+ * thread which may improve CPU affinity. However,
+ * if tasks passed to the Executor are blocking
+ * or expensive, this optimization can prevent work-stealing
+ * and make performance worse. Also, some ExecutionContext
+ * may be fast enough natively that this optimization just
+ * adds overhead.
+ * The default ExecutionContext.global is already batching
+ * or fast enough not to benefit from it, while
+ * `fromExecutor` and `fromExecutorService` do NOT add
+ * this optimization since they don't know whether the underlying
+ * executor will benefit from it.
+ * A batching executor can create deadlocks if code does
+ * not use `scala.concurrent.blocking` when it should,
+ * because tasks created within other tasks will block
+ * on the outer task completing.
+ * This executor may run tasks in any order, including LIFO order.
+ * There are no ordering guarantees.
+ *
+ * WARNING: The underlying Executor's execute-method must not execute the submitted Runnable
+ * in the calling thread synchronously. It must enqueue/handoff the Runnable.
+ */
+private[concurrent] trait BatchingExecutor extends Executor {
+
+ // invariant: if "_tasksLocal.get ne null" then we are inside BatchingRunnable.run; if it is null, we are outside
+ private val _tasksLocal = new ThreadLocal[List[Runnable]]()
+
+ private class Batch(val initial: List[Runnable]) extends Runnable with BlockContext {
+ private var parentBlockContext: BlockContext = _
+ // this method runs in the delegate ExecutionContext's thread
+ override def run(): Unit = {
+ require(_tasksLocal.get eq null)
+
+ val prevBlockContext = BlockContext.current
+ BlockContext.withBlockContext(this) {
+ try {
+ parentBlockContext = prevBlockContext
+
+ @tailrec def processBatch(batch: List[Runnable]): Unit = batch match {
+ case Nil => ()
+ case head :: tail =>
+ _tasksLocal set tail
+ try {
+ head.run()
+ } catch {
+ case t: Throwable =>
+ // if one task throws, move the
+ // remaining tasks to another thread
+ // so we can throw the exception
+ // up to the invoking executor
+ val remaining = _tasksLocal.get
+ _tasksLocal set Nil
+ unbatchedExecute(new Batch(remaining)) //TODO what if this submission fails?
+ throw t // rethrow
+ }
+ processBatch(_tasksLocal.get) // since head.run() can add entries, always do _tasksLocal.get here
+ }
+
+ processBatch(initial)
+ } finally {
+ _tasksLocal.remove()
+ parentBlockContext = null
+ }
+ }
+ }
+
+ override def blockOn[T](thunk: => T)(implicit permission: CanAwait): T = {
+ // if we know there will be blocking, we don't want to keep tasks queued up because it could deadlock.
+ {
+ val tasks = _tasksLocal.get
+ _tasksLocal set Nil
+ if ((tasks ne null) && tasks.nonEmpty)
+ unbatchedExecute(new Batch(tasks))
+ }
+
+ // now delegate the blocking to the previous BC
+ require(parentBlockContext ne null)
+ parentBlockContext.blockOn(thunk)
+ }
+ }
+
+ protected def unbatchedExecute(r: Runnable): Unit
+
+ override def execute(runnable: Runnable): Unit = {
+ if (batchable(runnable)) { // If we can batch the runnable
+ _tasksLocal.get match {
+ case null => unbatchedExecute(new Batch(List(runnable))) // If we aren't in batching mode yet, enqueue batch
+ case some => _tasksLocal.set(runnable :: some) // If we are already in batching mode, add to batch
+ }
+ } else unbatchedExecute(runnable) // If not batchable, just delegate to underlying
+ }
+
+ /** Override this to define which runnables will be batched. */
+ def batchable(runnable: Runnable): Boolean = runnable match {
+ case _: OnCompleteRunnable => true
+ case _ => false
+ }
+}
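A minimal sketch of how this trait is meant to be mixed in. The object name is invented for illustration and the sketch mirrors the InternalCallbackExecutor change in the Future.scala hunk below; since the trait is private[concurrent], it can only be used from within this package.

    package scala.concurrent

    private[concurrent] object ExampleBatchingEC extends ExecutionContext with BatchingExecutor {
      // the first runnable of a batch (and any non-batchable runnable) runs directly;
      // runnables submitted while a batch is executing are appended to the
      // thread-local batch instead of being run recursively
      override protected def unbatchedExecute(r: Runnable): Unit = r.run()
      override def reportFailure(t: Throwable): Unit = t.printStackTrace()
    }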
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 4b9e74708d..36f3be341f 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -675,9 +675,9 @@ object Future {
// by just not ever using it itself. scala.concurrent
// doesn't need to create defaultExecutionContext as
// a side effect.
- private[concurrent] object InternalCallbackExecutor extends ExecutionContext {
- override def execute(runnable: Runnable): Unit =
- runnable.run()
+ private[concurrent] object InternalCallbackExecutor extends ExecutionContext with BatchingExecutor {
+ override protected def unbatchedExecute(r: Runnable): Unit =
+ r.run()
override def reportFailure(t: Throwable): Unit =
throw new IllegalStateException("problem in scala.concurrent internal callback", t)
}
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 215f90b17e..77625e381c 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -25,11 +25,15 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
case some => some
}
+ private val uncaughtExceptionHandler: Thread.UncaughtExceptionHandler = new Thread.UncaughtExceptionHandler {
+ def uncaughtException(thread: Thread, cause: Throwable): Unit = reporter(cause)
+ }
+
// Implement BlockContext on FJP threads
class DefaultThreadFactory(daemonic: Boolean) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory {
def wire[T <: Thread](thread: T): T = {
thread.setDaemon(daemonic)
- //Potentially set things like uncaught exception handler, name etc
+ thread.setUncaughtExceptionHandler(uncaughtExceptionHandler)
thread
}
@@ -73,7 +77,7 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
new ForkJoinPool(
desiredParallelism,
threadFactory,
- null, //FIXME we should have an UncaughtExceptionHandler, see what Akka does
+ uncaughtExceptionHandler,
true) // Async all the way baby
} catch {
case NonFatal(t) =>
@@ -94,13 +98,13 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
def execute(runnable: Runnable): Unit = executor match {
case fj: ForkJoinPool =>
+ val fjt = runnable match {
+ case t: ForkJoinTask[_] => t
+ case r => new ExecutionContextImpl.AdaptedForkJoinTask(r)
+ }
Thread.currentThread match {
- case fjw: ForkJoinWorkerThread if fjw.getPool eq fj =>
- (runnable match {
- case fjt: ForkJoinTask[_] => fjt
- case _ => ForkJoinTask.adapt(runnable)
- }).fork
- case _ => fj.execute(runnable)
+ case fjw: ForkJoinWorkerThread if fjw.getPool eq fj => fjt.fork()
+ case _ => fj execute fjt
}
case generic => generic execute runnable
}
@@ -111,6 +115,20 @@ private[scala] class ExecutionContextImpl private[impl] (es: Executor, reporter:
private[concurrent] object ExecutionContextImpl {
+ final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] {
+ final override def setRawResult(u: Unit): Unit = ()
+ final override def getRawResult(): Unit = ()
+ final override def exec(): Boolean = try { runnable.run(); true } catch {
+ case anything: Throwable ⇒
+ val t = Thread.currentThread
+ t.getUncaughtExceptionHandler match {
+ case null ⇒
+ case some ⇒ some.uncaughtException(t, anything)
+ }
+ throw anything
+ }
+ }
+
def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = new ExecutionContextImpl(e, reporter)
def fromExecutorService(es: ExecutorService, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl with ExecutionContextExecutorService =
new ExecutionContextImpl(es, reporter) with ExecutionContextExecutorService {
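For context, these factories back the public ExecutionContext.fromExecutor / fromExecutorService entry points. A minimal usage sketch follows; the pool size and reporter are illustrative, and note that the uncaught-exception handler added above is installed on the default ForkJoinPool's threads, not on a user-supplied pool.

    import java.util.concurrent.Executors
    import scala.concurrent.ExecutionContext

    val pool = Executors.newFixedThreadPool(4)
    // exceptions thrown by submitted tasks are routed to the reporter
    implicit val ec: ExecutionContext =
      ExecutionContext.fromExecutorService(pool, (t: Throwable) => t.printStackTrace())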
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index e9da45a079..52f1075137 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -34,7 +34,7 @@ private class CallbackRunnable[T](val executor: ExecutionContext, val onComplete
value = v
// Note that we cannot prepare the ExecutionContext at this point, since we might
// already be running on a different thread!
- executor.execute(this)
+ try executor.execute(this) catch { case NonFatal(t) => executor reportFailure t }
}
}
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index d3f8df9110..84f6f0be9c 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -95,10 +95,7 @@ package object scala {
val Equiv = scala.math.Equiv
type Fractional[T] = scala.math.Fractional[T]
- val Fractional = scala.math.Fractional
-
type Integral[T] = scala.math.Integral[T]
- val Integral = scala.math.Integral
type Numeric[T] = scala.math.Numeric[T]
val Numeric = scala.math.Numeric
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
index 78817cfb67..89832d3fb2 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
@@ -21,11 +21,12 @@ import scala.annotation.migration
* - `floatingPointNumber`
*/
trait JavaTokenParsers extends RegexParsers {
- /** Anything starting with an ASCII alphabetic character or underscore,
- * followed by zero or more repetitions of regex's `\w`.
+ /** Anything that is a valid Java identifier, according to
+ * <a href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-3.html#jls-3.8">The Java Language Spec</a>.
+ * Generally, this means a letter, followed by zero or more letters or numbers.
*/
def ident: Parser[String] =
- """[a-zA-Z_]\w*""".r
+ """\p{javaJavaIdentifierStart}\p{javaJavaIdentifierPart}*""".r
/** An integer, without sign or with a negative sign. */
def wholeNumber: Parser[String] =
"""-?\d+""".r
diff --git a/src/partest/scala/tools/partest/ASMConverters.scala b/src/partest/scala/tools/partest/ASMConverters.scala
new file mode 100644
index 0000000000..d618e086f4
--- /dev/null
+++ b/src/partest/scala/tools/partest/ASMConverters.scala
@@ -0,0 +1,71 @@
+package scala.tools.partest
+
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.tree.{ClassNode, MethodNode, InsnList}
+
+/** Makes using ASM from ByteCodeTests more convenient.
+ *
+ * Wraps ASM instructions in case classes so that equals and toString work
+ * for the purpose of bytecode diffing and pretty printing.
+ */
+trait ASMConverters {
+ // wrap ASM's instructions so we get case class-style `equals` and `toString`
+ object instructions {
+ def fromMethod(meth: MethodNode): List[Instruction] = {
+ val insns = meth.instructions
+ val asmToScala = new AsmToScala{ def labelIndex(l: asm.tree.AbstractInsnNode) = insns.indexOf(l) }
+
+ asmToScala.mapOver(insns.iterator.asScala.toList).asInstanceOf[List[Instruction]]
+ }
+
+ sealed abstract class Instruction { def opcode: String }
+ case class Field (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class Incr (opcode: String, incr: Int, `var`: Int) extends Instruction
+ case class Op (opcode: String) extends Instruction
+ case class IntOp (opcode: String, operand: Int) extends Instruction
+ case class Jump (opcode: String, label: Label) extends Instruction
+ case class Ldc (opcode: String, cst: Any) extends Instruction
+ case class LookupSwitch (opcode: String, dflt: Label, keys: List[Integer], labels: List[Label]) extends Instruction
+ case class TableSwitch (opcode: String, dflt: Label, max: Int, min: Int, labels: List[Label]) extends Instruction
+ case class Method (opcode: String, desc: String, name: String, owner: String) extends Instruction
+ case class NewArray (opcode: String, desc: String, dims: Int) extends Instruction
+ case class TypeOp (opcode: String, desc: String) extends Instruction
+ case class VarOp (opcode: String, `var`: Int) extends Instruction
+ case class Label (offset: Int) extends Instruction { def opcode: String = "" }
+ case class FrameEntry (local: List[Any], stack: List[Any]) extends Instruction { def opcode: String = "" }
+ case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: String = "" }
+ }
+
+ abstract class AsmToScala {
+ import instructions._
+
+ def labelIndex(l: asm.tree.AbstractInsnNode): Int
+
+ def mapOver(is: List[Any]): List[Any] = is map {
+ case i: asm.tree.AbstractInsnNode => apply(i)
+ case x => x
+ }
+
+ def op(i: asm.tree.AbstractInsnNode) = if (asm.util.Printer.OPCODES.isDefinedAt(i.getOpcode)) asm.util.Printer.OPCODES(i.getOpcode) else "?"
+ def lst[T](xs: java.util.List[T]): List[T] = if (xs == null) Nil else xs.asScala.toList
+ def apply(l: asm.tree.LabelNode): Label = this(l: asm.tree.AbstractInsnNode).asInstanceOf[Label]
+ def apply(x: asm.tree.AbstractInsnNode): Instruction = x match {
+ case i: asm.tree.FieldInsnNode => Field (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.IincInsnNode => Incr (op(i), i.incr: Int, i.`var`: Int)
+ case i: asm.tree.InsnNode => Op (op(i))
+ case i: asm.tree.IntInsnNode => IntOp (op(i), i.operand: Int)
+ case i: asm.tree.JumpInsnNode => Jump (op(i), this(i.label))
+ case i: asm.tree.LdcInsnNode => Ldc (op(i), i.cst: Any)
+ case i: asm.tree.LookupSwitchInsnNode => LookupSwitch (op(i), this(i.dflt), lst(i.keys), mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.TableSwitchInsnNode => TableSwitch (op(i), this(i.dflt), i.max: Int, i.min: Int, mapOver(lst(i.labels)).asInstanceOf[List[Label]])
+ case i: asm.tree.MethodInsnNode => Method (op(i), i.desc: String, i.name: String, i.owner: String)
+ case i: asm.tree.MultiANewArrayInsnNode => NewArray (op(i), i.desc: String, i.dims: Int)
+ case i: asm.tree.TypeInsnNode => TypeOp (op(i), i.desc: String)
+ case i: asm.tree.VarInsnNode => VarOp (op(i), i.`var`: Int)
+ case i: asm.tree.LabelNode => Label (labelIndex(x))
+ case i: asm.tree.FrameNode => FrameEntry (mapOver(lst(i.local)), mapOver(lst(i.stack)))
+ case i: asm.tree.LineNumberNode => LineNumber (i.line: Int, this(i.start): Label)
+ }
+ }
+} \ No newline at end of file
diff --git a/src/partest/scala/tools/partest/BytecodeTest.scala b/src/partest/scala/tools/partest/BytecodeTest.scala
new file mode 100644
index 0000000000..41329a8264
--- /dev/null
+++ b/src/partest/scala/tools/partest/BytecodeTest.scala
@@ -0,0 +1,102 @@
+package scala.tools.partest
+
+import scala.tools.nsc.util.JavaClassPath
+import scala.collection.JavaConverters._
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, MethodNode, InsnList}
+import java.io.InputStream
+
+/**
+ * Provides utilities for inspecting bytecode using ASM library.
+ *
+ * HOW TO USE
+ * 1. Create subdirectory in test/files/jvm for your test. Let's name it $TESTDIR.
+ * 2. Create $TESTDIR/BytecodeSrc_1.scala that contains the Scala source file that you
+ * want to inspect the bytecode for. The '_1' suffix signals to partest that it
+ * should compile this file first.
+ * 3. Create $TESTDIR/Test.scala:
+ * import scala.tools.partest.BytecodeTest
+ * object Test extends BytecodeTest {
+ * def show {
+ * // your code that inspects ASM trees and prints values
+ * }
+ * }
+ * 4. Create corresponding check file.
+ *
+ * EXAMPLE
+ * See test/files/jvm/bytecode-test-example for an example of a bytecode test.
+ *
+ */
+abstract class BytecodeTest extends ASMConverters {
+
+ /** produce the output to be compared against a checkfile */
+ protected def show(): Unit
+
+ def main(args: Array[String]): Unit = show
+
+// asserts
+ def sameBytecode(methA: MethodNode, methB: MethodNode) = {
+ val isa = instructions.fromMethod(methA)
+ val isb = instructions.fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else diffInstructions(isa, isb)
+ }
+
+ import instructions._
+ // bytecode is equal modulo local variable numbering
+ def equalsModuloVar(a: Instruction, b: Instruction) = (a, b) match {
+ case _ if a == b => true
+ case (VarOp(op1, _), VarOp(op2, _)) if op1 == op2 => true
+ case _ => false
+ }
+
+ def similarBytecode(methA: MethodNode, methB: MethodNode, similar: (Instruction, Instruction) => Boolean) = {
+ val isa = fromMethod(methA)
+ val isb = fromMethod(methB)
+ if (isa == isb) println("bytecode identical")
+ else if ((isa, isb).zipped.forall { case (a, b) => similar(a, b) }) println("bytecode similar")
+ else diffInstructions(isa, isb)
+ }
+
+ def diffInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+ val len = Math.max(isa.length, isb.length)
+ if (len > 0 ) {
+ val width = isa.map(_.toString.length).max
+ val lineWidth = len.toString.length
+ (1 to len) foreach { line =>
+ val isaPadded = isa.map(_.toString) orElse Stream.continually("")
+ val isbPadded = isb.map(_.toString) orElse Stream.continually("")
+ val a = isaPadded(line-1)
+ val b = isbPadded(line-1)
+
+ println(s"""$line${" " * (lineWidth-line.toString.length)} ${if (a==b) "==" else "<>"} $a${" " * (width-a.length)} | $b""")
+ }
+ }
+ }
+
+// loading
+ protected def getMethod(classNode: ClassNode, name: String): MethodNode =
+ classNode.methods.asScala.find(_.name == name) getOrElse
+ sys.error(s"Didn't find method '$name' in class '${classNode.name}'")
+
+ protected def loadClassNode(name: String, skipDebugInfo: Boolean = true): ClassNode = {
+ val classBytes: InputStream = (for {
+ classRep <- classpath.findClass(name)
+ binary <- classRep.binary
+ } yield binary.input) getOrElse sys.error(s"failed to load class '$name'; classpath = $classpath")
+
+ val cr = new ClassReader(classBytes)
+ val cn = new ClassNode()
+ cr.accept(cn, if (skipDebugInfo) ClassReader.SKIP_DEBUG else 0)
+ cn
+ }
+
+ protected lazy val classpath: JavaClassPath = {
+ import scala.tools.nsc.util.ClassPath.DefaultJavaContext
+ import scala.tools.util.PathResolver.Defaults
+ // logic inspired by scala.tools.util.PathResolver implementation
+ val containers = DefaultJavaContext.classesInExpandedPath(Defaults.javaUserClassPath)
+ new JavaClassPath(containers, DefaultJavaContext)
+ }
+}
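As a concrete sketch of step 3 above (the class and method names Foo, f and g are assumptions about a hypothetical BytecodeSrc_1.scala, not part of this patch):

    import scala.tools.partest.BytecodeTest

    object Test extends BytecodeTest {
      def show {
        // load the compiled module class from the test classpath and compare two
        // of its methods, printing "bytecode identical" or an instruction-level diff
        val classNode = loadClassNode("Foo$")
        sameBytecode(getMethod(classNode, "f"), getMethod(classNode, "g"))
      }
    }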
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index d9f2bfe765..0199400ada 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -355,7 +355,7 @@ class PartestTask extends Task with CompilationPathProperty {
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
- scalacArgsFlat foreach (antFileManager.SCALAC_OPTS = _)
+ scalacArgsFlat foreach (antFileManager.SCALAC_OPTS ++= _)
timeout foreach (antFileManager.timeout = _)
type TFSet = (Array[File], String, String)
diff --git a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
index 494a5a99be..878c8613d5 100644
--- a/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
+++ b/src/partest/scala/tools/partest/javaagent/ASMTransformer.java
@@ -26,9 +26,18 @@ public class ASMTransformer implements ClassFileTransformer {
className.startsWith("instrumented/"));
}
- public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
+ public byte[] transform(final ClassLoader classLoader, final String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) {
if (shouldTransform(className)) {
- ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
+ ClassWriter writer = new ClassWriter(ClassWriter.COMPUTE_MAXS) {
+ @Override protected String getCommonSuperClass(final String type1, final String type2) {
+ // Since we are not recomputing the stack frame map, this should never be called. We override this method because the
+ // default implementation uses reflection and might try to load the very class that we are
+ // currently processing. That leads to weird results like swallowed exceptions and classes not being
+ // transformed.
+ throw new RuntimeException("Unexpected call to getCommonSuperClass(" + type1 + ", " + type2 +
+ ") while transforming " + className);
+ }
+ };
ProfilerVisitor visitor = new ProfilerVisitor(writer);
ClassReader reader = new ClassReader(classfileBuffer);
reader.accept(visitor, 0);
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
index ac83f66506..8306327b14 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
+++ b/src/partest/scala/tools/partest/javaagent/ProfilerVisitor.java
@@ -33,6 +33,19 @@ public class ProfilerVisitor extends ClassVisitor implements Opcodes {
// only instrument non-abstract methods
if((access & ACC_ABSTRACT) == 0) {
assert(className != null);
+ /* The following instructions do not modify compressed stack frame map so
+ * we don't need to worry about recalculating stack frame map. Specifically,
+ * let's quote "ASM 4.0, A Java bytecode engineering library" guide (p. 40):
+ *
+ * In order to save space, a compiled method does not contain one frame per
+ * instruction: in fact it contains only the frames for the instructions
+ * that correspond to jump targets or exception handlers, or that follow
+ * unconditional jump instructions. Indeed the other frames can be easily
+ * and quickly inferred from these ones.
+ *
+ * The instructions below just load constants and call a method, so according
+ * to the definition above they do not contribute to the compressed stack frame map.
+ */
mv.visitLdcInsn(className);
mv.visitLdcInsn(name);
mv.visitLdcInsn(desc);
diff --git a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
index c2e4dc69f4..3b18987040 100644
--- a/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
+++ b/src/partest/scala/tools/partest/javaagent/ProfilingAgent.java
@@ -20,6 +20,6 @@ public class ProfilingAgent {
// and the test-case itself won't be loaded yet. We rely here on the fact that ASMTransformer does
// not depend on Scala library. In case our assumptions are wrong we can always insert call to
// inst.retransformClasses.
- inst.addTransformer(new ASMTransformer(), true);
+ inst.addTransformer(new ASMTransformer(), false);
}
}
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index b0ce6579ac..20f9c701d5 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -46,7 +46,7 @@ object SBTRunner extends DirectRunner {
case x => sys.error("Unknown command line options: " + x)
}
val config = parseArgs(args, CommandLineOptions())
- fileManager.SCALAC_OPTS = config.scalacOptions
+ fileManager.SCALAC_OPTS ++= config.scalacOptions
fileManager.CLASSPATH = config.classpath getOrElse sys.error("No classpath set")
def findClasspath(jar: String, name: String): Option[String] = {
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 8f256aa1f5..0c8e81a220 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -59,6 +59,8 @@ private[reflect] trait BuildUtils { self: Universe =>
def flagsFromBits(bits: Long): FlagSet
+ def emptyValDef: ValDef
+
def This(sym: Symbol): Tree
def Select(qualifier: Tree, sym: Symbol): Select
diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala
index 562b1da8e3..2ba18a8207 100644
--- a/src/reflect/scala/reflect/api/Exprs.scala
+++ b/src/reflect/scala/reflect/api/Exprs.scala
@@ -90,6 +90,7 @@ trait Exprs { self: Universe =>
* }}}
* because expr of type Expr[T] itself does not have a method foo.
*/
+ // @compileTimeOnly("Cannot use splice outside reify")
def splice: T
/**
@@ -106,6 +107,7 @@ trait Exprs { self: Universe =>
* object Impls { def foo_impl(c: Context)(x: c.Expr[X]): c.Expr[x.value.T] = ... }
* }}}
*/
+ // @compileTimeOnly("Cannot use value except for signatures of macro implementations")
val value: T
override def canEqual(x: Any) = x.isInstanceOf[Expr[_]]
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index cfa6315797..0937a93738 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -75,26 +75,11 @@ trait Trees { self: Universe =>
def isDef: Boolean
/** Is this tree one of the empty trees?
- *
* Empty trees are: the `EmptyTree` null object, `TypeTree` instances that don't carry a type
* and the special `emptyValDef` singleton.
- *
- * In the compiler the `isEmpty` check and the derived `orElse` method are mostly used
- * as a check for a tree being a null object (`EmptyTree` for term trees and empty TypeTree for type trees).
- *
- * Unfortunately `emptyValDef` is also considered to be `isEmpty`, but this is deemed to be
- * a conceptual mistake pending a fix in https://issues.scala-lang.org/browse/SI-6762.
- *
- * @see `canHaveAttrs`
*/
def isEmpty: Boolean
- /** Can this tree carry attributes (i.e. symbols, types or positions)?
- * Typically the answer is yes, except for the `EmptyTree` null object and
- * two special singletons: `emptyValDef` and `pendingSuperCall`.
- */
- def canHaveAttrs: Boolean
-
/** The canonical way to test if a Tree represents a term.
*/
def isTerm: Boolean
@@ -2420,15 +2405,6 @@ trait Trees { self: Universe =>
*/
val emptyValDef: ValDef
- /** An empty superclass constructor call corresponding to:
- * super.<init>()
- * This is used as a placeholder in the primary constructor body in class templates
- * to denote the insertion point of a call to superclass constructor after the typechecker
- * figures out the superclass of a given template.
- * @group Trees
- */
- val pendingSuperCall: Apply
-
// ---------------------- factories ----------------------------------------------
/** A factory method for `ClassDef` nodes.
@@ -2931,8 +2907,7 @@ trait Trees { self: Universe =>
trees mapConserve (tree => transform(tree).asInstanceOf[TypeDef])
/** Transforms a `ValDef`. */
def transformValDef(tree: ValDef): ValDef =
- if (tree eq emptyValDef) tree
- else transform(tree).asInstanceOf[ValDef]
+ if (tree.isEmpty) tree else transform(tree).asInstanceOf[ValDef]
/** Transforms a list of `ValDef` nodes. */
def transformValDefs(trees: List[ValDef]): List[ValDef] =
trees mapConserve (transformValDef(_))
diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
index 5318d3e540..1ab975b233 100644
--- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala
@@ -16,7 +16,15 @@ trait AnnotationCheckers {
/** An additional checker for annotations on types.
* Typically these are registered by compiler plugins
* with the addAnnotationChecker method. */
- abstract class AnnotationChecker {
+ trait AnnotationChecker {
+
+ /**
+ * Selectively activate this annotation checker. When using both an annotation checker
+ * and an analyzer plugin, it is common to run both of them only during selected
+ * compiler phases. See documentation in AnalyzerPlugin.isActive.
+ */
+ def isActive(): Boolean = true
+
/** Check the annotations on two types conform. */
def annotationsConform(tpe1: Type, tpe2: Type): Boolean
@@ -29,39 +37,51 @@ trait AnnotationCheckers {
def annotationsGlb(tp: Type, ts: List[Type]): Type = tp
/** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = bounds
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] = bounds
- /** Modify the type that has thus far been inferred
- * for a tree. All this should do is add annotations. */
+ /**
+ * Modify the type that has thus far been inferred for a tree. All this should
+ * do is add annotations.
+ */
+ @deprecated("Create an AnalyzerPlugin and use pluginsTyped", "2.10.1")
def addAnnotations(tree: Tree, tpe: Type): Type = tpe
- /** Decide whether this annotation checker can adapt a tree
- * that has an annotated type to the given type tp, taking
- * into account the given mode (see method adapt in trait Typers).*/
+ /**
+ * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the
+ * given type tp, taking into account the given mode (see method adapt in trait Typers).
+ */
+ @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1")
def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = false
- /** Adapt a tree that has an annotated type to the given type tp,
- * taking into account the given mode (see method adapt in trait Typers).
- * An implementation cannot rely on canAdaptAnnotations being called
- * before. If the implementing class cannot do the adaptiong, it
- * should return the tree unchanged.*/
+ /**
+ * Adapt a tree that has an annotated type to the given type tp, taking into account the given
+ * mode (see method adapt in trait Typers).
+ *
+ * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing
+ * class cannot do the adapting, it should return the tree unchanged.
+ */
+ @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1")
def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree
- /** Adapt the type of a return expression. The decision of an annotation checker
- * whether the type should be adapted is based on the type of the expression
- * which is returned, as well as the result type of the method (pt).
- * By default, this method simply returns the passed `default` type.
+ /**
+ * Adapt the type of a return expression. The decision of a typer plugin whether the type
+ * should be adapted is based on the type of the expression which is returned, as well as the
+ * result type of the method (pt).
+ *
+ * By default, this method simply returns the passed `default` type.
*/
+ @deprecated("Create an AnalyzerPlugin and use pluginsTypedReturn. Note: the 'tree' argument here is\n"+
+ "the 'expr' of a Return tree; 'pluginsTypedReturn' takes the Return tree itself as argument", "2.10.1")
def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = default
}
// Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary.
+
/** The list of annotation checkers that have been registered */
private var annotationCheckers: List[AnnotationChecker] = Nil
- /** Register an annotation checker. Typically these
- * are added by compiler plugins. */
+ /** Register an annotation checker. Typically these are added by compiler plugins. */
def addAnnotationChecker(checker: AnnotationChecker) {
if (!(annotationCheckers contains checker))
annotationCheckers = checker :: annotationCheckers
@@ -72,76 +92,53 @@ trait AnnotationCheckers {
annotationCheckers = Nil
}
- /** Check that the annotations on two types conform. To do
- * so, consult all registered annotation checkers. */
- def annotationsConform(tp1: Type, tp2: Type): Boolean = {
- /* Finish quickly if there are no annotations */
- if (tp1.annotations.isEmpty && tp2.annotations.isEmpty)
- true
- else
- annotationCheckers.forall(
- _.annotationsConform(tp1,tp2))
- }
-
- /** Refine the computed least upper bound of a list of types.
- * All this should do is add annotations. */
- def annotationsLub(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsLub(tpe, ts))
- }
-
- /** Refine the computed greatest lower bound of a list of types.
- * All this should do is add annotations. */
- def annotationsGlb(tpe: Type, ts: List[Type]): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.annotationsGlb(tpe, ts))
- }
-
- /** Refine the bounds on type parameters to the given type arguments. */
- def adaptBoundsToAnnotations(bounds: List[TypeBounds],
- tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
- annotationCheckers.foldLeft(bounds)((bounds, checker) =>
- checker.adaptBoundsToAnnotations(bounds, tparams, targs))
- }
-
- /** Let all annotations checkers add extra annotations
- * to this tree's type. */
- def addAnnotations(tree: Tree, tpe: Type): Type = {
- annotationCheckers.foldLeft(tpe)((tpe, checker) =>
- checker.addAnnotations(tree, tpe))
- }
-
- /** Find out whether any annotation checker can adapt a tree
- * to a given type. Called by Typers.adapt. */
- def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean = {
- annotationCheckers.exists(_.canAdaptAnnotations(tree, mode, pt))
- }
-
- /** Let registered annotation checkers adapt a tree
- * to a given type (called by Typers.adapt). Annotation checkers
- * that cannot do the adaption should pass the tree through
- * unchanged. */
- def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = {
- annotationCheckers.foldLeft(tree)((tree, checker) =>
- checker.adaptAnnotations(tree, mode, pt))
- }
-
- /** Let a registered annotation checker adapt the type of a return expression.
- * Annotation checkers that cannot do the adaptation should simply return
- * the `default` argument.
- *
- * Note that the result is undefined if more than one annotation checker
- * returns an adapted type which is not a subtype of `default`.
- */
- def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = {
- val adaptedTypes = annotationCheckers flatMap { checker =>
- val adapted = checker.adaptTypeOfReturn(tree, pt, default)
- if (!(adapted <:< default)) List(adapted)
- else List()
- }
- adaptedTypes match {
- case fst :: _ => fst
- case List() => default
- }
- }
+ /** @see AnnotationChecker.annotationsConform */
+ def annotationsConform(tp1: Type, tp2: Type): Boolean =
+ if (annotationCheckers.isEmpty || (tp1.annotations.isEmpty && tp2.annotations.isEmpty)) true
+ else annotationCheckers.forall(checker => {
+ !checker.isActive() || checker.annotationsConform(tp1,tp2)
+ })
+
+ /** @see AnnotationChecker.annotationsLub */
+ def annotationsLub(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsLub(tpe, ts))
+
+ /** @see AnnotationChecker.annotationsGlb */
+ def annotationsGlb(tpe: Type, ts: List[Type]): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.annotationsGlb(tpe, ts))
+
+ /** @see AnnotationChecker.adaptBoundsToAnnotations */
+ def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol],
+ targs: List[Type]): List[TypeBounds] =
+ if (annotationCheckers.isEmpty) bounds
+ else annotationCheckers.foldLeft(bounds)((bounds, checker) =>
+ if (!checker.isActive()) bounds else checker.adaptBoundsToAnnotations(bounds, tparams, targs))
+
+
+ /* The following methods will be removed with the deprecated methods in AnnotationChecker. */
+
+ def addAnnotations(tree: Tree, tpe: Type): Type =
+ if (annotationCheckers.isEmpty) tpe
+ else annotationCheckers.foldLeft(tpe)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.addAnnotations(tree, tpe))
+
+ def canAdaptAnnotations(tree: Tree, mode: Int, pt: Type): Boolean =
+ if (annotationCheckers.isEmpty) false
+ else annotationCheckers.exists(checker => {
+ checker.isActive() && checker.canAdaptAnnotations(tree, mode, pt)
+ })
+
+ def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree =
+ if (annotationCheckers.isEmpty) tree
+ else annotationCheckers.foldLeft(tree)((tree, checker) =>
+ if (!checker.isActive()) tree else checker.adaptAnnotations(tree, mode, pt))
+
+ def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type =
+ if (annotationCheckers.isEmpty) default
+ else annotationCheckers.foldLeft(default)((tpe, checker) =>
+ if (!checker.isActive()) tpe else checker.adaptTypeOfReturn(tree, pt, tpe))
}
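A minimal sketch of a checker using the new isActive hook, as it might appear inside a compiler plugin. The phase condition and the always-true conformance rule are placeholders, not taken from this patch.

    import scala.tools.nsc.Global

    class ConformanceComponent(val global: Global) {
      import global._

      object checker extends AnnotationChecker {
        // only participate after the typer has run (placeholder condition)
        override def isActive(): Boolean = phase.id > currentRun.typerPhase.id
        // placeholder rule: annotated types always conform
        def annotationsConform(tpe1: Type, tpe2: Type): Boolean = true
      }

      def install(): Unit = addAnnotationChecker(checker)
    }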
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 6a5a742cc7..032b45316e 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -33,6 +33,17 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
case ThrownException(exc) => exc
}
+ def addThrowsAnnotation(throwableSym: Symbol): Self = {
+ val throwableTpe = if (throwableSym.isMonomorphicType) throwableSym.tpe else {
+ debuglog(s"Encountered polymorphic exception `${throwableSym.fullName}` while parsing class file.")
+ // in case we encounter polymorphic exception the best we can do is to convert that type to
+ // monomorphic one by introducing existentials, see SI-7009 for details
+ existentialAbstraction(throwableSym.typeParams, throwableSym.tpe)
+ }
+ val throwsAnn = AnnotationInfo(appliedType(definitions.ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil)
+ withAnnotations(List(throwsAnn))
+ }
+
/** Tests for, get, or remove an annotation */
def hasAnnotation(cls: Symbol): Boolean =
//OPT inlined from exists to save on #closures; was: annotations exists (_ matches cls)
@@ -330,14 +341,14 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
implicit val AnnotationTag = ClassTag[AnnotationInfo](classOf[AnnotationInfo])
object UnmappableAnnotation extends CompleteAnnotationInfo(NoType, Nil, Nil)
-
+
/** Extracts symbol of thrown exception from AnnotationInfo.
- *
+ *
* Supports both “old-style” `@throws(classOf[Exception])`
* as well as “new-style” `@throws[Exception]("cause")` annotations.
*/
object ThrownException {
- def unapply(ann: AnnotationInfo): Option[Symbol] =
+ def unapply(ann: AnnotationInfo): Option[Symbol] =
ann match {
case AnnotationInfo(tpe, _, _) if tpe.typeSymbol != ThrowsClass =>
None
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index b1b0c5b60b..9f41f0336e 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -47,6 +47,8 @@ trait BuildUtils { self: SymbolTable =>
def flagsFromBits(bits: Long): FlagSet = bits
+ def emptyValDef: ValDef = self.emptyValDef
+
def This(sym: Symbol): Tree = self.This(sym)
def Select(qualifier: Tree, sym: Symbol): Select = self.Select(qualifier, sym)
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 2a7b55cb5a..6e4ca76382 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -337,12 +337,13 @@ trait Definitions extends api.StandardDefinitions {
lazy val PredefModule = requiredModule[scala.Predef.type]
lazy val PredefModuleClass = PredefModule.moduleClass
- def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
- def Predef_identity = getMemberMethod(PredefModule, nme.identity)
- def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
- def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
- def Predef_??? = getMemberMethod(PredefModule, nme.???)
- def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
+ def Predef_classOf = getMemberMethod(PredefModule, nme.classOf)
+ def Predef_identity = getMemberMethod(PredefModule, nme.identity)
+ def Predef_conforms = getMemberMethod(PredefModule, nme.conforms)
+ def Predef_wrapRefArray = getMemberMethod(PredefModule, nme.wrapRefArray)
+ def Predef_wrapArray(tp: Type) = getMemberMethod(PredefModule, wrapArrayMethodName(tp))
+ def Predef_??? = getMemberMethod(PredefModule, nme.???)
+ def Predef_implicitly = getMemberMethod(PredefModule, nme.implicitly)
/** Is `sym` a member of Predef with the given name?
* Note: DON't replace this by sym == Predef_conforms/etc, as Predef_conforms is a `def`
@@ -466,6 +467,8 @@ trait Definitions extends api.StandardDefinitions {
// arrays and their members
lazy val ArrayModule = requiredModule[scala.Array.type]
lazy val ArrayModule_overloadedApply = getMemberMethod(ArrayModule, nme.apply)
+ def ArrayModule_genericApply = ArrayModule_overloadedApply.suchThat(_.paramss.flatten.last.tpe.typeSymbol == ClassTagClass) // [T: ClassTag](xs: T*): Array[T]
+ def ArrayModule_apply(tp: Type) = ArrayModule_overloadedApply.suchThat(_.tpe.resultType =:= arrayType(tp)) // (p1: AnyVal1, ps: AnyVal1*): Array[AnyVal1]
lazy val ArrayClass = getRequiredClass("scala.Array") // requiredClass[scala.Array[_]]
lazy val Array_apply = getMemberMethod(ArrayClass, nme.apply)
lazy val Array_update = getMemberMethod(ArrayClass, nme.update)
@@ -536,10 +539,12 @@ trait Definitions extends api.StandardDefinitions {
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
// Option classes
- lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
- lazy val SomeClass: ClassSymbol = requiredClass[Some[_]]
- lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
- lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
+ lazy val OptionClass: ClassSymbol = requiredClass[Option[_]]
+ lazy val OptionModule: ModuleSymbol = requiredModule[scala.Option.type]
+ lazy val Option_apply = getMemberMethod(OptionModule, nme.apply)
+ lazy val SomeClass: ClassSymbol = requiredClass[Some[_]]
+ lazy val NoneModule: ModuleSymbol = requiredModule[scala.None.type]
+ lazy val SomeModule: ModuleSymbol = requiredModule[scala.Some.type]
def compilerTypeFromTag(tt: ApiUniverse # WeakTypeTag[_]): Type = tt.in(rootMirror).tpe
def compilerSymbolFromTag(tt: ApiUniverse # WeakTypeTag[_]): Symbol = tt.in(rootMirror).tpe.typeSymbol
@@ -666,6 +671,11 @@ trait Definitions extends api.StandardDefinitions {
case _ => Nil
}
+ def dropNullaryMethod(tp: Type) = tp match {
+ case NullaryMethodType(restpe) => restpe
+ case _ => tp
+ }
+
def unapplyUnwrap(tpe:Type) = tpe.finalResultType.normalize match {
case RefinedType(p :: _, _) => p.normalize
case tp => tp
@@ -673,9 +683,10 @@ trait Definitions extends api.StandardDefinitions {
def functionApply(n: Int) = getMemberMethod(FunctionClass(n), nme.apply)
- def abstractFunctionForFunctionType(tp: Type) =
- if (isFunctionType(tp)) abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
- else NoType
+ def abstractFunctionForFunctionType(tp: Type) = {
+ assert(isFunctionType(tp), tp)
+ abstractFunctionType(tp.typeArgs.init, tp.typeArgs.last)
+ }
def isFunctionType(tp: Type): Boolean = tp.normalize match {
case TypeRef(_, sym, args) if args.nonEmpty =>
@@ -859,6 +870,12 @@ trait Definitions extends api.StandardDefinitions {
removeRedundantObjects(parents)
}
+ /** Flatten curried parameter lists of a method type. */
+ def allParameters(tpe: Type): List[Symbol] = tpe match {
+ case MethodType(params, res) => params ::: allParameters(res)
+ case _ => Nil
+ }
+
def typeStringNoPackage(tp: Type) =
"" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "."
@@ -944,7 +961,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val BeanPropertyAttr = requiredClass[scala.beans.BeanProperty]
lazy val BooleanBeanPropertyAttr = requiredClass[scala.beans.BooleanBeanProperty]
lazy val CloneableAttr = requiredClass[scala.annotation.cloneable]
- lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.macros.compileTimeOnly")
+ lazy val CompileTimeOnlyAttr = getClassIfDefined("scala.reflect.internal.annotations.compileTimeOnly")
lazy val DeprecatedAttr = requiredClass[scala.deprecated]
lazy val DeprecatedNameAttr = requiredClass[scala.deprecatedName]
lazy val DeprecatedInheritanceAttr = requiredClass[scala.deprecatedInheritance]
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 59c027868e..8b24678fd6 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -32,19 +32,4 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
-
- /** Convert to corresponding type parameters all skolems of method
- * parameters which appear in `tparams`.
- */
- def deskolemizeTypeParams(tparams: List[Symbol])(tp: Type): Type = {
- class DeSkolemizeMap extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if sym.isTypeSkolem && (tparams contains sym.deSkolemize) =>
- mapOver(typeRef(NoPrefix, sym.deSkolemize, args))
- case _ =>
- mapOver(tp)
- }
- }
- new DeSkolemizeMap mapOver tp
- }
}
diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala
index 2f2b02975c..43902c1930 100644
--- a/src/reflect/scala/reflect/internal/Importers.scala
+++ b/src/reflect/scala/reflect/internal/Importers.scala
@@ -334,8 +334,6 @@ trait Importers extends api.Importers { self: SymbolTable =>
new ModuleDef(importModifiers(mods), importName(name).toTermName, importTemplate(impl))
case from.emptyValDef =>
emptyValDef
- case from.pendingSuperCall =>
- pendingSuperCall
case from.ValDef(mods, name, tpt, rhs) =>
new ValDef(importModifiers(mods), importName(name).toTermName, importTree(tpt), importTree(rhs))
case from.DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala
index f8c670827a..faa161d6b1 100644
--- a/src/reflect/scala/reflect/internal/Positions.scala
+++ b/src/reflect/scala/reflect/internal/Positions.scala
@@ -38,7 +38,7 @@ trait Positions extends api.Positions { self: SymbolTable =>
protected class DefaultPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
- if (!t.canHaveAttrs) ()
+ if (t eq EmptyTree) ()
else if (t.pos == NoPosition) {
t.setPos(pos)
super.traverse(t) // TODO: bug? shouldn't the traverse be outside of the if?
diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala
index a8085a4c58..80d247c0ea 100644
--- a/src/reflect/scala/reflect/internal/Printers.scala
+++ b/src/reflect/scala/reflect/internal/Printers.scala
@@ -435,7 +435,7 @@ trait Printers extends api.Printers { self: SymbolTable =>
case tree =>
xprintTree(this, tree)
}
- if (printTypes && tree.isTerm && tree.canHaveAttrs) {
+ if (printTypes && tree.isTerm && !tree.isEmpty) {
print("{", if (tree.tpe eq null) "<null>" else tree.tpe.toString, "}")
}
}
@@ -542,10 +542,8 @@ trait Printers extends api.Printers { self: SymbolTable =>
print(")")
case EmptyTree =>
print("EmptyTree")
- case self.emptyValDef =>
+ case emptyValDef: AnyRef if emptyValDef eq self.emptyValDef =>
print("emptyValDef")
- case self.pendingSuperCall =>
- print("pendingSuperCall")
case tree: Tree =>
val hasSymbol = tree.hasSymbol && tree.symbol != NoSymbol
val isError = hasSymbol && tree.symbol.name.toString == nme.ERROR.toString
diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala
new file mode 100644
index 0000000000..9b99b94b41
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala
@@ -0,0 +1,23 @@
+package scala.reflect
+package internal
+
+import ClassfileConstants._
+
+trait PrivateWithin {
+ self: SymbolTable =>
+
+ def importPrivateWithinFromJavaFlags(sym: Symbol, jflags: Int): Symbol = {
+ if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0)
+ // See ticket #1687 for an example of when topLevelClass is NoSymbol: it
+ // apparently occurs when processing v45.3 bytecode.
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
+
+ // protected in java means package protected. #3946
+ if ((jflags & JAVA_ACC_PROTECTED) != 0)
+ if (sym.enclosingTopLevelClass != NoSymbol)
+ sym.privateWithin = sym.enclosingTopLevelClass.owner
+
+ sym
+ }
+} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index c870d8972d..ddc5d94e70 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -297,6 +297,7 @@ trait StdNames {
// Compiler internal names
val ANYname: NameType = "<anyname>"
val CONSTRUCTOR: NameType = "<init>"
+ val DEFAULT_CASE: NameType = "defaultCase$"
val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$"
val FAKE_LOCAL_THIS: NameType = "this$"
val INITIALIZER: NameType = CONSTRUCTOR // Is this buying us something?
@@ -730,7 +731,6 @@ trait StdNames {
val null_ : NameType = "null"
val ofDim: NameType = "ofDim"
val origin: NameType = "origin"
- val pendingSuperCall: NameType = "pendingSuperCall"
val prefix : NameType = "prefix"
val productArity: NameType = "productArity"
val productElement: NameType = "productElement"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index 02ac59a461..f75855f1ec 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -38,6 +38,7 @@ abstract class SymbolTable extends macros.Universe
with StdAttachments
with StdCreators
with BuildUtils
+ with PrivateWithin
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index 923dac7498..d9eb48ff2d 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -86,7 +86,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case n: TypeName => if (isClass) newClassSymbol(n, pos, newFlags) else newNonClassSymbol(n, pos, newFlags)
}
- def knownDirectSubclasses = children
+ def knownDirectSubclasses = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ children
+ }
+
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
@@ -1188,6 +1192,10 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* to generate a type of kind *
* for a term symbol, its usual type.
* See the tpe/tpeHK overrides in TypeSymbol for more.
+ *
+ * For type symbols, `tpe` is different than `info`. `tpe` returns a typeRef
+ * to the type symbol, `info` returns the type information of the type symbol,
+ * e.g. a ClassInfoType for classes or a TypeBounds for abstract types.
*/
def tpe: Type = info
def tpeHK: Type = tpe
@@ -1583,8 +1591,21 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
setAnnotations(annot :: annotations)
// Convenience for the overwhelmingly common case
- def addAnnotation(sym: Symbol, args: Tree*): this.type =
+ def addAnnotation(sym: Symbol, args: Tree*): this.type = {
+ // The assertion below is meant to prevent issues like SI-7009 but it's disabled
+ // due to problems with cycles while compiling the Scala library. It's rather shocking that
+ // just checking whether sym is a monomorphic type introduces nasty cycles. We are definitely
+ // forcing too much because monomorphism is a local property of a type that can be checked
+ // syntactically
+ // assert(sym.initialize.isMonomorphicType, sym)
addAnnotation(AnnotationInfo(sym.tpe, args.toList, Nil))
+ }
+
+ /** Use this variant if you want to pass (for example) an applied type */
+ def addAnnotation(tp: Type, args: Tree*): this.type = {
+ assert(tp.typeParams.isEmpty, tp)
+ addAnnotation(AnnotationInfo(tp, args.toList, Nil))
+ }
// ------ comparisons ----------------------------------------------------------------
@@ -1651,6 +1672,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
@inline final def map(f: Symbol => Symbol): Symbol = if (this eq NoSymbol) this else f(this)
+ final def toOption: Option[Symbol] = if (exists) Some(this) else None
+
// ------ cloneing -------------------------------------------------------------------
/** A clone of this symbol. */
@@ -1728,8 +1751,27 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** For a case class, the symbols of the accessor methods, one for each
* argument in the first parameter list of the primary constructor.
* The empty list for all other classes.
- */
- final def caseFieldAccessors: List[Symbol] =
+ *
+ * This list will be sorted to correspond to the declaration order
+ * in the constructor parameter
+ */
+ final def caseFieldAccessors: List[Symbol] = {
+ // We can't rely on the ordering of the case field accessors within decls --
+ // handling of non-public parameters seems to change the order (see SI-7035.)
+ //
+ // Luckily, the constrParamAccessors are still sorted properly, so sort the field-accessors using them
+ // (need to undo name-mangling, including the sneaky trailing whitespace)
+ //
+ // The slightly more principled approach of using the paramss of the
+ // primary constructor leads to cycles in, for example, pos/t5084.scala.
+ val primaryNames = constrParamAccessors.map(acc => nme.dropLocalSuffix(acc.name))
+ caseFieldAccessorsUnsorted.sortBy { acc =>
+ primaryNames indexWhere { orig =>
+ (acc.name == orig) || (acc.name startsWith (orig append "$"))
+ }
+ }
+ }
+ private final def caseFieldAccessorsUnsorted: List[Symbol] =
(info.decls filter (_.isCaseAccessorMethod)).toList
final def constrParamAccessors: List[Symbol] =
@@ -2473,7 +2515,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
override def outerSource: Symbol =
- if (originalName == nme.OUTER) initialize.referenced
+ // SI-6888 Approximate the name to work around the deficiencies in `nme.originalName`
+ // in the face of classes named '$'. SI-2806 remains open to address the deeper problem.
+ if (originalName endsWith (nme.OUTER)) initialize.referenced
else NoSymbol
def setModuleClass(clazz: Symbol): TermSymbol = {
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index ebf0998573..c1753fc5a1 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -172,10 +172,29 @@ abstract class TreeGen extends macros.TreeBuilder {
if (qual.symbol != null && (qual.symbol.isEffectiveRoot || qual.symbol.isEmptyPackage))
mkAttributedIdent(sym)
else {
+ // Have to recognize anytime a selection is made on a package
+ // so it can be rewritten to foo.bar.`package`.name rather than
+ // foo.bar.name if name is in the package object.
+ // TODO - factor out the common logic between this and
+ // the Typers method "isInPackageObject", used in typedIdent.
+ val qualsym = (
+ if (qual.tpe ne null) qual.tpe.typeSymbol
+ else if (qual.symbol ne null) qual.symbol
+ else NoSymbol
+ )
+ val needsPackageQualifier = (
+ (sym ne null)
+ && qualsym.isPackage
+ && !sym.isDefinedInPackage
+ )
val pkgQualifier =
- if (sym != null && sym.owner.isPackageObjectClass && sym.effectiveOwner == qual.tpe.typeSymbol) {
- val obj = sym.owner.sourceModule
- Select(qual, nme.PACKAGE) setSymbol obj setType singleType(qual.tpe, obj)
+ if (needsPackageQualifier) {
+ // The owner of a symbol which requires package qualification may be the
+ // package object itself, but it could also be any superclass of the package
+ // object. In the latter case, we must go through the qualifier's info
+ // to obtain the right symbol.
+ val packageObject = if (sym.owner.isModuleClass) sym.owner.sourceModule else qual.tpe member nme.PACKAGE
+ Select(qual, nme.PACKAGE) setSymbol packageObject setType singleType(qual.tpe, packageObject)
}
else qual
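At the source level, the rewrite described above corresponds to the following sketch (the package and member names are invented): both selections refer to the same member, and the tree generator now reliably routes the first form through `package`.

    package foo {
      package object bar {
        val greeting = "hi"
      }
    }

    object PackageObjectDemo extends App {
      println(foo.bar.greeting)           // selected via the package prefix
      println(foo.bar.`package`.greeting) // the explicit form the generator produces
    }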
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 8908036442..8b5dc80c83 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -234,6 +234,20 @@ abstract class TreeInfo {
tree
}
+ /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */
+ def stripCast(tree: Tree): Tree = tree match {
+ case TypeApply(sel @ Select(inner, _), _) if isCastSymbol(sel.symbol) =>
+ stripCast(inner)
+ case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCastSymbol(sel.symbol) =>
+ stripCast(inner)
+ case t =>
+ t
+ }
+
+ object StripCast {
+ def unapply(tree: Tree): Some[Tree] = Some(stripCast(tree))
+ }
+
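As a rough sketch of the first tree shape `stripCast` peels off, one can inspect a reified cast with the public universe; the shape in the comment is approximate, and the object name is invented:

    import scala.reflect.runtime.universe._

    object StripCastShapesDemo extends App {
      val x = 42
      // `x.asInstanceOf[Int]` reifies to roughly
      //   TypeApply(Select(<x>, newTermName("asInstanceOf")), List(TypeTree()))
      // which is the first shape matched by stripCast above.
      println(showRaw(reify(x.asInstanceOf[Int]).tree))
    }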
/** Is tree a self or super constructor call? */
def isSelfOrSuperConstrCall(tree: Tree) = {
// stripNamedApply for SI-3584: adaptToImplicitMethod in Typers creates a special context
@@ -247,22 +261,24 @@ abstract class TreeInfo {
* in the position `for { <tree> <- expr }` based only
* on information at the `parser` phase? To qualify, there
* may be no subtree that will be interpreted as a
- * Stable Identifier Pattern.
+ * Stable Identifier Pattern, nor any type tests, even
+ * on TupleN. See SI-6968.
*
* For instance:
*
* {{{
- * foo @ (bar, (baz, quux))
+ * (foo @ (bar @ _)) = 0
* }}}
*
- * is a variable pattern; if the structure matches,
- * then the remainder is inevitable.
+ * is not a variable pattern; it only binds names.
*
* The following are not variable patterns.
*
* {{{
- * foo @ (bar, (`baz`, quux)) // back quoted ident, not at top level
- * foo @ (bar, Quux) // UpperCase ident, not at top level
+ * `bar`
+ * Bar
+ * (a, b)
+ * _: T
* }}}
*
* If the pattern is a simple identifier, it is always
@@ -291,10 +307,6 @@ abstract class TreeInfo {
tree match {
case Bind(name, pat) => isVarPatternDeep0(pat)
case Ident(name) => isVarPattern(tree)
- case Apply(sel, args) =>
- ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName)
- && (args forall isVarPatternDeep0)
- )
case _ => false
}
}
@@ -330,9 +342,6 @@ abstract class TreeInfo {
def preSuperFields(stats: List[Tree]): List[ValDef] =
stats collect { case vd: ValDef if isEarlyValDef(vd) => vd }
- def hasUntypedPreSuperFields(stats: List[Tree]): Boolean =
- preSuperFields(stats) exists (_.tpt.isEmpty)
-
def isEarlyDef(tree: Tree) = tree match {
case TypeDef(mods, _, _, _) => mods hasFlag PRESUPER
case ValDef(mods, _, _, _) => mods hasFlag PRESUPER
@@ -419,12 +428,26 @@ abstract class TreeInfo {
case _ => false
}
+ /** Is the argument a wildcard star type of the form `_*`?
+ */
+ def isWildcardStarType(tree: Tree): Boolean = tree match {
+ case Ident(tpnme.WILDCARD_STAR) => true
+ case _ => false
+ }
+
/** Is this pattern node a catch-all (wildcard or variable) pattern? */
def isDefaultCase(cdef: CaseDef) = cdef match {
case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat)
case _ => false
}
+ /** Is this pattern node a synthetic catch-all case, added during PartialFunction synthesis before we know
+ * whether the user-provided cases are exhaustive. */
+ def isSyntheticDefaultCase(cdef: CaseDef) = cdef match {
+ case CaseDef(Bind(nme.DEFAULT_CASE, _), EmptyTree, _) => true
+ case _ => false
+ }
+
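For background, a plain source-level example of where such a synthetic case comes from (the object name is invented, and nothing here is part of the patch): when a pattern-matching anonymous function is converted to a PartialFunction, the compiler adds a catch-all case so that `isDefinedAt` can be derived; `isSyntheticDefaultCase` recognizes exactly that case.

    object SyntheticDefaultCaseDemo extends App {
      // The user writes only one case; the synthetic catch-all is added by the compiler.
      val pf: PartialFunction[Int, String] = { case 1 => "one" }
      println(pf.isDefinedAt(1)) // true
      println(pf.isDefinedAt(2)) // false, via the synthesized default case
    }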
/** Does this CaseDef catch Throwable? */
def catchesThrowable(cdef: CaseDef) = catchesAllOf(cdef, ThrowableClass.tpe)
@@ -497,10 +520,6 @@ abstract class TreeInfo {
def isSynthCaseSymbol(sym: Symbol) = sym hasAllFlags SYNTH_CASE_FLAGS
def hasSynthCaseSymbol(t: Tree) = t.symbol != null && isSynthCaseSymbol(t.symbol)
- def isTraitRef(tree: Tree): Boolean = {
- val sym = if (tree.tpe != null) tree.tpe.typeSymbol else null
- ((sym ne null) && sym.initialize.isTrait)
- }
/** Applications in Scala can have one of the following shapes:
*
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 431afd286d..754adcb80d 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -36,7 +36,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
def isDef = false
def isEmpty = false
- def canHaveAttrs = true
/** The canonical way to test if a Tree represents a term.
*/
@@ -229,6 +228,14 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def isDef = true
}
+ case object EmptyTree extends TermTree {
+ val asList = List(this)
+ super.tpe_=(NoType)
+ override def tpe_=(t: Type) =
+ if (t != NoType) throw new UnsupportedOperationException("tpe_=("+t+") inapplicable for <empty>")
+ override def isEmpty = true
+ }
+
abstract class MemberDef extends DefTree with MemberDefApi {
def mods: Modifiers
def keyword: String = this match {
@@ -516,7 +523,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
override private[scala] def copyAttrs(tree: Tree) = {
super.copyAttrs(tree)
tree match {
- case other: TypeTree => wasEmpty = other.wasEmpty // SI-6648 Critical for correct operation of `resetAttrs`.
+ case other: TypeTree =>
+ // SI-6648 Critical for correct operation of `resetAttrs`.
+ wasEmpty = other.wasEmpty
+ if (other.orig != null)
+ orig = other.orig.duplicate
case _ =>
}
this
@@ -592,7 +603,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
case _: ApplyToImplicitArgs => new ApplyToImplicitArgs(fun, args)
case _: ApplyImplicitView => new ApplyImplicitView(fun, args)
// TODO: ApplyConstructor ???
- case self.pendingSuperCall => self.pendingSuperCall
case _ => new Apply(fun, args)
}).copyAttrs(tree)
def ApplyDynamic(tree: Tree, qual: Tree, args: List[Tree]) =
@@ -955,23 +965,12 @@ trait Trees extends api.Trees { self: SymbolTable =>
def ValDef(sym: Symbol): ValDef = ValDef(sym, EmptyTree)
- trait CannotHaveAttrs extends Tree {
- override def canHaveAttrs = false
-
- private def unsupported(what: String, args: Any*) =
- throw new UnsupportedOperationException(s"$what($args) inapplicable for "+self.toString)
-
+ object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) {
+ override def isEmpty = true
super.setPos(NoPosition)
- override def setPos(pos: Position) = unsupported("setPos", pos)
-
- super.setType(NoType)
- override def tpe_=(t: Type) = if (t != NoType) unsupported("tpe_=", t)
+ override def setPos(pos: Position) = { assert(false); this }
}
- case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
- object emptyValDef extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs
- object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs
-
def DefDef(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree): DefDef =
atPos(sym.pos) {
assert(sym != NoSymbol)
@@ -989,6 +988,18 @@ trait Trees extends api.Trees { self: SymbolTable =>
def DefDef(sym: Symbol, mods: Modifiers, rhs: Tree): DefDef =
DefDef(sym, mods, mapParamss(sym)(ValDef), rhs)
+ /** A DefDef with original trees attached to the TypeTree of each parameter */
+ def DefDef(sym: Symbol, mods: Modifiers, originalParamTpts: Symbol => Tree, rhs: Tree): DefDef = {
+ val paramms = mapParamss(sym){ sym =>
+ val vd = ValDef(sym, EmptyTree)
+ (vd.tpt : @unchecked) match {
+ case tt: TypeTree => tt setOriginal (originalParamTpts(sym) setPos sym.pos.focus)
+ }
+ vd
+ }
+ DefDef(sym, mods, paramms, rhs)
+ }
+
def DefDef(sym: Symbol, rhs: Tree): DefDef =
DefDef(sym, Modifiers(sym.flags), rhs)
@@ -1039,9 +1050,6 @@ trait Trees extends api.Trees { self: SymbolTable =>
def New(tpe: Type, args: Tree*): Tree =
ApplyConstructor(TypeTree(tpe), args.toList)
- def New(tpe: Type, argss: List[List[Tree]]): Tree =
- New(TypeTree(tpe), argss)
-
def New(sym: Symbol, args: Tree*): Tree =
New(sym.tpe, args: _*)
@@ -1122,7 +1130,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
traverse(annot); traverse(arg)
case Template(parents, self, body) =>
traverseTrees(parents)
- if (self ne emptyValDef) traverse(self)
+ if (!self.isEmpty) traverse(self)
traverseStats(body, tree.symbol)
case Block(stats, expr) =>
traverseTrees(stats); traverse(expr)
@@ -1440,6 +1448,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
if (tree.hasSymbol) {
subst(from, to)
tree match {
+ case _: DefTree =>
+ val newInfo = symSubst(tree.symbol.info)
+ if (!(newInfo =:= tree.symbol.info)) {
+ debuglog(sm"""
+ |TreeSymSubstituter: updated info of symbol ${tree.symbol}
+ | Old: ${showRaw(tree.symbol.info, printTypes = true, printIds = true)}
+ | New: ${showRaw(newInfo, printTypes = true, printIds = true)}""")
+ tree.symbol updateInfo newInfo
+ }
+ case _ =>
+ // No special handling is required for Function or Import nodes here,
+ // as they don't have interesting infos attached to their symbols.
+ // Substitution of the referenced symbol of Return nodes is handled
+ // in ChangeOwnerTraverser.
+ }
+ tree match {
case Ident(name0) if tree.symbol != NoSymbol =>
treeCopy.Ident(tree, tree.symbol.name)
case Select(qual, name0) if tree.symbol != NoSymbol =>
@@ -1488,6 +1512,15 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
}
+ trait TreeStackTraverser extends Traverser {
+ import collection.mutable
+ val path: mutable.Stack[Tree] = mutable.Stack()
+ abstract override def traverse(t: Tree) = {
+ path push t
+ try super.traverse(t) finally path.pop()
+ }
+ }
+
private lazy val duplicator = new Transformer {
override val treeCopy = newStrictTreeCopier
override def transform(t: Tree) = {
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index c2637e6967..b708ca0fd6 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -22,6 +22,8 @@ import util.ThreeValues._
// internal: error
case WildcardType =>
// internal: unknown
+ case BoundedWildcardType(bounds) =>
+ // internal: unknown
case NoType =>
case NoPrefix =>
case ThisType(sym) =>
@@ -584,9 +586,9 @@ trait Types extends api.Types { self: SymbolTable =>
* Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
* Functions on types are also implemented as PolyTypes.
*
- * Example: (in the below, <List> is the type constructor of List)
- * TypeRef(pre, <List>, List()) is replaced by
- * PolyType(X, TypeRef(pre, <List>, List(X)))
+ * Example: (in the below, `<List>` is the type constructor of List)
+ * TypeRef(pre, `<List>`, List()) is replaced by
+ * PolyType(X, TypeRef(pre, `<List>`, List(X)))
*/
def normalize = this // @MAT
@@ -744,7 +746,7 @@ trait Types extends api.Types { self: SymbolTable =>
val trivial = (
this.isTrivial
|| phase.erasedTypes && pre.typeSymbol != ArrayClass
- || pre.normalize.isTrivial && !isPossiblePrefix(clazz)
+ || skipPrefixOf(pre, clazz)
)
if (trivial) this
else {
@@ -1798,7 +1800,7 @@ trait Types extends api.Types { self: SymbolTable =>
// TODO see comments around def intersectionType and def merge
def flatten(tps: List[Type]): List[Type] = tps flatMap { case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) case tp => List(tp) }
val flattened = flatten(parents).distinct
- if (decls.isEmpty && flattened.tail.isEmpty) {
+ if (decls.isEmpty && hasLength(flattened, 1)) {
flattened.head
} else if (flattened != parents) {
refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition)
@@ -2481,8 +2483,10 @@ trait Types extends api.Types { self: SymbolTable =>
// from (T1, T2) => R.
targs match {
case in :: out :: Nil if !isTupleType(in) =>
- // A => B => C should be (A => B) => C or A => (B => C)
- val in_s = if (isFunctionType(in)) "(" + in + ")" else "" + in
+ // A => B => C should be (A => B) => C or A => (B => C).
+ // Also, if A is by-name, we want (=> A) => B, because => is right-associative and
+ // => A => B would mean => (A => B), which is a different type.
+ val in_s = if (isFunctionType(in) || isByNameParamType(in)) "(" + in + ")" else "" + in
val out_s = if (isFunctionType(out)) "(" + out + ")" else "" + out
in_s + " => " + out_s
case xs =>
@@ -3538,7 +3542,7 @@ trait Types extends api.Types { self: SymbolTable =>
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
- val clazz = owner.newRefinementClass(pos) // TODO: why were we passing in NoPosition instead of pos?
+ val clazz = owner.newRefinementClass(pos)
val result = RefinedType(parents, decls, clazz)
clazz.setInfo(result)
result
@@ -3588,12 +3592,6 @@ trait Types extends api.Types { self: SymbolTable =>
val pre1 = pre match {
case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred =>
x.thistpe
- case _: CompoundType if sym1.isClass =>
- // sharpen prefix so that it is maximal and still contains the class.
- pre.parents.reverse dropWhile (_.member(sym1.name) != sym1) match {
- case Nil => pre
- case parent :: _ => parent
- }
case _ => pre
}
if (pre eq pre1) TypeRef(pre, sym1, args)
@@ -3850,12 +3848,16 @@ trait Types extends api.Types { self: SymbolTable =>
// This is the specified behavior.
protected def etaExpandKeepsStar = false
+ /** Turn any T* types into Seq[T] except when
+ * in method parameter position.
+ */
object dropRepeatedParamType extends TypeMap {
def apply(tp: Type): Type = tp match {
case MethodType(params, restpe) =>
- MethodType(params, apply(restpe))
- case PolyType(tparams, restpe) =>
- PolyType(tparams, apply(restpe))
+ // Not mapping over params
+ val restpe1 = apply(restpe)
+ if (restpe eq restpe1) tp
+ else MethodType(params, restpe1)
case TypeRef(_, RepeatedParamClass, arg :: Nil) =>
seqType(arg)
case _ =>
@@ -4467,14 +4469,15 @@ trait Types extends api.Types { self: SymbolTable =>
*/
def isPossiblePrefix(clazz: Symbol) = clazz.isClass && !clazz.isPackageClass
+ private def skipPrefixOf(pre: Type, clazz: Symbol) = (
+ (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
+ )
+
/** A map to compute the asSeenFrom method */
class AsSeenFromMap(pre: Type, clazz: Symbol) extends TypeMap with KeepOnlyTypeConstraints {
var capturedSkolems: List[Symbol] = List()
var capturedParams: List[Symbol] = List()
- private def skipPrefixOf(pre: Type, clazz: Symbol) = (
- (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz)
- )
override def mapOver(tree: Tree, giveup: ()=>Nothing): Tree = {
object annotationArgRewriter extends TypeMapTransformer {
private def canRewriteThis(sym: Symbol) = (
@@ -4507,8 +4510,7 @@ trait Types extends api.Types { self: SymbolTable =>
}
def apply(tp: Type): Type =
- if (skipPrefixOf(pre, clazz)) tp
- else tp match {
+ tp match {
case ThisType(sym) =>
def toPrefix(pre: Type, clazz: Symbol): Type =
if (skipPrefixOf(pre, clazz)) tp
@@ -4670,6 +4672,8 @@ trait Types extends api.Types { self: SymbolTable =>
/** A map to implement the `substSym` method. */
class SubstSymMap(from: List[Symbol], to: List[Symbol]) extends SubstMap(from, to) {
+ def this(pairs: (Symbol, Symbol)*) = this(pairs.toList.map(_._1), pairs.toList.map(_._2))
+
protected def toType(fromtp: Type, sym: Symbol) = fromtp match {
case TypeRef(pre, _, args) => copyTypeRef(fromtp, pre, sym, args)
case SingleType(pre, _) => singleType(pre, sym)
@@ -4702,23 +4706,13 @@ trait Types extends api.Types { self: SymbolTable =>
case idx => Some(to(idx))
}
- override def transform(tree: Tree) =
- tree match {
- case tree@Ident(_) =>
- termMapsTo(tree.symbol) match {
- case Some(tosym) =>
- if (tosym.info.bounds.hi.typeSymbol isSubClass SingletonClass) {
- Ident(tosym.existentialToString)
- .setSymbol(tosym)
- .setPos(tosym.pos)
- .setType(dropSingletonType(tosym.info.bounds.hi))
- } else {
- giveup()
- }
- case none => super.transform(tree)
- }
- case tree => super.transform(tree)
+ override def transform(tree: Tree) = {
+ termMapsTo(tree.symbol) match {
+ case Some(tosym) => tree.symbol = tosym
+ case None => ()
}
+ super.transform(tree)
+ }
}
trans.transform(tree)
}
@@ -4980,6 +4974,51 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ /**
+ * A more permissive version of `Type#memberType` which does not require
+ * that the symbol is a direct member of the prefix.
+ *
+ * For instance:
+ *
+ * {{{
+ * class C[T] {
+ * sealed trait F[A]
+ * object X {
+ * object S1 extends F[T]
+ * }
+ * class S2 extends F[T]
+ * }
+ * object O extends C[Int] {
+ * def foo(f: F[Int]) = f match {...} // need to enumerate sealed subtypes of the scrutinee here.
+ * }
+ * class S3 extends O.F[String]
+ *
+ * nestedMemberType(<S1>, <O.type>, <C>) = O.X.S1.type
+ * nestedMemberType(<S2>, <O.type>, <C>) = O.S2.type
+ * nestedMemberType(<S3>, <O.type>, <C>) = S3.type
+ * }}}
+ *
+ * @param sym The symbol of the subtype
+ * @param pre The prefix from which the symbol is seen
+ * @param owner
+ */
+ def nestedMemberType(sym: Symbol, pre: Type, owner: Symbol): Type = {
+ def loop(tp: Type): Type =
+ if (tp.isTrivial) tp
+ else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
+ val widened = tp match {
+ case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
+ case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
+ }
+ widened asSeenFrom (pre, tp.typeSymbol.owner)
+ }
+ else loop(tp.prefix) memberType tp.typeSymbol
+
+ val result = loop(sym.tpeHK)
+ assert(sym.isTerm || result.typeSymbol == sym, s"($result).typeSymbol = ${result.typeSymbol}; expected ${sym}")
+ result
+ }
+
/** The most deeply nested owner that contains all the symbols
* of thistype or prefixless typerefs/singletype occurrences in given type.
*/
@@ -6071,7 +6110,7 @@ trait Types extends api.Types { self: SymbolTable =>
(sameLength(params1, params2) &&
mt1.isImplicit == mt2.isImplicit &&
matchingParams(params1, params2, mt1.isJava, mt2.isJava) &&
- isSubType(res1, res2.substSym(params2, params1), depth))
+ isSubType(res1.substSym(params1, params2), res2, depth))
// TODO: if mt1.params.isEmpty, consider NullaryMethodType?
case _ =>
false
@@ -6614,7 +6653,7 @@ trait Types extends api.Types { self: SymbolTable =>
val ts0 = elimSub0(ts)
if (ts0.isEmpty || ts0.tail.isEmpty) ts0
else {
- val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.underlying))
+ val ts1 = ts0 mapConserve (t => elimAnonymousClass(t.dealiasWiden))
if (ts1 eq ts0) ts0
else elimSub(ts1, depth)
}
@@ -6731,6 +6770,8 @@ trait Types extends api.Types { self: SymbolTable =>
NullaryMethodType(lub0(matchingRestypes(ts, Nil)))
case ts @ TypeBounds(_, _) :: rest =>
TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth))
+ case ts @ AnnotatedType(annots, tpe, _) :: rest =>
+ annotationsLub(lub0(ts map (_.withoutAnnotations)), ts)
case ts =>
lubResults get (depth, ts) match {
case Some(lubType) =>
@@ -7110,6 +7151,14 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
+ def isJavaVarargsAncestor(clazz: Symbol) = (
+ clazz.isClass
+ && clazz.isJavaDefined
+ && (clazz.info.nonPrivateDecls exists isJavaVarArgsMethod)
+ )
+ def inheritsJavaVarArgsMethod(clazz: Symbol) =
+ clazz.thisType.baseClasses exists isJavaVarargsAncestor
+
/** All types in list must be polytypes with type parameter lists of
* same length as tparams.
* Returns list of list of bounds infos, where corresponding type
@@ -7222,6 +7271,12 @@ trait Types extends api.Types { self: SymbolTable =>
else (ps :+ SerializableClass.tpe).toList
)
+ /** Members of the given class, other than those inherited
+ * from Any or AnyRef.
+ */
+ def nonTrivialMembers(clazz: Symbol): Iterable[Symbol] =
+ clazz.info.members filterNot (sym => sym.owner == ObjectClass || sym.owner == AnyClass)
+
def objToAny(tp: Type): Type =
if (!phase.erasedTypes && tp.typeSymbol == ObjectClass) AnyClass.tpe
else tp
diff --git a/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
new file mode 100644
index 0000000000..058ff61fbf
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/annotations/compileTimeOnly.scala
@@ -0,0 +1,31 @@
+package scala.reflect
+package internal
+package annotations
+
+import scala.annotation.meta._
+
+/**
+ * An annotation that designates a member should not be referred to after
+ * type checking (which includes macro expansion); it must only be used in
+ * the arguments of some other macro that will eliminate it from the AST.
+ *
+ * Later on, this annotation should be removed and implemented with domain-specific macros.
+ * If a certain method `inner` mustn't be called outside the context of a given macro `outer`,
+ * then it should itself be declared as a macro.
+ *
+ * Approach #1. Expansion of `inner` checks whether its enclosures contain `outer` and
+ * report an error if `outer` is not detected. In principle, we could use this approach right now,
+ * but currently enclosures are broken, because contexts aren't exactly famous for keeping precise
+ * track of the stack of the trees being typechecked.
+ *
+ * Approach #2. Default implementation of `inner` is just an invocation of `c.abort`.
+ * `outer` is an untyped macro, which expands into a block, which contains a redefinition of `inner`
+ * and a call to itself. The redefined `inner` could either be a stub like `Expr.splice` or carry out
+ * domain-specific logic.
+ *
+ * @param message the error message to print during compilation if a reference remains
+ * after type checking
+ * @since 2.10.1
+ */
+@getter @setter @beanGetter @beanSetter
+final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
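A minimal usage sketch (the `Dsl` object and `placeholder` method are invented names): a member that only makes sense inside some enclosing macro can be flagged so that any reference surviving type checking is reported with the given message.

    import scala.reflect.internal.annotations.compileTimeOnly

    object Dsl {
      // Assumed scenario: an enclosing macro rewrites away calls to
      // `placeholder` before the typer is done with them.
      @compileTimeOnly("`placeholder` must be rewritten away by an enclosing macro")
      def placeholder[T]: T = sys.error("never called at runtime")
    }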
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index ec3501d5bc..81368df7a6 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -47,6 +47,4 @@ abstract class MutableSettings extends AbsSettings {
def XoldPatmat: BooleanSetting
def XnoPatmatAnalysis: BooleanSetting
def XfullLubs: BooleanSetting
- def companionsInPkgObjs: BooleanSetting
-
}
diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala
index 3d10d4c9ce..8f287a1640 100644
--- a/src/reflect/scala/reflect/internal/util/Position.scala
+++ b/src/reflect/scala/reflect/internal/util/Position.scala
@@ -91,7 +91,7 @@ abstract class Position extends scala.reflect.api.Position { self =>
/** An optional value containing the source file referred to by this position, or
* None if not defined.
*/
- def source: SourceFile = throw new UnsupportedOperationException("Position.source")
+ def source: SourceFile = throw new UnsupportedOperationException(s"Position.source on ${this.getClass}")
/** Is this position neither a NoPosition nor a FakePosition?
* If isDefined is true, offset and source are both defined.
@@ -111,19 +111,19 @@ abstract class Position extends scala.reflect.api.Position { self =>
def makeTransparent: Position = this
/** The start of the position's range, error if not a range position */
- def start: Int = throw new UnsupportedOperationException("Position.start")
+ def start: Int = throw new UnsupportedOperationException(s"Position.start on ${this.getClass}")
/** The start of the position's range, or point if not a range position */
def startOrPoint: Int = point
/** The point (where the ^ is) of the position */
- def point: Int = throw new UnsupportedOperationException("Position.point")
+ def point: Int = throw new UnsupportedOperationException(s"Position.point on ${this.getClass}")
/** The point (where the ^ is) of the position, or else `default` if undefined */
def pointOrElse(default: Int): Int = default
/** The end of the position's range, error if not a range position */
- def end: Int = throw new UnsupportedOperationException("Position.end")
+ def end: Int = throw new UnsupportedOperationException(s"Position.end on ${this.getClass}")
/** The end of the position's range, or point if not a range position */
def endOrPoint: Int = point
diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala
index a77cebf415..007df3b6e2 100644
--- a/src/reflect/scala/reflect/macros/Attachments.scala
+++ b/src/reflect/scala/reflect/macros/Attachments.scala
@@ -44,17 +44,21 @@ abstract class Attachments { self =>
* Replaces an existing payload of the same type, if exists.
*/
def update[T: ClassTag](attachment: T): Attachments { type Pos = self.Pos } =
- new NonemptyAttachments(this.pos, remove[T].all + attachment)
+ new NonemptyAttachments[Pos](this.pos, remove[T].all + attachment)
/** Creates a copy of this attachment with the payload of the given class type `T` removed. */
def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = {
val newAll = all filterNot matchesTag[T]
if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }]
- else new NonemptyAttachments(this.pos, newAll)
+ else new NonemptyAttachments[Pos](this.pos, newAll)
}
+}
- private class NonemptyAttachments(override val pos: Pos, override val all: Set[Any]) extends Attachments {
- type Pos = self.Pos
- def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
- }
+// SI-7018: This used to be an inner class of `Attachments`, but that led to a memory leak in the
+// IDE via $outer pointers.
+// Forward compatibility note: This class used to be Attachments$NonemptyAttachments.
+// However, it's private, and therefore it is not covered by the compatibility policy for 2.10.x.
+private final class NonemptyAttachments[P >: Null](override val pos: P, override val all: Set[Any]) extends Attachments {
+ type Pos = P
+ def withPos(newPos: Pos) = new NonemptyAttachments(newPos, all)
}
diff --git a/src/reflect/scala/reflect/macros/compileTimeOnly.scala b/src/reflect/scala/reflect/macros/compileTimeOnly.scala
deleted file mode 100644
index 5a3a352a53..0000000000
--- a/src/reflect/scala/reflect/macros/compileTimeOnly.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package scala.reflect
-package macros
-
-import scala.annotation.meta._
-
-/**
- * An annotation that designates a member should not be referred to after
- * type checking (which includes macro expansion); it must only be used in
- * the arguments of some other macro that will eliminate it from the AST.
- *
- * @param message the error message to print during compilation if a reference remains
- * after type checking
- * @since 2.10.1
- */
-@getter @setter @beanGetter @beanSetter
-final class compileTimeOnly(message: String) extends scala.annotation.StaticAnnotation
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 01e0634902..778c826dc0 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -610,11 +610,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
/**
* Copy all annotations of Java annotated element `jann` over to Scala symbol `sym`.
+ * Also creates `@throws` annotations if necessary.
* Pre: `sym` is already initialized with a concrete type.
* Note: If `sym` is a method or constructor, its parameter annotations are copied as well.
*/
private def copyAnnotations(sym: Symbol, jann: AnnotatedElement) {
sym setAnnotations (jann.getAnnotations map JavaAnnotationProxy).toList
+ // SI-7065: we're not using getGenericExceptionTypes here to be consistent with ClassfileParser
+ val jexTpes = jann match {
+ case jm: jMethod => jm.getExceptionTypes.toList
+ case jconstr: jConstructor[_] => jconstr.getExceptionTypes.toList
+ case _ => Nil
+ }
+ jexTpes foreach (jexTpe => sym.addThrowsAnnotation(classSymbol(jexTpe)))
}
/**
@@ -631,6 +639,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
/** used to avoid cycles while initializing classes */
private var parentsLevel = 0
private var pendingLoadActions: List[() => Unit] = Nil
+ private val relatedSymbols = clazz +: (if (module != NoSymbol) List(module, module.moduleClass) else Nil)
override def load(sym: Symbol): Unit = {
debugInfo("completing from Java " + sym + "/" + clazz.fullName)//debug
@@ -642,6 +651,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
module.moduleClass setFlag (flags & PRIVATE | JAVA)
}
+ relatedSymbols foreach (importPrivateWithinFromJavaFlags(_, jclazz.getModifiers))
copyAnnotations(clazz, jclazz)
// to do: annotations to set also for module?
@@ -1060,6 +1070,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
.newValue(newTermName(jfield.getName), NoPosition, toScalaFieldFlags(jfield.getModifiers))
.setInfo(typeToScala(jfield.getGenericType))
fieldCache enter (jfield, field)
+ importPrivateWithinFromJavaFlags(field, jfield.getModifiers)
copyAnnotations(field, jfield)
field
}
@@ -1085,6 +1096,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala
val resulttpe = typeToScala(jmeth.getGenericReturnType)
setMethType(meth, tparams, paramtpes, resulttpe)
+ importPrivateWithinFromJavaFlags(meth, jmeth.getModifiers)
copyAnnotations(meth, jmeth)
if ((jmeth.getModifiers & JAVA_ACC_VARARGS) != 0) meth.setInfo(arrayToRepeated(meth.info))
meth
@@ -1108,6 +1120,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala
setMethType(constr, tparams, paramtpes, clazz.tpe)
constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe))
+ importPrivateWithinFromJavaFlags(constr, jconstr.getModifiers)
copyAnnotations(constr, jconstr)
constr
}
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index 2d5b76f094..0e0cf3fc40 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -43,7 +43,6 @@ private[reflect] class Settings extends MutableSettings {
val printtypes = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
- val companionsInPkgObjs = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala
index 7c84279699..90f8cb8d71 100644
--- a/src/scalap/scala/tools/scalap/Main.scala
+++ b/src/scalap/scala/tools/scalap/Main.scala
@@ -184,7 +184,7 @@ object Main extends Main {
val cparg = List("-classpath", "-cp") map (arguments getArgument _) reduceLeft (_ orElse _)
val path = cparg match {
case Some(cp) => new JavaClassPath(DefaultJavaContext.classesInExpandedPath(cp), DefaultJavaContext)
- case _ => PathResolver.fromPathString("")
+ case _ => PathResolver.fromPathString(".") // include '.' in the default classpath SI-6669
}
// print the classpath if output is verbose
if (verbose)
diff --git a/test/benchmarking/t6726-patmat-analysis.scala b/test/benchmarking/t6726-patmat-analysis.scala
new file mode 100644
index 0000000000..bcb7f6c6b2
--- /dev/null
+++ b/test/benchmarking/t6726-patmat-analysis.scala
@@ -0,0 +1,4005 @@
+trait Foo{
+abstract class Base
+case class Dummy0(x: Int) extends Base
+case class Dummy1(x: Int) extends Base
+case class Dummy2(x: Int) extends Base
+case class Dummy3(x: Int) extends Base
+case class Dummy4(x: Int) extends Base
+case class Dummy5(x: Int) extends Base
+case class Dummy6(x: Int) extends Base
+case class Dummy7(x: Int) extends Base
+case class Dummy8(x: Int) extends Base
+case class Dummy9(x: Int) extends Base
+case class Dummy10(x: Int) extends Base
+case class Dummy11(x: Int) extends Base
+case class Dummy12(x: Int) extends Base
+case class Dummy13(x: Int) extends Base
+case class Dummy14(x: Int) extends Base
+case class Dummy15(x: Int) extends Base
+case class Dummy16(x: Int) extends Base
+case class Dummy17(x: Int) extends Base
+case class Dummy18(x: Int) extends Base
+case class Dummy19(x: Int) extends Base
+case class Dummy20(x: Int) extends Base
+case class Dummy21(x: Int) extends Base
+case class Dummy22(x: Int) extends Base
+case class Dummy23(x: Int) extends Base
+case class Dummy24(x: Int) extends Base
+case class Dummy25(x: Int) extends Base
+case class Dummy26(x: Int) extends Base
+case class Dummy27(x: Int) extends Base
+case class Dummy28(x: Int) extends Base
+case class Dummy29(x: Int) extends Base
+case class Dummy30(x: Int) extends Base
+case class Dummy31(x: Int) extends Base
+case class Dummy32(x: Int) extends Base
+case class Dummy33(x: Int) extends Base
+case class Dummy34(x: Int) extends Base
+case class Dummy35(x: Int) extends Base
+case class Dummy36(x: Int) extends Base
+case class Dummy37(x: Int) extends Base
+case class Dummy38(x: Int) extends Base
+case class Dummy39(x: Int) extends Base
+case class Dummy40(x: Int) extends Base
+case class Dummy41(x: Int) extends Base
+case class Dummy42(x: Int) extends Base
+case class Dummy43(x: Int) extends Base
+case class Dummy44(x: Int) extends Base
+case class Dummy45(x: Int) extends Base
+case class Dummy46(x: Int) extends Base
+case class Dummy47(x: Int) extends Base
+case class Dummy48(x: Int) extends Base
+case class Dummy49(x: Int) extends Base
+case class Dummy50(x: Int) extends Base
+case class Dummy51(x: Int) extends Base
+case class Dummy52(x: Int) extends Base
+case class Dummy53(x: Int) extends Base
+case class Dummy54(x: Int) extends Base
+case class Dummy55(x: Int) extends Base
+case class Dummy56(x: Int) extends Base
+case class Dummy57(x: Int) extends Base
+case class Dummy58(x: Int) extends Base
+case class Dummy59(x: Int) extends Base
+case class Dummy60(x: Int) extends Base
+case class Dummy61(x: Int) extends Base
+case class Dummy62(x: Int) extends Base
+case class Dummy63(x: Int) extends Base
+case class Dummy64(x: Int) extends Base
+case class Dummy65(x: Int) extends Base
+case class Dummy66(x: Int) extends Base
+case class Dummy67(x: Int) extends Base
+case class Dummy68(x: Int) extends Base
+case class Dummy69(x: Int) extends Base
+case class Dummy70(x: Int) extends Base
+case class Dummy71(x: Int) extends Base
+case class Dummy72(x: Int) extends Base
+case class Dummy73(x: Int) extends Base
+case class Dummy74(x: Int) extends Base
+case class Dummy75(x: Int) extends Base
+case class Dummy76(x: Int) extends Base
+case class Dummy77(x: Int) extends Base
+case class Dummy78(x: Int) extends Base
+case class Dummy79(x: Int) extends Base
+case class Dummy80(x: Int) extends Base
+case class Dummy81(x: Int) extends Base
+case class Dummy82(x: Int) extends Base
+case class Dummy83(x: Int) extends Base
+case class Dummy84(x: Int) extends Base
+case class Dummy85(x: Int) extends Base
+case class Dummy86(x: Int) extends Base
+case class Dummy87(x: Int) extends Base
+case class Dummy88(x: Int) extends Base
+case class Dummy89(x: Int) extends Base
+case class Dummy90(x: Int) extends Base
+case class Dummy91(x: Int) extends Base
+case class Dummy92(x: Int) extends Base
+case class Dummy93(x: Int) extends Base
+case class Dummy94(x: Int) extends Base
+case class Dummy95(x: Int) extends Base
+case class Dummy96(x: Int) extends Base
+case class Dummy97(x: Int) extends Base
+case class Dummy98(x: Int) extends Base
+case class Dummy99(x: Int) extends Base
+case class Dummy100(x: Int) extends Base
+case class Dummy101(x: Int) extends Base
+case class Dummy102(x: Int) extends Base
+case class Dummy103(x: Int) extends Base
+case class Dummy104(x: Int) extends Base
+case class Dummy105(x: Int) extends Base
+case class Dummy106(x: Int) extends Base
+case class Dummy107(x: Int) extends Base
+case class Dummy108(x: Int) extends Base
+case class Dummy109(x: Int) extends Base
+case class Dummy110(x: Int) extends Base
+case class Dummy111(x: Int) extends Base
+case class Dummy112(x: Int) extends Base
+case class Dummy113(x: Int) extends Base
+case class Dummy114(x: Int) extends Base
+case class Dummy115(x: Int) extends Base
+case class Dummy116(x: Int) extends Base
+case class Dummy117(x: Int) extends Base
+case class Dummy118(x: Int) extends Base
+case class Dummy119(x: Int) extends Base
+case class Dummy120(x: Int) extends Base
+case class Dummy121(x: Int) extends Base
+case class Dummy122(x: Int) extends Base
+case class Dummy123(x: Int) extends Base
+case class Dummy124(x: Int) extends Base
+case class Dummy125(x: Int) extends Base
+case class Dummy126(x: Int) extends Base
+case class Dummy127(x: Int) extends Base
+case class Dummy128(x: Int) extends Base
+case class Dummy129(x: Int) extends Base
+case class Dummy130(x: Int) extends Base
+case class Dummy131(x: Int) extends Base
+case class Dummy132(x: Int) extends Base
+case class Dummy133(x: Int) extends Base
+case class Dummy134(x: Int) extends Base
+case class Dummy135(x: Int) extends Base
+case class Dummy136(x: Int) extends Base
+case class Dummy137(x: Int) extends Base
+case class Dummy138(x: Int) extends Base
+case class Dummy139(x: Int) extends Base
+case class Dummy140(x: Int) extends Base
+case class Dummy141(x: Int) extends Base
+case class Dummy142(x: Int) extends Base
+case class Dummy143(x: Int) extends Base
+case class Dummy144(x: Int) extends Base
+case class Dummy145(x: Int) extends Base
+case class Dummy146(x: Int) extends Base
+case class Dummy147(x: Int) extends Base
+case class Dummy148(x: Int) extends Base
+case class Dummy149(x: Int) extends Base
+case class Dummy150(x: Int) extends Base
+case class Dummy151(x: Int) extends Base
+case class Dummy152(x: Int) extends Base
+case class Dummy153(x: Int) extends Base
+case class Dummy154(x: Int) extends Base
+case class Dummy155(x: Int) extends Base
+case class Dummy156(x: Int) extends Base
+case class Dummy157(x: Int) extends Base
+case class Dummy158(x: Int) extends Base
+case class Dummy159(x: Int) extends Base
+case class Dummy160(x: Int) extends Base
+case class Dummy161(x: Int) extends Base
+case class Dummy162(x: Int) extends Base
+case class Dummy163(x: Int) extends Base
+case class Dummy164(x: Int) extends Base
+case class Dummy165(x: Int) extends Base
+case class Dummy166(x: Int) extends Base
+case class Dummy167(x: Int) extends Base
+case class Dummy168(x: Int) extends Base
+case class Dummy169(x: Int) extends Base
+case class Dummy170(x: Int) extends Base
+case class Dummy171(x: Int) extends Base
+case class Dummy172(x: Int) extends Base
+case class Dummy173(x: Int) extends Base
+case class Dummy174(x: Int) extends Base
+case class Dummy175(x: Int) extends Base
+case class Dummy176(x: Int) extends Base
+case class Dummy177(x: Int) extends Base
+case class Dummy178(x: Int) extends Base
+case class Dummy179(x: Int) extends Base
+case class Dummy180(x: Int) extends Base
+case class Dummy181(x: Int) extends Base
+case class Dummy182(x: Int) extends Base
+case class Dummy183(x: Int) extends Base
+case class Dummy184(x: Int) extends Base
+case class Dummy185(x: Int) extends Base
+case class Dummy186(x: Int) extends Base
+case class Dummy187(x: Int) extends Base
+case class Dummy188(x: Int) extends Base
+case class Dummy189(x: Int) extends Base
+case class Dummy190(x: Int) extends Base
+case class Dummy191(x: Int) extends Base
+case class Dummy192(x: Int) extends Base
+case class Dummy193(x: Int) extends Base
+case class Dummy194(x: Int) extends Base
+case class Dummy195(x: Int) extends Base
+case class Dummy196(x: Int) extends Base
+case class Dummy197(x: Int) extends Base
+case class Dummy198(x: Int) extends Base
+case class Dummy199(x: Int) extends Base
+case class Dummy200(x: Int) extends Base
+case class Dummy201(x: Int) extends Base
+case class Dummy202(x: Int) extends Base
+case class Dummy203(x: Int) extends Base
+case class Dummy204(x: Int) extends Base
+case class Dummy205(x: Int) extends Base
+case class Dummy206(x: Int) extends Base
+case class Dummy207(x: Int) extends Base
+case class Dummy208(x: Int) extends Base
+case class Dummy209(x: Int) extends Base
+case class Dummy210(x: Int) extends Base
+case class Dummy211(x: Int) extends Base
+case class Dummy212(x: Int) extends Base
+case class Dummy213(x: Int) extends Base
+case class Dummy214(x: Int) extends Base
+case class Dummy215(x: Int) extends Base
+case class Dummy216(x: Int) extends Base
+case class Dummy217(x: Int) extends Base
+case class Dummy218(x: Int) extends Base
+case class Dummy219(x: Int) extends Base
+case class Dummy220(x: Int) extends Base
+case class Dummy221(x: Int) extends Base
+case class Dummy222(x: Int) extends Base
+case class Dummy223(x: Int) extends Base
+case class Dummy224(x: Int) extends Base
+case class Dummy225(x: Int) extends Base
+case class Dummy226(x: Int) extends Base
+case class Dummy227(x: Int) extends Base
+case class Dummy228(x: Int) extends Base
+case class Dummy229(x: Int) extends Base
+case class Dummy230(x: Int) extends Base
+case class Dummy231(x: Int) extends Base
+case class Dummy232(x: Int) extends Base
+case class Dummy233(x: Int) extends Base
+case class Dummy234(x: Int) extends Base
+case class Dummy235(x: Int) extends Base
+case class Dummy236(x: Int) extends Base
+case class Dummy237(x: Int) extends Base
+case class Dummy238(x: Int) extends Base
+case class Dummy239(x: Int) extends Base
+case class Dummy240(x: Int) extends Base
+case class Dummy241(x: Int) extends Base
+case class Dummy242(x: Int) extends Base
+case class Dummy243(x: Int) extends Base
+case class Dummy244(x: Int) extends Base
+case class Dummy245(x: Int) extends Base
+case class Dummy246(x: Int) extends Base
+case class Dummy247(x: Int) extends Base
+case class Dummy248(x: Int) extends Base
+case class Dummy249(x: Int) extends Base
+case class Dummy250(x: Int) extends Base
+case class Dummy251(x: Int) extends Base
+case class Dummy252(x: Int) extends Base
+case class Dummy253(x: Int) extends Base
+case class Dummy254(x: Int) extends Base
+case class Dummy255(x: Int) extends Base
+case class Dummy256(x: Int) extends Base
+case class Dummy257(x: Int) extends Base
+case class Dummy258(x: Int) extends Base
+case class Dummy259(x: Int) extends Base
+case class Dummy260(x: Int) extends Base
+case class Dummy261(x: Int) extends Base
+case class Dummy262(x: Int) extends Base
+case class Dummy263(x: Int) extends Base
+case class Dummy264(x: Int) extends Base
+case class Dummy265(x: Int) extends Base
+case class Dummy266(x: Int) extends Base
+case class Dummy267(x: Int) extends Base
+case class Dummy268(x: Int) extends Base
+case class Dummy269(x: Int) extends Base
+case class Dummy270(x: Int) extends Base
+case class Dummy271(x: Int) extends Base
+case class Dummy272(x: Int) extends Base
+case class Dummy273(x: Int) extends Base
+case class Dummy274(x: Int) extends Base
+case class Dummy275(x: Int) extends Base
+case class Dummy276(x: Int) extends Base
+case class Dummy277(x: Int) extends Base
+case class Dummy278(x: Int) extends Base
+case class Dummy279(x: Int) extends Base
+case class Dummy280(x: Int) extends Base
+case class Dummy281(x: Int) extends Base
+case class Dummy282(x: Int) extends Base
+case class Dummy283(x: Int) extends Base
+case class Dummy284(x: Int) extends Base
+case class Dummy285(x: Int) extends Base
+case class Dummy286(x: Int) extends Base
+case class Dummy287(x: Int) extends Base
+case class Dummy288(x: Int) extends Base
+case class Dummy289(x: Int) extends Base
+case class Dummy290(x: Int) extends Base
+case class Dummy291(x: Int) extends Base
+case class Dummy292(x: Int) extends Base
+case class Dummy293(x: Int) extends Base
+case class Dummy294(x: Int) extends Base
+case class Dummy295(x: Int) extends Base
+case class Dummy296(x: Int) extends Base
+case class Dummy297(x: Int) extends Base
+case class Dummy298(x: Int) extends Base
+case class Dummy299(x: Int) extends Base
+case class Dummy300(x: Int) extends Base
+case class Dummy301(x: Int) extends Base
+case class Dummy302(x: Int) extends Base
+case class Dummy303(x: Int) extends Base
+case class Dummy304(x: Int) extends Base
+case class Dummy305(x: Int) extends Base
+case class Dummy306(x: Int) extends Base
+case class Dummy307(x: Int) extends Base
+case class Dummy308(x: Int) extends Base
+case class Dummy309(x: Int) extends Base
+case class Dummy310(x: Int) extends Base
+case class Dummy311(x: Int) extends Base
+case class Dummy312(x: Int) extends Base
+case class Dummy313(x: Int) extends Base
+case class Dummy314(x: Int) extends Base
+case class Dummy315(x: Int) extends Base
+case class Dummy316(x: Int) extends Base
+case class Dummy317(x: Int) extends Base
+case class Dummy318(x: Int) extends Base
+case class Dummy319(x: Int) extends Base
+case class Dummy320(x: Int) extends Base
+case class Dummy321(x: Int) extends Base
+case class Dummy322(x: Int) extends Base
+case class Dummy323(x: Int) extends Base
+case class Dummy324(x: Int) extends Base
+case class Dummy325(x: Int) extends Base
+case class Dummy326(x: Int) extends Base
+case class Dummy327(x: Int) extends Base
+case class Dummy328(x: Int) extends Base
+case class Dummy329(x: Int) extends Base
+case class Dummy330(x: Int) extends Base
+case class Dummy331(x: Int) extends Base
+case class Dummy332(x: Int) extends Base
+case class Dummy333(x: Int) extends Base
+case class Dummy334(x: Int) extends Base
+case class Dummy335(x: Int) extends Base
+case class Dummy336(x: Int) extends Base
+case class Dummy337(x: Int) extends Base
+case class Dummy338(x: Int) extends Base
+case class Dummy339(x: Int) extends Base
+case class Dummy340(x: Int) extends Base
+case class Dummy341(x: Int) extends Base
+case class Dummy342(x: Int) extends Base
+case class Dummy343(x: Int) extends Base
+case class Dummy344(x: Int) extends Base
+case class Dummy345(x: Int) extends Base
+case class Dummy346(x: Int) extends Base
+case class Dummy347(x: Int) extends Base
+case class Dummy348(x: Int) extends Base
+case class Dummy349(x: Int) extends Base
+case class Dummy350(x: Int) extends Base
+case class Dummy351(x: Int) extends Base
+case class Dummy352(x: Int) extends Base
+case class Dummy353(x: Int) extends Base
+case class Dummy354(x: Int) extends Base
+case class Dummy355(x: Int) extends Base
+case class Dummy356(x: Int) extends Base
+case class Dummy357(x: Int) extends Base
+case class Dummy358(x: Int) extends Base
+case class Dummy359(x: Int) extends Base
+case class Dummy360(x: Int) extends Base
+case class Dummy361(x: Int) extends Base
+case class Dummy362(x: Int) extends Base
+case class Dummy363(x: Int) extends Base
+case class Dummy364(x: Int) extends Base
+case class Dummy365(x: Int) extends Base
+case class Dummy366(x: Int) extends Base
+case class Dummy367(x: Int) extends Base
+case class Dummy368(x: Int) extends Base
+case class Dummy369(x: Int) extends Base
+case class Dummy370(x: Int) extends Base
+case class Dummy371(x: Int) extends Base
+case class Dummy372(x: Int) extends Base
+case class Dummy373(x: Int) extends Base
+case class Dummy374(x: Int) extends Base
+case class Dummy375(x: Int) extends Base
+case class Dummy376(x: Int) extends Base
+case class Dummy377(x: Int) extends Base
+case class Dummy378(x: Int) extends Base
+case class Dummy379(x: Int) extends Base
+case class Dummy380(x: Int) extends Base
+case class Dummy381(x: Int) extends Base
+case class Dummy382(x: Int) extends Base
+case class Dummy383(x: Int) extends Base
+case class Dummy384(x: Int) extends Base
+case class Dummy385(x: Int) extends Base
+case class Dummy386(x: Int) extends Base
+case class Dummy387(x: Int) extends Base
+case class Dummy388(x: Int) extends Base
+case class Dummy389(x: Int) extends Base
+case class Dummy390(x: Int) extends Base
+case class Dummy391(x: Int) extends Base
+case class Dummy392(x: Int) extends Base
+case class Dummy393(x: Int) extends Base
+case class Dummy394(x: Int) extends Base
+case class Dummy395(x: Int) extends Base
+case class Dummy396(x: Int) extends Base
+case class Dummy397(x: Int) extends Base
+case class Dummy398(x: Int) extends Base
+case class Dummy399(x: Int) extends Base
+case class Dummy400(x: Int) extends Base
+case class Dummy401(x: Int) extends Base
+case class Dummy402(x: Int) extends Base
+case class Dummy403(x: Int) extends Base
+case class Dummy404(x: Int) extends Base
+case class Dummy405(x: Int) extends Base
+case class Dummy406(x: Int) extends Base
+case class Dummy407(x: Int) extends Base
+case class Dummy408(x: Int) extends Base
+case class Dummy409(x: Int) extends Base
+case class Dummy410(x: Int) extends Base
+case class Dummy411(x: Int) extends Base
+case class Dummy412(x: Int) extends Base
+case class Dummy413(x: Int) extends Base
+case class Dummy414(x: Int) extends Base
+case class Dummy415(x: Int) extends Base
+case class Dummy416(x: Int) extends Base
+case class Dummy417(x: Int) extends Base
+case class Dummy418(x: Int) extends Base
+case class Dummy419(x: Int) extends Base
+case class Dummy420(x: Int) extends Base
+case class Dummy421(x: Int) extends Base
+case class Dummy422(x: Int) extends Base
+case class Dummy423(x: Int) extends Base
+case class Dummy424(x: Int) extends Base
+case class Dummy425(x: Int) extends Base
+case class Dummy426(x: Int) extends Base
+case class Dummy427(x: Int) extends Base
+case class Dummy428(x: Int) extends Base
+case class Dummy429(x: Int) extends Base
+case class Dummy430(x: Int) extends Base
+case class Dummy431(x: Int) extends Base
+case class Dummy432(x: Int) extends Base
+case class Dummy433(x: Int) extends Base
+case class Dummy434(x: Int) extends Base
+case class Dummy435(x: Int) extends Base
+case class Dummy436(x: Int) extends Base
+case class Dummy437(x: Int) extends Base
+case class Dummy438(x: Int) extends Base
+case class Dummy439(x: Int) extends Base
+case class Dummy440(x: Int) extends Base
+case class Dummy441(x: Int) extends Base
+case class Dummy442(x: Int) extends Base
+case class Dummy443(x: Int) extends Base
+case class Dummy444(x: Int) extends Base
+case class Dummy445(x: Int) extends Base
+case class Dummy446(x: Int) extends Base
+case class Dummy447(x: Int) extends Base
+case class Dummy448(x: Int) extends Base
+case class Dummy449(x: Int) extends Base
+case class Dummy450(x: Int) extends Base
+case class Dummy451(x: Int) extends Base
+case class Dummy452(x: Int) extends Base
+case class Dummy453(x: Int) extends Base
+case class Dummy454(x: Int) extends Base
+case class Dummy455(x: Int) extends Base
+case class Dummy456(x: Int) extends Base
+case class Dummy457(x: Int) extends Base
+case class Dummy458(x: Int) extends Base
+case class Dummy459(x: Int) extends Base
+case class Dummy460(x: Int) extends Base
+case class Dummy461(x: Int) extends Base
+case class Dummy462(x: Int) extends Base
+case class Dummy463(x: Int) extends Base
+case class Dummy464(x: Int) extends Base
+case class Dummy465(x: Int) extends Base
+case class Dummy466(x: Int) extends Base
+case class Dummy467(x: Int) extends Base
+case class Dummy468(x: Int) extends Base
+case class Dummy469(x: Int) extends Base
+case class Dummy470(x: Int) extends Base
+case class Dummy471(x: Int) extends Base
+case class Dummy472(x: Int) extends Base
+case class Dummy473(x: Int) extends Base
+case class Dummy474(x: Int) extends Base
+case class Dummy475(x: Int) extends Base
+case class Dummy476(x: Int) extends Base
+case class Dummy477(x: Int) extends Base
+case class Dummy478(x: Int) extends Base
+case class Dummy479(x: Int) extends Base
+case class Dummy480(x: Int) extends Base
+case class Dummy481(x: Int) extends Base
+case class Dummy482(x: Int) extends Base
+case class Dummy483(x: Int) extends Base
+case class Dummy484(x: Int) extends Base
+case class Dummy485(x: Int) extends Base
+case class Dummy486(x: Int) extends Base
+case class Dummy487(x: Int) extends Base
+case class Dummy488(x: Int) extends Base
+case class Dummy489(x: Int) extends Base
+case class Dummy490(x: Int) extends Base
+case class Dummy491(x: Int) extends Base
+case class Dummy492(x: Int) extends Base
+case class Dummy493(x: Int) extends Base
+case class Dummy494(x: Int) extends Base
+case class Dummy495(x: Int) extends Base
+case class Dummy496(x: Int) extends Base
+case class Dummy497(x: Int) extends Base
+case class Dummy498(x: Int) extends Base
+case class Dummy499(x: Int) extends Base
+case class Dummy500(x: Int) extends Base
+case class Dummy501(x: Int) extends Base
+case class Dummy502(x: Int) extends Base
+case class Dummy503(x: Int) extends Base
+case class Dummy504(x: Int) extends Base
+case class Dummy505(x: Int) extends Base
+case class Dummy506(x: Int) extends Base
+case class Dummy507(x: Int) extends Base
+case class Dummy508(x: Int) extends Base
+case class Dummy509(x: Int) extends Base
+case class Dummy510(x: Int) extends Base
+case class Dummy511(x: Int) extends Base
+case class Dummy512(x: Int) extends Base
+case class Dummy513(x: Int) extends Base
+case class Dummy514(x: Int) extends Base
+case class Dummy515(x: Int) extends Base
+case class Dummy516(x: Int) extends Base
+case class Dummy517(x: Int) extends Base
+case class Dummy518(x: Int) extends Base
+case class Dummy519(x: Int) extends Base
+case class Dummy520(x: Int) extends Base
+case class Dummy521(x: Int) extends Base
+case class Dummy522(x: Int) extends Base
+case class Dummy523(x: Int) extends Base
+case class Dummy524(x: Int) extends Base
+case class Dummy525(x: Int) extends Base
+case class Dummy526(x: Int) extends Base
+case class Dummy527(x: Int) extends Base
+case class Dummy528(x: Int) extends Base
+case class Dummy529(x: Int) extends Base
+case class Dummy530(x: Int) extends Base
+case class Dummy531(x: Int) extends Base
+case class Dummy532(x: Int) extends Base
+case class Dummy533(x: Int) extends Base
+case class Dummy534(x: Int) extends Base
+case class Dummy535(x: Int) extends Base
+case class Dummy536(x: Int) extends Base
+case class Dummy537(x: Int) extends Base
+case class Dummy538(x: Int) extends Base
+case class Dummy539(x: Int) extends Base
+case class Dummy540(x: Int) extends Base
+case class Dummy541(x: Int) extends Base
+case class Dummy542(x: Int) extends Base
+case class Dummy543(x: Int) extends Base
+case class Dummy544(x: Int) extends Base
+case class Dummy545(x: Int) extends Base
+case class Dummy546(x: Int) extends Base
+case class Dummy547(x: Int) extends Base
+case class Dummy548(x: Int) extends Base
+case class Dummy549(x: Int) extends Base
+case class Dummy550(x: Int) extends Base
+case class Dummy551(x: Int) extends Base
+case class Dummy552(x: Int) extends Base
+case class Dummy553(x: Int) extends Base
+case class Dummy554(x: Int) extends Base
+case class Dummy555(x: Int) extends Base
+case class Dummy556(x: Int) extends Base
+case class Dummy557(x: Int) extends Base
+case class Dummy558(x: Int) extends Base
+case class Dummy559(x: Int) extends Base
+case class Dummy560(x: Int) extends Base
+case class Dummy561(x: Int) extends Base
+case class Dummy562(x: Int) extends Base
+case class Dummy563(x: Int) extends Base
+case class Dummy564(x: Int) extends Base
+case class Dummy565(x: Int) extends Base
+case class Dummy566(x: Int) extends Base
+case class Dummy567(x: Int) extends Base
+case class Dummy568(x: Int) extends Base
+case class Dummy569(x: Int) extends Base
+case class Dummy570(x: Int) extends Base
+case class Dummy571(x: Int) extends Base
+case class Dummy572(x: Int) extends Base
+case class Dummy573(x: Int) extends Base
+case class Dummy574(x: Int) extends Base
+case class Dummy575(x: Int) extends Base
+case class Dummy576(x: Int) extends Base
+case class Dummy577(x: Int) extends Base
+case class Dummy578(x: Int) extends Base
+case class Dummy579(x: Int) extends Base
+case class Dummy580(x: Int) extends Base
+case class Dummy581(x: Int) extends Base
+case class Dummy582(x: Int) extends Base
+case class Dummy583(x: Int) extends Base
+case class Dummy584(x: Int) extends Base
+case class Dummy585(x: Int) extends Base
+case class Dummy586(x: Int) extends Base
+case class Dummy587(x: Int) extends Base
+case class Dummy588(x: Int) extends Base
+case class Dummy589(x: Int) extends Base
+case class Dummy590(x: Int) extends Base
+case class Dummy591(x: Int) extends Base
+case class Dummy592(x: Int) extends Base
+case class Dummy593(x: Int) extends Base
+case class Dummy594(x: Int) extends Base
+case class Dummy595(x: Int) extends Base
+case class Dummy596(x: Int) extends Base
+case class Dummy597(x: Int) extends Base
+case class Dummy598(x: Int) extends Base
+case class Dummy599(x: Int) extends Base
+case class Dummy600(x: Int) extends Base
+case class Dummy601(x: Int) extends Base
+case class Dummy602(x: Int) extends Base
+case class Dummy603(x: Int) extends Base
+case class Dummy604(x: Int) extends Base
+case class Dummy605(x: Int) extends Base
+case class Dummy606(x: Int) extends Base
+case class Dummy607(x: Int) extends Base
+case class Dummy608(x: Int) extends Base
+case class Dummy609(x: Int) extends Base
+case class Dummy610(x: Int) extends Base
+case class Dummy611(x: Int) extends Base
+case class Dummy612(x: Int) extends Base
+case class Dummy613(x: Int) extends Base
+case class Dummy614(x: Int) extends Base
+case class Dummy615(x: Int) extends Base
+case class Dummy616(x: Int) extends Base
+case class Dummy617(x: Int) extends Base
+case class Dummy618(x: Int) extends Base
+case class Dummy619(x: Int) extends Base
+case class Dummy620(x: Int) extends Base
+case class Dummy621(x: Int) extends Base
+case class Dummy622(x: Int) extends Base
+case class Dummy623(x: Int) extends Base
+case class Dummy624(x: Int) extends Base
+case class Dummy625(x: Int) extends Base
+case class Dummy626(x: Int) extends Base
+case class Dummy627(x: Int) extends Base
+case class Dummy628(x: Int) extends Base
+case class Dummy629(x: Int) extends Base
+case class Dummy630(x: Int) extends Base
+case class Dummy631(x: Int) extends Base
+case class Dummy632(x: Int) extends Base
+case class Dummy633(x: Int) extends Base
+case class Dummy634(x: Int) extends Base
+case class Dummy635(x: Int) extends Base
+case class Dummy636(x: Int) extends Base
+case class Dummy637(x: Int) extends Base
+case class Dummy638(x: Int) extends Base
+case class Dummy639(x: Int) extends Base
+case class Dummy640(x: Int) extends Base
+case class Dummy641(x: Int) extends Base
+case class Dummy642(x: Int) extends Base
+case class Dummy643(x: Int) extends Base
+case class Dummy644(x: Int) extends Base
+case class Dummy645(x: Int) extends Base
+case class Dummy646(x: Int) extends Base
+case class Dummy647(x: Int) extends Base
+case class Dummy648(x: Int) extends Base
+case class Dummy649(x: Int) extends Base
+case class Dummy650(x: Int) extends Base
+case class Dummy651(x: Int) extends Base
+case class Dummy652(x: Int) extends Base
+case class Dummy653(x: Int) extends Base
+case class Dummy654(x: Int) extends Base
+case class Dummy655(x: Int) extends Base
+case class Dummy656(x: Int) extends Base
+case class Dummy657(x: Int) extends Base
+case class Dummy658(x: Int) extends Base
+case class Dummy659(x: Int) extends Base
+case class Dummy660(x: Int) extends Base
+case class Dummy661(x: Int) extends Base
+case class Dummy662(x: Int) extends Base
+case class Dummy663(x: Int) extends Base
+case class Dummy664(x: Int) extends Base
+case class Dummy665(x: Int) extends Base
+case class Dummy666(x: Int) extends Base
+case class Dummy667(x: Int) extends Base
+case class Dummy668(x: Int) extends Base
+case class Dummy669(x: Int) extends Base
+case class Dummy670(x: Int) extends Base
+case class Dummy671(x: Int) extends Base
+case class Dummy672(x: Int) extends Base
+case class Dummy673(x: Int) extends Base
+case class Dummy674(x: Int) extends Base
+case class Dummy675(x: Int) extends Base
+case class Dummy676(x: Int) extends Base
+case class Dummy677(x: Int) extends Base
+case class Dummy678(x: Int) extends Base
+case class Dummy679(x: Int) extends Base
+case class Dummy680(x: Int) extends Base
+case class Dummy681(x: Int) extends Base
+case class Dummy682(x: Int) extends Base
+case class Dummy683(x: Int) extends Base
+case class Dummy684(x: Int) extends Base
+case class Dummy685(x: Int) extends Base
+case class Dummy686(x: Int) extends Base
+case class Dummy687(x: Int) extends Base
+case class Dummy688(x: Int) extends Base
+case class Dummy689(x: Int) extends Base
+case class Dummy690(x: Int) extends Base
+case class Dummy691(x: Int) extends Base
+case class Dummy692(x: Int) extends Base
+case class Dummy693(x: Int) extends Base
+case class Dummy694(x: Int) extends Base
+case class Dummy695(x: Int) extends Base
+case class Dummy696(x: Int) extends Base
+case class Dummy697(x: Int) extends Base
+case class Dummy698(x: Int) extends Base
+case class Dummy699(x: Int) extends Base
+case class Dummy700(x: Int) extends Base
+case class Dummy701(x: Int) extends Base
+case class Dummy702(x: Int) extends Base
+case class Dummy703(x: Int) extends Base
+case class Dummy704(x: Int) extends Base
+case class Dummy705(x: Int) extends Base
+case class Dummy706(x: Int) extends Base
+case class Dummy707(x: Int) extends Base
+case class Dummy708(x: Int) extends Base
+case class Dummy709(x: Int) extends Base
+case class Dummy710(x: Int) extends Base
+case class Dummy711(x: Int) extends Base
+case class Dummy712(x: Int) extends Base
+case class Dummy713(x: Int) extends Base
+case class Dummy714(x: Int) extends Base
+case class Dummy715(x: Int) extends Base
+case class Dummy716(x: Int) extends Base
+case class Dummy717(x: Int) extends Base
+case class Dummy718(x: Int) extends Base
+case class Dummy719(x: Int) extends Base
+case class Dummy720(x: Int) extends Base
+case class Dummy721(x: Int) extends Base
+case class Dummy722(x: Int) extends Base
+case class Dummy723(x: Int) extends Base
+case class Dummy724(x: Int) extends Base
+case class Dummy725(x: Int) extends Base
+case class Dummy726(x: Int) extends Base
+case class Dummy727(x: Int) extends Base
+case class Dummy728(x: Int) extends Base
+case class Dummy729(x: Int) extends Base
+case class Dummy730(x: Int) extends Base
+case class Dummy731(x: Int) extends Base
+case class Dummy732(x: Int) extends Base
+case class Dummy733(x: Int) extends Base
+case class Dummy734(x: Int) extends Base
+case class Dummy735(x: Int) extends Base
+case class Dummy736(x: Int) extends Base
+case class Dummy737(x: Int) extends Base
+case class Dummy738(x: Int) extends Base
+case class Dummy739(x: Int) extends Base
+case class Dummy740(x: Int) extends Base
+case class Dummy741(x: Int) extends Base
+case class Dummy742(x: Int) extends Base
+case class Dummy743(x: Int) extends Base
+case class Dummy744(x: Int) extends Base
+case class Dummy745(x: Int) extends Base
+case class Dummy746(x: Int) extends Base
+case class Dummy747(x: Int) extends Base
+case class Dummy748(x: Int) extends Base
+case class Dummy749(x: Int) extends Base
+case class Dummy750(x: Int) extends Base
+case class Dummy751(x: Int) extends Base
+case class Dummy752(x: Int) extends Base
+case class Dummy753(x: Int) extends Base
+case class Dummy754(x: Int) extends Base
+case class Dummy755(x: Int) extends Base
+case class Dummy756(x: Int) extends Base
+case class Dummy757(x: Int) extends Base
+case class Dummy758(x: Int) extends Base
+case class Dummy759(x: Int) extends Base
+case class Dummy760(x: Int) extends Base
+case class Dummy761(x: Int) extends Base
+case class Dummy762(x: Int) extends Base
+case class Dummy763(x: Int) extends Base
+case class Dummy764(x: Int) extends Base
+case class Dummy765(x: Int) extends Base
+case class Dummy766(x: Int) extends Base
+case class Dummy767(x: Int) extends Base
+case class Dummy768(x: Int) extends Base
+case class Dummy769(x: Int) extends Base
+case class Dummy770(x: Int) extends Base
+case class Dummy771(x: Int) extends Base
+case class Dummy772(x: Int) extends Base
+case class Dummy773(x: Int) extends Base
+case class Dummy774(x: Int) extends Base
+case class Dummy775(x: Int) extends Base
+case class Dummy776(x: Int) extends Base
+case class Dummy777(x: Int) extends Base
+case class Dummy778(x: Int) extends Base
+case class Dummy779(x: Int) extends Base
+case class Dummy780(x: Int) extends Base
+case class Dummy781(x: Int) extends Base
+case class Dummy782(x: Int) extends Base
+case class Dummy783(x: Int) extends Base
+case class Dummy784(x: Int) extends Base
+case class Dummy785(x: Int) extends Base
+case class Dummy786(x: Int) extends Base
+case class Dummy787(x: Int) extends Base
+case class Dummy788(x: Int) extends Base
+case class Dummy789(x: Int) extends Base
+case class Dummy790(x: Int) extends Base
+case class Dummy791(x: Int) extends Base
+case class Dummy792(x: Int) extends Base
+case class Dummy793(x: Int) extends Base
+case class Dummy794(x: Int) extends Base
+case class Dummy795(x: Int) extends Base
+case class Dummy796(x: Int) extends Base
+case class Dummy797(x: Int) extends Base
+case class Dummy798(x: Int) extends Base
+case class Dummy799(x: Int) extends Base
+case class Dummy800(x: Int) extends Base
+case class Dummy801(x: Int) extends Base
+case class Dummy802(x: Int) extends Base
+case class Dummy803(x: Int) extends Base
+case class Dummy804(x: Int) extends Base
+case class Dummy805(x: Int) extends Base
+case class Dummy806(x: Int) extends Base
+case class Dummy807(x: Int) extends Base
+case class Dummy808(x: Int) extends Base
+case class Dummy809(x: Int) extends Base
+case class Dummy810(x: Int) extends Base
+case class Dummy811(x: Int) extends Base
+case class Dummy812(x: Int) extends Base
+case class Dummy813(x: Int) extends Base
+case class Dummy814(x: Int) extends Base
+case class Dummy815(x: Int) extends Base
+case class Dummy816(x: Int) extends Base
+case class Dummy817(x: Int) extends Base
+case class Dummy818(x: Int) extends Base
+case class Dummy819(x: Int) extends Base
+case class Dummy820(x: Int) extends Base
+case class Dummy821(x: Int) extends Base
+case class Dummy822(x: Int) extends Base
+case class Dummy823(x: Int) extends Base
+case class Dummy824(x: Int) extends Base
+case class Dummy825(x: Int) extends Base
+case class Dummy826(x: Int) extends Base
+case class Dummy827(x: Int) extends Base
+case class Dummy828(x: Int) extends Base
+case class Dummy829(x: Int) extends Base
+case class Dummy830(x: Int) extends Base
+case class Dummy831(x: Int) extends Base
+case class Dummy832(x: Int) extends Base
+case class Dummy833(x: Int) extends Base
+case class Dummy834(x: Int) extends Base
+case class Dummy835(x: Int) extends Base
+case class Dummy836(x: Int) extends Base
+case class Dummy837(x: Int) extends Base
+case class Dummy838(x: Int) extends Base
+case class Dummy839(x: Int) extends Base
+case class Dummy840(x: Int) extends Base
+case class Dummy841(x: Int) extends Base
+case class Dummy842(x: Int) extends Base
+case class Dummy843(x: Int) extends Base
+case class Dummy844(x: Int) extends Base
+case class Dummy845(x: Int) extends Base
+case class Dummy846(x: Int) extends Base
+case class Dummy847(x: Int) extends Base
+case class Dummy848(x: Int) extends Base
+case class Dummy849(x: Int) extends Base
+case class Dummy850(x: Int) extends Base
+case class Dummy851(x: Int) extends Base
+case class Dummy852(x: Int) extends Base
+case class Dummy853(x: Int) extends Base
+case class Dummy854(x: Int) extends Base
+case class Dummy855(x: Int) extends Base
+case class Dummy856(x: Int) extends Base
+case class Dummy857(x: Int) extends Base
+case class Dummy858(x: Int) extends Base
+case class Dummy859(x: Int) extends Base
+case class Dummy860(x: Int) extends Base
+case class Dummy861(x: Int) extends Base
+case class Dummy862(x: Int) extends Base
+case class Dummy863(x: Int) extends Base
+case class Dummy864(x: Int) extends Base
+case class Dummy865(x: Int) extends Base
+case class Dummy866(x: Int) extends Base
+case class Dummy867(x: Int) extends Base
+case class Dummy868(x: Int) extends Base
+case class Dummy869(x: Int) extends Base
+case class Dummy870(x: Int) extends Base
+case class Dummy871(x: Int) extends Base
+case class Dummy872(x: Int) extends Base
+case class Dummy873(x: Int) extends Base
+case class Dummy874(x: Int) extends Base
+case class Dummy875(x: Int) extends Base
+case class Dummy876(x: Int) extends Base
+case class Dummy877(x: Int) extends Base
+case class Dummy878(x: Int) extends Base
+case class Dummy879(x: Int) extends Base
+case class Dummy880(x: Int) extends Base
+case class Dummy881(x: Int) extends Base
+case class Dummy882(x: Int) extends Base
+case class Dummy883(x: Int) extends Base
+case class Dummy884(x: Int) extends Base
+case class Dummy885(x: Int) extends Base
+case class Dummy886(x: Int) extends Base
+case class Dummy887(x: Int) extends Base
+case class Dummy888(x: Int) extends Base
+case class Dummy889(x: Int) extends Base
+case class Dummy890(x: Int) extends Base
+case class Dummy891(x: Int) extends Base
+case class Dummy892(x: Int) extends Base
+case class Dummy893(x: Int) extends Base
+case class Dummy894(x: Int) extends Base
+case class Dummy895(x: Int) extends Base
+case class Dummy896(x: Int) extends Base
+case class Dummy897(x: Int) extends Base
+case class Dummy898(x: Int) extends Base
+case class Dummy899(x: Int) extends Base
+case class Dummy900(x: Int) extends Base
+case class Dummy901(x: Int) extends Base
+case class Dummy902(x: Int) extends Base
+case class Dummy903(x: Int) extends Base
+case class Dummy904(x: Int) extends Base
+case class Dummy905(x: Int) extends Base
+case class Dummy906(x: Int) extends Base
+case class Dummy907(x: Int) extends Base
+case class Dummy908(x: Int) extends Base
+case class Dummy909(x: Int) extends Base
+case class Dummy910(x: Int) extends Base
+case class Dummy911(x: Int) extends Base
+case class Dummy912(x: Int) extends Base
+case class Dummy913(x: Int) extends Base
+case class Dummy914(x: Int) extends Base
+case class Dummy915(x: Int) extends Base
+case class Dummy916(x: Int) extends Base
+case class Dummy917(x: Int) extends Base
+case class Dummy918(x: Int) extends Base
+case class Dummy919(x: Int) extends Base
+case class Dummy920(x: Int) extends Base
+case class Dummy921(x: Int) extends Base
+case class Dummy922(x: Int) extends Base
+case class Dummy923(x: Int) extends Base
+case class Dummy924(x: Int) extends Base
+case class Dummy925(x: Int) extends Base
+case class Dummy926(x: Int) extends Base
+case class Dummy927(x: Int) extends Base
+case class Dummy928(x: Int) extends Base
+case class Dummy929(x: Int) extends Base
+case class Dummy930(x: Int) extends Base
+case class Dummy931(x: Int) extends Base
+case class Dummy932(x: Int) extends Base
+case class Dummy933(x: Int) extends Base
+case class Dummy934(x: Int) extends Base
+case class Dummy935(x: Int) extends Base
+case class Dummy936(x: Int) extends Base
+case class Dummy937(x: Int) extends Base
+case class Dummy938(x: Int) extends Base
+case class Dummy939(x: Int) extends Base
+case class Dummy940(x: Int) extends Base
+case class Dummy941(x: Int) extends Base
+case class Dummy942(x: Int) extends Base
+case class Dummy943(x: Int) extends Base
+case class Dummy944(x: Int) extends Base
+case class Dummy945(x: Int) extends Base
+case class Dummy946(x: Int) extends Base
+case class Dummy947(x: Int) extends Base
+case class Dummy948(x: Int) extends Base
+case class Dummy949(x: Int) extends Base
+case class Dummy950(x: Int) extends Base
+case class Dummy951(x: Int) extends Base
+case class Dummy952(x: Int) extends Base
+case class Dummy953(x: Int) extends Base
+case class Dummy954(x: Int) extends Base
+case class Dummy955(x: Int) extends Base
+case class Dummy956(x: Int) extends Base
+case class Dummy957(x: Int) extends Base
+case class Dummy958(x: Int) extends Base
+case class Dummy959(x: Int) extends Base
+case class Dummy960(x: Int) extends Base
+case class Dummy961(x: Int) extends Base
+case class Dummy962(x: Int) extends Base
+case class Dummy963(x: Int) extends Base
+case class Dummy964(x: Int) extends Base
+case class Dummy965(x: Int) extends Base
+case class Dummy966(x: Int) extends Base
+case class Dummy967(x: Int) extends Base
+case class Dummy968(x: Int) extends Base
+case class Dummy969(x: Int) extends Base
+case class Dummy970(x: Int) extends Base
+case class Dummy971(x: Int) extends Base
+case class Dummy972(x: Int) extends Base
+case class Dummy973(x: Int) extends Base
+case class Dummy974(x: Int) extends Base
+case class Dummy975(x: Int) extends Base
+case class Dummy976(x: Int) extends Base
+case class Dummy977(x: Int) extends Base
+case class Dummy978(x: Int) extends Base
+case class Dummy979(x: Int) extends Base
+case class Dummy980(x: Int) extends Base
+case class Dummy981(x: Int) extends Base
+case class Dummy982(x: Int) extends Base
+case class Dummy983(x: Int) extends Base
+case class Dummy984(x: Int) extends Base
+case class Dummy985(x: Int) extends Base
+case class Dummy986(x: Int) extends Base
+case class Dummy987(x: Int) extends Base
+case class Dummy988(x: Int) extends Base
+case class Dummy989(x: Int) extends Base
+case class Dummy990(x: Int) extends Base
+case class Dummy991(x: Int) extends Base
+case class Dummy992(x: Int) extends Base
+case class Dummy993(x: Int) extends Base
+case class Dummy994(x: Int) extends Base
+case class Dummy995(x: Int) extends Base
+case class Dummy996(x: Int) extends Base
+case class Dummy997(x: Int) extends Base
+case class Dummy998(x: Int) extends Base
+case class Dummy999(x: Int) extends Base
+case class Dummy1000(x: Int) extends Base
+case class Dummy1001(x: Int) extends Base
+case class Dummy1002(x: Int) extends Base
+case class Dummy1003(x: Int) extends Base
+case class Dummy1004(x: Int) extends Base
+case class Dummy1005(x: Int) extends Base
+case class Dummy1006(x: Int) extends Base
+case class Dummy1007(x: Int) extends Base
+case class Dummy1008(x: Int) extends Base
+case class Dummy1009(x: Int) extends Base
+case class Dummy1010(x: Int) extends Base
+case class Dummy1011(x: Int) extends Base
+case class Dummy1012(x: Int) extends Base
+case class Dummy1013(x: Int) extends Base
+case class Dummy1014(x: Int) extends Base
+case class Dummy1015(x: Int) extends Base
+case class Dummy1016(x: Int) extends Base
+case class Dummy1017(x: Int) extends Base
+case class Dummy1018(x: Int) extends Base
+case class Dummy1019(x: Int) extends Base
+case class Dummy1020(x: Int) extends Base
+case class Dummy1021(x: Int) extends Base
+case class Dummy1022(x: Int) extends Base
+case class Dummy1023(x: Int) extends Base
+case class Dummy1024(x: Int) extends Base
+case class Dummy1025(x: Int) extends Base
+case class Dummy1026(x: Int) extends Base
+case class Dummy1027(x: Int) extends Base
+case class Dummy1028(x: Int) extends Base
+case class Dummy1029(x: Int) extends Base
+case class Dummy1030(x: Int) extends Base
+case class Dummy1031(x: Int) extends Base
+case class Dummy1032(x: Int) extends Base
+case class Dummy1033(x: Int) extends Base
+case class Dummy1034(x: Int) extends Base
+case class Dummy1035(x: Int) extends Base
+case class Dummy1036(x: Int) extends Base
+case class Dummy1037(x: Int) extends Base
+case class Dummy1038(x: Int) extends Base
+case class Dummy1039(x: Int) extends Base
+case class Dummy1040(x: Int) extends Base
+case class Dummy1041(x: Int) extends Base
+case class Dummy1042(x: Int) extends Base
+case class Dummy1043(x: Int) extends Base
+case class Dummy1044(x: Int) extends Base
+case class Dummy1045(x: Int) extends Base
+case class Dummy1046(x: Int) extends Base
+case class Dummy1047(x: Int) extends Base
+case class Dummy1048(x: Int) extends Base
+case class Dummy1049(x: Int) extends Base
+case class Dummy1050(x: Int) extends Base
+case class Dummy1051(x: Int) extends Base
+case class Dummy1052(x: Int) extends Base
+case class Dummy1053(x: Int) extends Base
+case class Dummy1054(x: Int) extends Base
+case class Dummy1055(x: Int) extends Base
+case class Dummy1056(x: Int) extends Base
+case class Dummy1057(x: Int) extends Base
+case class Dummy1058(x: Int) extends Base
+case class Dummy1059(x: Int) extends Base
+case class Dummy1060(x: Int) extends Base
+case class Dummy1061(x: Int) extends Base
+case class Dummy1062(x: Int) extends Base
+case class Dummy1063(x: Int) extends Base
+case class Dummy1064(x: Int) extends Base
+case class Dummy1065(x: Int) extends Base
+case class Dummy1066(x: Int) extends Base
+case class Dummy1067(x: Int) extends Base
+case class Dummy1068(x: Int) extends Base
+case class Dummy1069(x: Int) extends Base
+case class Dummy1070(x: Int) extends Base
+case class Dummy1071(x: Int) extends Base
+case class Dummy1072(x: Int) extends Base
+case class Dummy1073(x: Int) extends Base
+case class Dummy1074(x: Int) extends Base
+case class Dummy1075(x: Int) extends Base
+case class Dummy1076(x: Int) extends Base
+case class Dummy1077(x: Int) extends Base
+case class Dummy1078(x: Int) extends Base
+case class Dummy1079(x: Int) extends Base
+case class Dummy1080(x: Int) extends Base
+case class Dummy1081(x: Int) extends Base
+case class Dummy1082(x: Int) extends Base
+case class Dummy1083(x: Int) extends Base
+case class Dummy1084(x: Int) extends Base
+case class Dummy1085(x: Int) extends Base
+case class Dummy1086(x: Int) extends Base
+case class Dummy1087(x: Int) extends Base
+case class Dummy1088(x: Int) extends Base
+case class Dummy1089(x: Int) extends Base
+case class Dummy1090(x: Int) extends Base
+case class Dummy1091(x: Int) extends Base
+case class Dummy1092(x: Int) extends Base
+case class Dummy1093(x: Int) extends Base
+case class Dummy1094(x: Int) extends Base
+case class Dummy1095(x: Int) extends Base
+case class Dummy1096(x: Int) extends Base
+case class Dummy1097(x: Int) extends Base
+case class Dummy1098(x: Int) extends Base
+case class Dummy1099(x: Int) extends Base
+case class Dummy1100(x: Int) extends Base
+case class Dummy1101(x: Int) extends Base
+case class Dummy1102(x: Int) extends Base
+case class Dummy1103(x: Int) extends Base
+case class Dummy1104(x: Int) extends Base
+case class Dummy1105(x: Int) extends Base
+case class Dummy1106(x: Int) extends Base
+case class Dummy1107(x: Int) extends Base
+case class Dummy1108(x: Int) extends Base
+case class Dummy1109(x: Int) extends Base
+case class Dummy1110(x: Int) extends Base
+case class Dummy1111(x: Int) extends Base
+case class Dummy1112(x: Int) extends Base
+case class Dummy1113(x: Int) extends Base
+case class Dummy1114(x: Int) extends Base
+case class Dummy1115(x: Int) extends Base
+case class Dummy1116(x: Int) extends Base
+case class Dummy1117(x: Int) extends Base
+case class Dummy1118(x: Int) extends Base
+case class Dummy1119(x: Int) extends Base
+case class Dummy1120(x: Int) extends Base
+case class Dummy1121(x: Int) extends Base
+case class Dummy1122(x: Int) extends Base
+case class Dummy1123(x: Int) extends Base
+case class Dummy1124(x: Int) extends Base
+case class Dummy1125(x: Int) extends Base
+case class Dummy1126(x: Int) extends Base
+case class Dummy1127(x: Int) extends Base
+case class Dummy1128(x: Int) extends Base
+case class Dummy1129(x: Int) extends Base
+case class Dummy1130(x: Int) extends Base
+case class Dummy1131(x: Int) extends Base
+case class Dummy1132(x: Int) extends Base
+case class Dummy1133(x: Int) extends Base
+case class Dummy1134(x: Int) extends Base
+case class Dummy1135(x: Int) extends Base
+case class Dummy1136(x: Int) extends Base
+case class Dummy1137(x: Int) extends Base
+case class Dummy1138(x: Int) extends Base
+case class Dummy1139(x: Int) extends Base
+case class Dummy1140(x: Int) extends Base
+case class Dummy1141(x: Int) extends Base
+case class Dummy1142(x: Int) extends Base
+case class Dummy1143(x: Int) extends Base
+case class Dummy1144(x: Int) extends Base
+case class Dummy1145(x: Int) extends Base
+case class Dummy1146(x: Int) extends Base
+case class Dummy1147(x: Int) extends Base
+case class Dummy1148(x: Int) extends Base
+case class Dummy1149(x: Int) extends Base
+case class Dummy1150(x: Int) extends Base
+case class Dummy1151(x: Int) extends Base
+case class Dummy1152(x: Int) extends Base
+case class Dummy1153(x: Int) extends Base
+case class Dummy1154(x: Int) extends Base
+case class Dummy1155(x: Int) extends Base
+case class Dummy1156(x: Int) extends Base
+case class Dummy1157(x: Int) extends Base
+case class Dummy1158(x: Int) extends Base
+case class Dummy1159(x: Int) extends Base
+case class Dummy1160(x: Int) extends Base
+case class Dummy1161(x: Int) extends Base
+case class Dummy1162(x: Int) extends Base
+case class Dummy1163(x: Int) extends Base
+case class Dummy1164(x: Int) extends Base
+case class Dummy1165(x: Int) extends Base
+case class Dummy1166(x: Int) extends Base
+case class Dummy1167(x: Int) extends Base
+case class Dummy1168(x: Int) extends Base
+case class Dummy1169(x: Int) extends Base
+case class Dummy1170(x: Int) extends Base
+case class Dummy1171(x: Int) extends Base
+case class Dummy1172(x: Int) extends Base
+case class Dummy1173(x: Int) extends Base
+case class Dummy1174(x: Int) extends Base
+case class Dummy1175(x: Int) extends Base
+case class Dummy1176(x: Int) extends Base
+case class Dummy1177(x: Int) extends Base
+case class Dummy1178(x: Int) extends Base
+case class Dummy1179(x: Int) extends Base
+case class Dummy1180(x: Int) extends Base
+case class Dummy1181(x: Int) extends Base
+case class Dummy1182(x: Int) extends Base
+case class Dummy1183(x: Int) extends Base
+case class Dummy1184(x: Int) extends Base
+case class Dummy1185(x: Int) extends Base
+case class Dummy1186(x: Int) extends Base
+case class Dummy1187(x: Int) extends Base
+case class Dummy1188(x: Int) extends Base
+case class Dummy1189(x: Int) extends Base
+case class Dummy1190(x: Int) extends Base
+case class Dummy1191(x: Int) extends Base
+case class Dummy1192(x: Int) extends Base
+case class Dummy1193(x: Int) extends Base
+case class Dummy1194(x: Int) extends Base
+case class Dummy1195(x: Int) extends Base
+case class Dummy1196(x: Int) extends Base
+case class Dummy1197(x: Int) extends Base
+case class Dummy1198(x: Int) extends Base
+case class Dummy1199(x: Int) extends Base
+case class Dummy1200(x: Int) extends Base
+case class Dummy1201(x: Int) extends Base
+case class Dummy1202(x: Int) extends Base
+case class Dummy1203(x: Int) extends Base
+case class Dummy1204(x: Int) extends Base
+case class Dummy1205(x: Int) extends Base
+case class Dummy1206(x: Int) extends Base
+case class Dummy1207(x: Int) extends Base
+case class Dummy1208(x: Int) extends Base
+case class Dummy1209(x: Int) extends Base
+case class Dummy1210(x: Int) extends Base
+case class Dummy1211(x: Int) extends Base
+case class Dummy1212(x: Int) extends Base
+case class Dummy1213(x: Int) extends Base
+case class Dummy1214(x: Int) extends Base
+case class Dummy1215(x: Int) extends Base
+case class Dummy1216(x: Int) extends Base
+case class Dummy1217(x: Int) extends Base
+case class Dummy1218(x: Int) extends Base
+case class Dummy1219(x: Int) extends Base
+case class Dummy1220(x: Int) extends Base
+case class Dummy1221(x: Int) extends Base
+case class Dummy1222(x: Int) extends Base
+case class Dummy1223(x: Int) extends Base
+case class Dummy1224(x: Int) extends Base
+case class Dummy1225(x: Int) extends Base
+case class Dummy1226(x: Int) extends Base
+case class Dummy1227(x: Int) extends Base
+case class Dummy1228(x: Int) extends Base
+case class Dummy1229(x: Int) extends Base
+case class Dummy1230(x: Int) extends Base
+case class Dummy1231(x: Int) extends Base
+case class Dummy1232(x: Int) extends Base
+case class Dummy1233(x: Int) extends Base
+case class Dummy1234(x: Int) extends Base
+case class Dummy1235(x: Int) extends Base
+case class Dummy1236(x: Int) extends Base
+case class Dummy1237(x: Int) extends Base
+case class Dummy1238(x: Int) extends Base
+case class Dummy1239(x: Int) extends Base
+case class Dummy1240(x: Int) extends Base
+case class Dummy1241(x: Int) extends Base
+case class Dummy1242(x: Int) extends Base
+case class Dummy1243(x: Int) extends Base
+case class Dummy1244(x: Int) extends Base
+case class Dummy1245(x: Int) extends Base
+case class Dummy1246(x: Int) extends Base
+case class Dummy1247(x: Int) extends Base
+case class Dummy1248(x: Int) extends Base
+case class Dummy1249(x: Int) extends Base
+case class Dummy1250(x: Int) extends Base
+case class Dummy1251(x: Int) extends Base
+case class Dummy1252(x: Int) extends Base
+case class Dummy1253(x: Int) extends Base
+case class Dummy1254(x: Int) extends Base
+case class Dummy1255(x: Int) extends Base
+case class Dummy1256(x: Int) extends Base
+case class Dummy1257(x: Int) extends Base
+case class Dummy1258(x: Int) extends Base
+case class Dummy1259(x: Int) extends Base
+case class Dummy1260(x: Int) extends Base
+case class Dummy1261(x: Int) extends Base
+case class Dummy1262(x: Int) extends Base
+case class Dummy1263(x: Int) extends Base
+case class Dummy1264(x: Int) extends Base
+case class Dummy1265(x: Int) extends Base
+case class Dummy1266(x: Int) extends Base
+case class Dummy1267(x: Int) extends Base
+case class Dummy1268(x: Int) extends Base
+case class Dummy1269(x: Int) extends Base
+case class Dummy1270(x: Int) extends Base
+case class Dummy1271(x: Int) extends Base
+case class Dummy1272(x: Int) extends Base
+case class Dummy1273(x: Int) extends Base
+case class Dummy1274(x: Int) extends Base
+case class Dummy1275(x: Int) extends Base
+case class Dummy1276(x: Int) extends Base
+case class Dummy1277(x: Int) extends Base
+case class Dummy1278(x: Int) extends Base
+case class Dummy1279(x: Int) extends Base
+case class Dummy1280(x: Int) extends Base
+case class Dummy1281(x: Int) extends Base
+case class Dummy1282(x: Int) extends Base
+case class Dummy1283(x: Int) extends Base
+case class Dummy1284(x: Int) extends Base
+case class Dummy1285(x: Int) extends Base
+case class Dummy1286(x: Int) extends Base
+case class Dummy1287(x: Int) extends Base
+case class Dummy1288(x: Int) extends Base
+case class Dummy1289(x: Int) extends Base
+case class Dummy1290(x: Int) extends Base
+case class Dummy1291(x: Int) extends Base
+case class Dummy1292(x: Int) extends Base
+case class Dummy1293(x: Int) extends Base
+case class Dummy1294(x: Int) extends Base
+case class Dummy1295(x: Int) extends Base
+case class Dummy1296(x: Int) extends Base
+case class Dummy1297(x: Int) extends Base
+case class Dummy1298(x: Int) extends Base
+case class Dummy1299(x: Int) extends Base
+case class Dummy1300(x: Int) extends Base
+case class Dummy1301(x: Int) extends Base
+case class Dummy1302(x: Int) extends Base
+case class Dummy1303(x: Int) extends Base
+case class Dummy1304(x: Int) extends Base
+case class Dummy1305(x: Int) extends Base
+case class Dummy1306(x: Int) extends Base
+case class Dummy1307(x: Int) extends Base
+case class Dummy1308(x: Int) extends Base
+case class Dummy1309(x: Int) extends Base
+case class Dummy1310(x: Int) extends Base
+case class Dummy1311(x: Int) extends Base
+case class Dummy1312(x: Int) extends Base
+case class Dummy1313(x: Int) extends Base
+case class Dummy1314(x: Int) extends Base
+case class Dummy1315(x: Int) extends Base
+case class Dummy1316(x: Int) extends Base
+case class Dummy1317(x: Int) extends Base
+case class Dummy1318(x: Int) extends Base
+case class Dummy1319(x: Int) extends Base
+case class Dummy1320(x: Int) extends Base
+case class Dummy1321(x: Int) extends Base
+case class Dummy1322(x: Int) extends Base
+case class Dummy1323(x: Int) extends Base
+case class Dummy1324(x: Int) extends Base
+case class Dummy1325(x: Int) extends Base
+case class Dummy1326(x: Int) extends Base
+case class Dummy1327(x: Int) extends Base
+case class Dummy1328(x: Int) extends Base
+case class Dummy1329(x: Int) extends Base
+case class Dummy1330(x: Int) extends Base
+case class Dummy1331(x: Int) extends Base
+case class Dummy1332(x: Int) extends Base
+case class Dummy1333(x: Int) extends Base
+case class Dummy1334(x: Int) extends Base
+case class Dummy1335(x: Int) extends Base
+case class Dummy1336(x: Int) extends Base
+case class Dummy1337(x: Int) extends Base
+case class Dummy1338(x: Int) extends Base
+case class Dummy1339(x: Int) extends Base
+case class Dummy1340(x: Int) extends Base
+case class Dummy1341(x: Int) extends Base
+case class Dummy1342(x: Int) extends Base
+case class Dummy1343(x: Int) extends Base
+case class Dummy1344(x: Int) extends Base
+case class Dummy1345(x: Int) extends Base
+case class Dummy1346(x: Int) extends Base
+case class Dummy1347(x: Int) extends Base
+case class Dummy1348(x: Int) extends Base
+case class Dummy1349(x: Int) extends Base
+case class Dummy1350(x: Int) extends Base
+case class Dummy1351(x: Int) extends Base
+case class Dummy1352(x: Int) extends Base
+case class Dummy1353(x: Int) extends Base
+case class Dummy1354(x: Int) extends Base
+case class Dummy1355(x: Int) extends Base
+case class Dummy1356(x: Int) extends Base
+case class Dummy1357(x: Int) extends Base
+case class Dummy1358(x: Int) extends Base
+case class Dummy1359(x: Int) extends Base
+case class Dummy1360(x: Int) extends Base
+case class Dummy1361(x: Int) extends Base
+case class Dummy1362(x: Int) extends Base
+case class Dummy1363(x: Int) extends Base
+case class Dummy1364(x: Int) extends Base
+case class Dummy1365(x: Int) extends Base
+case class Dummy1366(x: Int) extends Base
+case class Dummy1367(x: Int) extends Base
+case class Dummy1368(x: Int) extends Base
+case class Dummy1369(x: Int) extends Base
+case class Dummy1370(x: Int) extends Base
+case class Dummy1371(x: Int) extends Base
+case class Dummy1372(x: Int) extends Base
+case class Dummy1373(x: Int) extends Base
+case class Dummy1374(x: Int) extends Base
+case class Dummy1375(x: Int) extends Base
+case class Dummy1376(x: Int) extends Base
+case class Dummy1377(x: Int) extends Base
+case class Dummy1378(x: Int) extends Base
+case class Dummy1379(x: Int) extends Base
+case class Dummy1380(x: Int) extends Base
+case class Dummy1381(x: Int) extends Base
+case class Dummy1382(x: Int) extends Base
+case class Dummy1383(x: Int) extends Base
+case class Dummy1384(x: Int) extends Base
+case class Dummy1385(x: Int) extends Base
+case class Dummy1386(x: Int) extends Base
+case class Dummy1387(x: Int) extends Base
+case class Dummy1388(x: Int) extends Base
+case class Dummy1389(x: Int) extends Base
+case class Dummy1390(x: Int) extends Base
+case class Dummy1391(x: Int) extends Base
+case class Dummy1392(x: Int) extends Base
+case class Dummy1393(x: Int) extends Base
+case class Dummy1394(x: Int) extends Base
+case class Dummy1395(x: Int) extends Base
+case class Dummy1396(x: Int) extends Base
+case class Dummy1397(x: Int) extends Base
+case class Dummy1398(x: Int) extends Base
+case class Dummy1399(x: Int) extends Base
+case class Dummy1400(x: Int) extends Base
+case class Dummy1401(x: Int) extends Base
+case class Dummy1402(x: Int) extends Base
+case class Dummy1403(x: Int) extends Base
+case class Dummy1404(x: Int) extends Base
+case class Dummy1405(x: Int) extends Base
+case class Dummy1406(x: Int) extends Base
+case class Dummy1407(x: Int) extends Base
+case class Dummy1408(x: Int) extends Base
+case class Dummy1409(x: Int) extends Base
+case class Dummy1410(x: Int) extends Base
+case class Dummy1411(x: Int) extends Base
+case class Dummy1412(x: Int) extends Base
+case class Dummy1413(x: Int) extends Base
+case class Dummy1414(x: Int) extends Base
+case class Dummy1415(x: Int) extends Base
+case class Dummy1416(x: Int) extends Base
+case class Dummy1417(x: Int) extends Base
+case class Dummy1418(x: Int) extends Base
+case class Dummy1419(x: Int) extends Base
+case class Dummy1420(x: Int) extends Base
+case class Dummy1421(x: Int) extends Base
+case class Dummy1422(x: Int) extends Base
+case class Dummy1423(x: Int) extends Base
+case class Dummy1424(x: Int) extends Base
+case class Dummy1425(x: Int) extends Base
+case class Dummy1426(x: Int) extends Base
+case class Dummy1427(x: Int) extends Base
+case class Dummy1428(x: Int) extends Base
+case class Dummy1429(x: Int) extends Base
+case class Dummy1430(x: Int) extends Base
+case class Dummy1431(x: Int) extends Base
+case class Dummy1432(x: Int) extends Base
+case class Dummy1433(x: Int) extends Base
+case class Dummy1434(x: Int) extends Base
+case class Dummy1435(x: Int) extends Base
+case class Dummy1436(x: Int) extends Base
+case class Dummy1437(x: Int) extends Base
+case class Dummy1438(x: Int) extends Base
+case class Dummy1439(x: Int) extends Base
+case class Dummy1440(x: Int) extends Base
+case class Dummy1441(x: Int) extends Base
+case class Dummy1442(x: Int) extends Base
+case class Dummy1443(x: Int) extends Base
+case class Dummy1444(x: Int) extends Base
+case class Dummy1445(x: Int) extends Base
+case class Dummy1446(x: Int) extends Base
+case class Dummy1447(x: Int) extends Base
+case class Dummy1448(x: Int) extends Base
+case class Dummy1449(x: Int) extends Base
+case class Dummy1450(x: Int) extends Base
+case class Dummy1451(x: Int) extends Base
+case class Dummy1452(x: Int) extends Base
+case class Dummy1453(x: Int) extends Base
+case class Dummy1454(x: Int) extends Base
+case class Dummy1455(x: Int) extends Base
+case class Dummy1456(x: Int) extends Base
+case class Dummy1457(x: Int) extends Base
+case class Dummy1458(x: Int) extends Base
+case class Dummy1459(x: Int) extends Base
+case class Dummy1460(x: Int) extends Base
+case class Dummy1461(x: Int) extends Base
+case class Dummy1462(x: Int) extends Base
+case class Dummy1463(x: Int) extends Base
+case class Dummy1464(x: Int) extends Base
+case class Dummy1465(x: Int) extends Base
+case class Dummy1466(x: Int) extends Base
+case class Dummy1467(x: Int) extends Base
+case class Dummy1468(x: Int) extends Base
+case class Dummy1469(x: Int) extends Base
+case class Dummy1470(x: Int) extends Base
+case class Dummy1471(x: Int) extends Base
+case class Dummy1472(x: Int) extends Base
+case class Dummy1473(x: Int) extends Base
+case class Dummy1474(x: Int) extends Base
+case class Dummy1475(x: Int) extends Base
+case class Dummy1476(x: Int) extends Base
+case class Dummy1477(x: Int) extends Base
+case class Dummy1478(x: Int) extends Base
+case class Dummy1479(x: Int) extends Base
+case class Dummy1480(x: Int) extends Base
+case class Dummy1481(x: Int) extends Base
+case class Dummy1482(x: Int) extends Base
+case class Dummy1483(x: Int) extends Base
+case class Dummy1484(x: Int) extends Base
+case class Dummy1485(x: Int) extends Base
+case class Dummy1486(x: Int) extends Base
+case class Dummy1487(x: Int) extends Base
+case class Dummy1488(x: Int) extends Base
+case class Dummy1489(x: Int) extends Base
+case class Dummy1490(x: Int) extends Base
+case class Dummy1491(x: Int) extends Base
+case class Dummy1492(x: Int) extends Base
+case class Dummy1493(x: Int) extends Base
+case class Dummy1494(x: Int) extends Base
+case class Dummy1495(x: Int) extends Base
+case class Dummy1496(x: Int) extends Base
+case class Dummy1497(x: Int) extends Base
+case class Dummy1498(x: Int) extends Base
+case class Dummy1499(x: Int) extends Base
+case class Dummy1500(x: Int) extends Base
+case class Dummy1501(x: Int) extends Base
+case class Dummy1502(x: Int) extends Base
+case class Dummy1503(x: Int) extends Base
+case class Dummy1504(x: Int) extends Base
+case class Dummy1505(x: Int) extends Base
+case class Dummy1506(x: Int) extends Base
+case class Dummy1507(x: Int) extends Base
+case class Dummy1508(x: Int) extends Base
+case class Dummy1509(x: Int) extends Base
+case class Dummy1510(x: Int) extends Base
+case class Dummy1511(x: Int) extends Base
+case class Dummy1512(x: Int) extends Base
+case class Dummy1513(x: Int) extends Base
+case class Dummy1514(x: Int) extends Base
+case class Dummy1515(x: Int) extends Base
+case class Dummy1516(x: Int) extends Base
+case class Dummy1517(x: Int) extends Base
+case class Dummy1518(x: Int) extends Base
+case class Dummy1519(x: Int) extends Base
+case class Dummy1520(x: Int) extends Base
+case class Dummy1521(x: Int) extends Base
+case class Dummy1522(x: Int) extends Base
+case class Dummy1523(x: Int) extends Base
+case class Dummy1524(x: Int) extends Base
+case class Dummy1525(x: Int) extends Base
+case class Dummy1526(x: Int) extends Base
+case class Dummy1527(x: Int) extends Base
+case class Dummy1528(x: Int) extends Base
+case class Dummy1529(x: Int) extends Base
+case class Dummy1530(x: Int) extends Base
+case class Dummy1531(x: Int) extends Base
+case class Dummy1532(x: Int) extends Base
+case class Dummy1533(x: Int) extends Base
+case class Dummy1534(x: Int) extends Base
+case class Dummy1535(x: Int) extends Base
+case class Dummy1536(x: Int) extends Base
+case class Dummy1537(x: Int) extends Base
+case class Dummy1538(x: Int) extends Base
+case class Dummy1539(x: Int) extends Base
+case class Dummy1540(x: Int) extends Base
+case class Dummy1541(x: Int) extends Base
+case class Dummy1542(x: Int) extends Base
+case class Dummy1543(x: Int) extends Base
+case class Dummy1544(x: Int) extends Base
+case class Dummy1545(x: Int) extends Base
+case class Dummy1546(x: Int) extends Base
+case class Dummy1547(x: Int) extends Base
+case class Dummy1548(x: Int) extends Base
+case class Dummy1549(x: Int) extends Base
+case class Dummy1550(x: Int) extends Base
+case class Dummy1551(x: Int) extends Base
+case class Dummy1552(x: Int) extends Base
+case class Dummy1553(x: Int) extends Base
+case class Dummy1554(x: Int) extends Base
+case class Dummy1555(x: Int) extends Base
+case class Dummy1556(x: Int) extends Base
+case class Dummy1557(x: Int) extends Base
+case class Dummy1558(x: Int) extends Base
+case class Dummy1559(x: Int) extends Base
+case class Dummy1560(x: Int) extends Base
+case class Dummy1561(x: Int) extends Base
+case class Dummy1562(x: Int) extends Base
+case class Dummy1563(x: Int) extends Base
+case class Dummy1564(x: Int) extends Base
+case class Dummy1565(x: Int) extends Base
+case class Dummy1566(x: Int) extends Base
+case class Dummy1567(x: Int) extends Base
+case class Dummy1568(x: Int) extends Base
+case class Dummy1569(x: Int) extends Base
+case class Dummy1570(x: Int) extends Base
+case class Dummy1571(x: Int) extends Base
+case class Dummy1572(x: Int) extends Base
+case class Dummy1573(x: Int) extends Base
+case class Dummy1574(x: Int) extends Base
+case class Dummy1575(x: Int) extends Base
+case class Dummy1576(x: Int) extends Base
+case class Dummy1577(x: Int) extends Base
+case class Dummy1578(x: Int) extends Base
+case class Dummy1579(x: Int) extends Base
+case class Dummy1580(x: Int) extends Base
+case class Dummy1581(x: Int) extends Base
+case class Dummy1582(x: Int) extends Base
+case class Dummy1583(x: Int) extends Base
+case class Dummy1584(x: Int) extends Base
+case class Dummy1585(x: Int) extends Base
+case class Dummy1586(x: Int) extends Base
+case class Dummy1587(x: Int) extends Base
+case class Dummy1588(x: Int) extends Base
+case class Dummy1589(x: Int) extends Base
+case class Dummy1590(x: Int) extends Base
+case class Dummy1591(x: Int) extends Base
+case class Dummy1592(x: Int) extends Base
+case class Dummy1593(x: Int) extends Base
+case class Dummy1594(x: Int) extends Base
+case class Dummy1595(x: Int) extends Base
+case class Dummy1596(x: Int) extends Base
+case class Dummy1597(x: Int) extends Base
+case class Dummy1598(x: Int) extends Base
+case class Dummy1599(x: Int) extends Base
+case class Dummy1600(x: Int) extends Base
+case class Dummy1601(x: Int) extends Base
+case class Dummy1602(x: Int) extends Base
+case class Dummy1603(x: Int) extends Base
+case class Dummy1604(x: Int) extends Base
+case class Dummy1605(x: Int) extends Base
+case class Dummy1606(x: Int) extends Base
+case class Dummy1607(x: Int) extends Base
+case class Dummy1608(x: Int) extends Base
+case class Dummy1609(x: Int) extends Base
+case class Dummy1610(x: Int) extends Base
+case class Dummy1611(x: Int) extends Base
+case class Dummy1612(x: Int) extends Base
+case class Dummy1613(x: Int) extends Base
+case class Dummy1614(x: Int) extends Base
+case class Dummy1615(x: Int) extends Base
+case class Dummy1616(x: Int) extends Base
+case class Dummy1617(x: Int) extends Base
+case class Dummy1618(x: Int) extends Base
+case class Dummy1619(x: Int) extends Base
+case class Dummy1620(x: Int) extends Base
+case class Dummy1621(x: Int) extends Base
+case class Dummy1622(x: Int) extends Base
+case class Dummy1623(x: Int) extends Base
+case class Dummy1624(x: Int) extends Base
+case class Dummy1625(x: Int) extends Base
+case class Dummy1626(x: Int) extends Base
+case class Dummy1627(x: Int) extends Base
+case class Dummy1628(x: Int) extends Base
+case class Dummy1629(x: Int) extends Base
+case class Dummy1630(x: Int) extends Base
+case class Dummy1631(x: Int) extends Base
+case class Dummy1632(x: Int) extends Base
+case class Dummy1633(x: Int) extends Base
+case class Dummy1634(x: Int) extends Base
+case class Dummy1635(x: Int) extends Base
+case class Dummy1636(x: Int) extends Base
+case class Dummy1637(x: Int) extends Base
+case class Dummy1638(x: Int) extends Base
+case class Dummy1639(x: Int) extends Base
+case class Dummy1640(x: Int) extends Base
+case class Dummy1641(x: Int) extends Base
+case class Dummy1642(x: Int) extends Base
+case class Dummy1643(x: Int) extends Base
+case class Dummy1644(x: Int) extends Base
+case class Dummy1645(x: Int) extends Base
+case class Dummy1646(x: Int) extends Base
+case class Dummy1647(x: Int) extends Base
+case class Dummy1648(x: Int) extends Base
+case class Dummy1649(x: Int) extends Base
+case class Dummy1650(x: Int) extends Base
+case class Dummy1651(x: Int) extends Base
+case class Dummy1652(x: Int) extends Base
+case class Dummy1653(x: Int) extends Base
+case class Dummy1654(x: Int) extends Base
+case class Dummy1655(x: Int) extends Base
+case class Dummy1656(x: Int) extends Base
+case class Dummy1657(x: Int) extends Base
+case class Dummy1658(x: Int) extends Base
+case class Dummy1659(x: Int) extends Base
+case class Dummy1660(x: Int) extends Base
+case class Dummy1661(x: Int) extends Base
+case class Dummy1662(x: Int) extends Base
+case class Dummy1663(x: Int) extends Base
+case class Dummy1664(x: Int) extends Base
+case class Dummy1665(x: Int) extends Base
+case class Dummy1666(x: Int) extends Base
+case class Dummy1667(x: Int) extends Base
+case class Dummy1668(x: Int) extends Base
+case class Dummy1669(x: Int) extends Base
+case class Dummy1670(x: Int) extends Base
+case class Dummy1671(x: Int) extends Base
+case class Dummy1672(x: Int) extends Base
+case class Dummy1673(x: Int) extends Base
+case class Dummy1674(x: Int) extends Base
+case class Dummy1675(x: Int) extends Base
+case class Dummy1676(x: Int) extends Base
+case class Dummy1677(x: Int) extends Base
+case class Dummy1678(x: Int) extends Base
+case class Dummy1679(x: Int) extends Base
+case class Dummy1680(x: Int) extends Base
+case class Dummy1681(x: Int) extends Base
+case class Dummy1682(x: Int) extends Base
+case class Dummy1683(x: Int) extends Base
+case class Dummy1684(x: Int) extends Base
+case class Dummy1685(x: Int) extends Base
+case class Dummy1686(x: Int) extends Base
+case class Dummy1687(x: Int) extends Base
+case class Dummy1688(x: Int) extends Base
+case class Dummy1689(x: Int) extends Base
+case class Dummy1690(x: Int) extends Base
+case class Dummy1691(x: Int) extends Base
+case class Dummy1692(x: Int) extends Base
+case class Dummy1693(x: Int) extends Base
+case class Dummy1694(x: Int) extends Base
+case class Dummy1695(x: Int) extends Base
+case class Dummy1696(x: Int) extends Base
+case class Dummy1697(x: Int) extends Base
+case class Dummy1698(x: Int) extends Base
+case class Dummy1699(x: Int) extends Base
+case class Dummy1700(x: Int) extends Base
+case class Dummy1701(x: Int) extends Base
+case class Dummy1702(x: Int) extends Base
+case class Dummy1703(x: Int) extends Base
+case class Dummy1704(x: Int) extends Base
+case class Dummy1705(x: Int) extends Base
+case class Dummy1706(x: Int) extends Base
+case class Dummy1707(x: Int) extends Base
+case class Dummy1708(x: Int) extends Base
+case class Dummy1709(x: Int) extends Base
+case class Dummy1710(x: Int) extends Base
+case class Dummy1711(x: Int) extends Base
+case class Dummy1712(x: Int) extends Base
+case class Dummy1713(x: Int) extends Base
+case class Dummy1714(x: Int) extends Base
+case class Dummy1715(x: Int) extends Base
+case class Dummy1716(x: Int) extends Base
+case class Dummy1717(x: Int) extends Base
+case class Dummy1718(x: Int) extends Base
+case class Dummy1719(x: Int) extends Base
+case class Dummy1720(x: Int) extends Base
+case class Dummy1721(x: Int) extends Base
+case class Dummy1722(x: Int) extends Base
+case class Dummy1723(x: Int) extends Base
+case class Dummy1724(x: Int) extends Base
+case class Dummy1725(x: Int) extends Base
+case class Dummy1726(x: Int) extends Base
+case class Dummy1727(x: Int) extends Base
+case class Dummy1728(x: Int) extends Base
+case class Dummy1729(x: Int) extends Base
+case class Dummy1730(x: Int) extends Base
+case class Dummy1731(x: Int) extends Base
+case class Dummy1732(x: Int) extends Base
+case class Dummy1733(x: Int) extends Base
+case class Dummy1734(x: Int) extends Base
+case class Dummy1735(x: Int) extends Base
+case class Dummy1736(x: Int) extends Base
+case class Dummy1737(x: Int) extends Base
+case class Dummy1738(x: Int) extends Base
+case class Dummy1739(x: Int) extends Base
+case class Dummy1740(x: Int) extends Base
+case class Dummy1741(x: Int) extends Base
+case class Dummy1742(x: Int) extends Base
+case class Dummy1743(x: Int) extends Base
+case class Dummy1744(x: Int) extends Base
+case class Dummy1745(x: Int) extends Base
+case class Dummy1746(x: Int) extends Base
+case class Dummy1747(x: Int) extends Base
+case class Dummy1748(x: Int) extends Base
+case class Dummy1749(x: Int) extends Base
+case class Dummy1750(x: Int) extends Base
+case class Dummy1751(x: Int) extends Base
+case class Dummy1752(x: Int) extends Base
+case class Dummy1753(x: Int) extends Base
+case class Dummy1754(x: Int) extends Base
+case class Dummy1755(x: Int) extends Base
+case class Dummy1756(x: Int) extends Base
+case class Dummy1757(x: Int) extends Base
+case class Dummy1758(x: Int) extends Base
+case class Dummy1759(x: Int) extends Base
+case class Dummy1760(x: Int) extends Base
+case class Dummy1761(x: Int) extends Base
+case class Dummy1762(x: Int) extends Base
+case class Dummy1763(x: Int) extends Base
+case class Dummy1764(x: Int) extends Base
+case class Dummy1765(x: Int) extends Base
+case class Dummy1766(x: Int) extends Base
+case class Dummy1767(x: Int) extends Base
+case class Dummy1768(x: Int) extends Base
+case class Dummy1769(x: Int) extends Base
+case class Dummy1770(x: Int) extends Base
+case class Dummy1771(x: Int) extends Base
+case class Dummy1772(x: Int) extends Base
+case class Dummy1773(x: Int) extends Base
+case class Dummy1774(x: Int) extends Base
+case class Dummy1775(x: Int) extends Base
+case class Dummy1776(x: Int) extends Base
+case class Dummy1777(x: Int) extends Base
+case class Dummy1778(x: Int) extends Base
+case class Dummy1779(x: Int) extends Base
+case class Dummy1780(x: Int) extends Base
+case class Dummy1781(x: Int) extends Base
+case class Dummy1782(x: Int) extends Base
+case class Dummy1783(x: Int) extends Base
+case class Dummy1784(x: Int) extends Base
+case class Dummy1785(x: Int) extends Base
+case class Dummy1786(x: Int) extends Base
+case class Dummy1787(x: Int) extends Base
+case class Dummy1788(x: Int) extends Base
+case class Dummy1789(x: Int) extends Base
+case class Dummy1790(x: Int) extends Base
+case class Dummy1791(x: Int) extends Base
+case class Dummy1792(x: Int) extends Base
+case class Dummy1793(x: Int) extends Base
+case class Dummy1794(x: Int) extends Base
+case class Dummy1795(x: Int) extends Base
+case class Dummy1796(x: Int) extends Base
+case class Dummy1797(x: Int) extends Base
+case class Dummy1798(x: Int) extends Base
+case class Dummy1799(x: Int) extends Base
+case class Dummy1800(x: Int) extends Base
+case class Dummy1801(x: Int) extends Base
+case class Dummy1802(x: Int) extends Base
+case class Dummy1803(x: Int) extends Base
+case class Dummy1804(x: Int) extends Base
+case class Dummy1805(x: Int) extends Base
+case class Dummy1806(x: Int) extends Base
+case class Dummy1807(x: Int) extends Base
+case class Dummy1808(x: Int) extends Base
+case class Dummy1809(x: Int) extends Base
+case class Dummy1810(x: Int) extends Base
+case class Dummy1811(x: Int) extends Base
+case class Dummy1812(x: Int) extends Base
+case class Dummy1813(x: Int) extends Base
+case class Dummy1814(x: Int) extends Base
+case class Dummy1815(x: Int) extends Base
+case class Dummy1816(x: Int) extends Base
+case class Dummy1817(x: Int) extends Base
+case class Dummy1818(x: Int) extends Base
+case class Dummy1819(x: Int) extends Base
+case class Dummy1820(x: Int) extends Base
+case class Dummy1821(x: Int) extends Base
+case class Dummy1822(x: Int) extends Base
+case class Dummy1823(x: Int) extends Base
+case class Dummy1824(x: Int) extends Base
+case class Dummy1825(x: Int) extends Base
+case class Dummy1826(x: Int) extends Base
+case class Dummy1827(x: Int) extends Base
+case class Dummy1828(x: Int) extends Base
+case class Dummy1829(x: Int) extends Base
+case class Dummy1830(x: Int) extends Base
+case class Dummy1831(x: Int) extends Base
+case class Dummy1832(x: Int) extends Base
+case class Dummy1833(x: Int) extends Base
+case class Dummy1834(x: Int) extends Base
+case class Dummy1835(x: Int) extends Base
+case class Dummy1836(x: Int) extends Base
+case class Dummy1837(x: Int) extends Base
+case class Dummy1838(x: Int) extends Base
+case class Dummy1839(x: Int) extends Base
+case class Dummy1840(x: Int) extends Base
+case class Dummy1841(x: Int) extends Base
+case class Dummy1842(x: Int) extends Base
+case class Dummy1843(x: Int) extends Base
+case class Dummy1844(x: Int) extends Base
+case class Dummy1845(x: Int) extends Base
+case class Dummy1846(x: Int) extends Base
+case class Dummy1847(x: Int) extends Base
+case class Dummy1848(x: Int) extends Base
+case class Dummy1849(x: Int) extends Base
+case class Dummy1850(x: Int) extends Base
+case class Dummy1851(x: Int) extends Base
+case class Dummy1852(x: Int) extends Base
+case class Dummy1853(x: Int) extends Base
+case class Dummy1854(x: Int) extends Base
+case class Dummy1855(x: Int) extends Base
+case class Dummy1856(x: Int) extends Base
+case class Dummy1857(x: Int) extends Base
+case class Dummy1858(x: Int) extends Base
+case class Dummy1859(x: Int) extends Base
+case class Dummy1860(x: Int) extends Base
+case class Dummy1861(x: Int) extends Base
+case class Dummy1862(x: Int) extends Base
+case class Dummy1863(x: Int) extends Base
+case class Dummy1864(x: Int) extends Base
+case class Dummy1865(x: Int) extends Base
+case class Dummy1866(x: Int) extends Base
+case class Dummy1867(x: Int) extends Base
+case class Dummy1868(x: Int) extends Base
+case class Dummy1869(x: Int) extends Base
+case class Dummy1870(x: Int) extends Base
+case class Dummy1871(x: Int) extends Base
+case class Dummy1872(x: Int) extends Base
+case class Dummy1873(x: Int) extends Base
+case class Dummy1874(x: Int) extends Base
+case class Dummy1875(x: Int) extends Base
+case class Dummy1876(x: Int) extends Base
+case class Dummy1877(x: Int) extends Base
+case class Dummy1878(x: Int) extends Base
+case class Dummy1879(x: Int) extends Base
+case class Dummy1880(x: Int) extends Base
+case class Dummy1881(x: Int) extends Base
+case class Dummy1882(x: Int) extends Base
+case class Dummy1883(x: Int) extends Base
+case class Dummy1884(x: Int) extends Base
+case class Dummy1885(x: Int) extends Base
+case class Dummy1886(x: Int) extends Base
+case class Dummy1887(x: Int) extends Base
+case class Dummy1888(x: Int) extends Base
+case class Dummy1889(x: Int) extends Base
+case class Dummy1890(x: Int) extends Base
+case class Dummy1891(x: Int) extends Base
+case class Dummy1892(x: Int) extends Base
+case class Dummy1893(x: Int) extends Base
+case class Dummy1894(x: Int) extends Base
+case class Dummy1895(x: Int) extends Base
+case class Dummy1896(x: Int) extends Base
+case class Dummy1897(x: Int) extends Base
+case class Dummy1898(x: Int) extends Base
+case class Dummy1899(x: Int) extends Base
+case class Dummy1900(x: Int) extends Base
+case class Dummy1901(x: Int) extends Base
+case class Dummy1902(x: Int) extends Base
+case class Dummy1903(x: Int) extends Base
+case class Dummy1904(x: Int) extends Base
+case class Dummy1905(x: Int) extends Base
+case class Dummy1906(x: Int) extends Base
+case class Dummy1907(x: Int) extends Base
+case class Dummy1908(x: Int) extends Base
+case class Dummy1909(x: Int) extends Base
+case class Dummy1910(x: Int) extends Base
+case class Dummy1911(x: Int) extends Base
+case class Dummy1912(x: Int) extends Base
+case class Dummy1913(x: Int) extends Base
+case class Dummy1914(x: Int) extends Base
+case class Dummy1915(x: Int) extends Base
+case class Dummy1916(x: Int) extends Base
+case class Dummy1917(x: Int) extends Base
+case class Dummy1918(x: Int) extends Base
+case class Dummy1919(x: Int) extends Base
+case class Dummy1920(x: Int) extends Base
+case class Dummy1921(x: Int) extends Base
+case class Dummy1922(x: Int) extends Base
+case class Dummy1923(x: Int) extends Base
+case class Dummy1924(x: Int) extends Base
+case class Dummy1925(x: Int) extends Base
+case class Dummy1926(x: Int) extends Base
+case class Dummy1927(x: Int) extends Base
+case class Dummy1928(x: Int) extends Base
+case class Dummy1929(x: Int) extends Base
+case class Dummy1930(x: Int) extends Base
+case class Dummy1931(x: Int) extends Base
+case class Dummy1932(x: Int) extends Base
+case class Dummy1933(x: Int) extends Base
+case class Dummy1934(x: Int) extends Base
+case class Dummy1935(x: Int) extends Base
+case class Dummy1936(x: Int) extends Base
+case class Dummy1937(x: Int) extends Base
+case class Dummy1938(x: Int) extends Base
+case class Dummy1939(x: Int) extends Base
+case class Dummy1940(x: Int) extends Base
+case class Dummy1941(x: Int) extends Base
+case class Dummy1942(x: Int) extends Base
+case class Dummy1943(x: Int) extends Base
+case class Dummy1944(x: Int) extends Base
+case class Dummy1945(x: Int) extends Base
+case class Dummy1946(x: Int) extends Base
+case class Dummy1947(x: Int) extends Base
+case class Dummy1948(x: Int) extends Base
+case class Dummy1949(x: Int) extends Base
+case class Dummy1950(x: Int) extends Base
+case class Dummy1951(x: Int) extends Base
+case class Dummy1952(x: Int) extends Base
+case class Dummy1953(x: Int) extends Base
+case class Dummy1954(x: Int) extends Base
+case class Dummy1955(x: Int) extends Base
+case class Dummy1956(x: Int) extends Base
+case class Dummy1957(x: Int) extends Base
+case class Dummy1958(x: Int) extends Base
+case class Dummy1959(x: Int) extends Base
+case class Dummy1960(x: Int) extends Base
+case class Dummy1961(x: Int) extends Base
+case class Dummy1962(x: Int) extends Base
+case class Dummy1963(x: Int) extends Base
+case class Dummy1964(x: Int) extends Base
+case class Dummy1965(x: Int) extends Base
+case class Dummy1966(x: Int) extends Base
+case class Dummy1967(x: Int) extends Base
+case class Dummy1968(x: Int) extends Base
+case class Dummy1969(x: Int) extends Base
+case class Dummy1970(x: Int) extends Base
+case class Dummy1971(x: Int) extends Base
+case class Dummy1972(x: Int) extends Base
+case class Dummy1973(x: Int) extends Base
+case class Dummy1974(x: Int) extends Base
+case class Dummy1975(x: Int) extends Base
+case class Dummy1976(x: Int) extends Base
+case class Dummy1977(x: Int) extends Base
+case class Dummy1978(x: Int) extends Base
+case class Dummy1979(x: Int) extends Base
+case class Dummy1980(x: Int) extends Base
+case class Dummy1981(x: Int) extends Base
+case class Dummy1982(x: Int) extends Base
+case class Dummy1983(x: Int) extends Base
+case class Dummy1984(x: Int) extends Base
+case class Dummy1985(x: Int) extends Base
+case class Dummy1986(x: Int) extends Base
+case class Dummy1987(x: Int) extends Base
+case class Dummy1988(x: Int) extends Base
+case class Dummy1989(x: Int) extends Base
+case class Dummy1990(x: Int) extends Base
+case class Dummy1991(x: Int) extends Base
+case class Dummy1992(x: Int) extends Base
+case class Dummy1993(x: Int) extends Base
+case class Dummy1994(x: Int) extends Base
+case class Dummy1995(x: Int) extends Base
+case class Dummy1996(x: Int) extends Base
+case class Dummy1997(x: Int) extends Base
+case class Dummy1998(x: Int) extends Base
+case class Dummy1999(x: Int) extends Base
+def test(y: Base) = y match {
+ case Dummy0(p) => p
+ case Dummy1(p) => p
+ case Dummy2(p) => p
+ case Dummy3(p) => p
+ case Dummy4(p) => p
+ case Dummy5(p) => p
+ case Dummy6(p) => p
+ case Dummy7(p) => p
+ case Dummy8(p) => p
+ case Dummy9(p) => p
+ case Dummy10(p) => p
+ case Dummy11(p) => p
+ case Dummy12(p) => p
+ case Dummy13(p) => p
+ case Dummy14(p) => p
+ case Dummy15(p) => p
+ case Dummy16(p) => p
+ case Dummy17(p) => p
+ case Dummy18(p) => p
+ case Dummy19(p) => p
+ case Dummy20(p) => p
+ case Dummy21(p) => p
+ case Dummy22(p) => p
+ case Dummy23(p) => p
+ case Dummy24(p) => p
+ case Dummy25(p) => p
+ case Dummy26(p) => p
+ case Dummy27(p) => p
+ case Dummy28(p) => p
+ case Dummy29(p) => p
+ case Dummy30(p) => p
+ case Dummy31(p) => p
+ case Dummy32(p) => p
+ case Dummy33(p) => p
+ case Dummy34(p) => p
+ case Dummy35(p) => p
+ case Dummy36(p) => p
+ case Dummy37(p) => p
+ case Dummy38(p) => p
+ case Dummy39(p) => p
+ case Dummy40(p) => p
+ case Dummy41(p) => p
+ case Dummy42(p) => p
+ case Dummy43(p) => p
+ case Dummy44(p) => p
+ case Dummy45(p) => p
+ case Dummy46(p) => p
+ case Dummy47(p) => p
+ case Dummy48(p) => p
+ case Dummy49(p) => p
+ case Dummy50(p) => p
+ case Dummy51(p) => p
+ case Dummy52(p) => p
+ case Dummy53(p) => p
+ case Dummy54(p) => p
+ case Dummy55(p) => p
+ case Dummy56(p) => p
+ case Dummy57(p) => p
+ case Dummy58(p) => p
+ case Dummy59(p) => p
+ case Dummy60(p) => p
+ case Dummy61(p) => p
+ case Dummy62(p) => p
+ case Dummy63(p) => p
+ case Dummy64(p) => p
+ case Dummy65(p) => p
+ case Dummy66(p) => p
+ case Dummy67(p) => p
+ case Dummy68(p) => p
+ case Dummy69(p) => p
+ case Dummy70(p) => p
+ case Dummy71(p) => p
+ case Dummy72(p) => p
+ case Dummy73(p) => p
+ case Dummy74(p) => p
+ case Dummy75(p) => p
+ case Dummy76(p) => p
+ case Dummy77(p) => p
+ case Dummy78(p) => p
+ case Dummy79(p) => p
+ case Dummy80(p) => p
+ case Dummy81(p) => p
+ case Dummy82(p) => p
+ case Dummy83(p) => p
+ case Dummy84(p) => p
+ case Dummy85(p) => p
+ case Dummy86(p) => p
+ case Dummy87(p) => p
+ case Dummy88(p) => p
+ case Dummy89(p) => p
+ case Dummy90(p) => p
+ case Dummy91(p) => p
+ case Dummy92(p) => p
+ case Dummy93(p) => p
+ case Dummy94(p) => p
+ case Dummy95(p) => p
+ case Dummy96(p) => p
+ case Dummy97(p) => p
+ case Dummy98(p) => p
+ case Dummy99(p) => p
+ case Dummy100(p) => p
+ case Dummy101(p) => p
+ case Dummy102(p) => p
+ case Dummy103(p) => p
+ case Dummy104(p) => p
+ case Dummy105(p) => p
+ case Dummy106(p) => p
+ case Dummy107(p) => p
+ case Dummy108(p) => p
+ case Dummy109(p) => p
+ case Dummy110(p) => p
+ case Dummy111(p) => p
+ case Dummy112(p) => p
+ case Dummy113(p) => p
+ case Dummy114(p) => p
+ case Dummy115(p) => p
+ case Dummy116(p) => p
+ case Dummy117(p) => p
+ case Dummy118(p) => p
+ case Dummy119(p) => p
+ case Dummy120(p) => p
+ case Dummy121(p) => p
+ case Dummy122(p) => p
+ case Dummy123(p) => p
+ case Dummy124(p) => p
+ case Dummy125(p) => p
+ case Dummy126(p) => p
+ case Dummy127(p) => p
+ case Dummy128(p) => p
+ case Dummy129(p) => p
+ case Dummy130(p) => p
+ case Dummy131(p) => p
+ case Dummy132(p) => p
+ case Dummy133(p) => p
+ case Dummy134(p) => p
+ case Dummy135(p) => p
+ case Dummy136(p) => p
+ case Dummy137(p) => p
+ case Dummy138(p) => p
+ case Dummy139(p) => p
+ case Dummy140(p) => p
+ case Dummy141(p) => p
+ case Dummy142(p) => p
+ case Dummy143(p) => p
+ case Dummy144(p) => p
+ case Dummy145(p) => p
+ case Dummy146(p) => p
+ case Dummy147(p) => p
+ case Dummy148(p) => p
+ case Dummy149(p) => p
+ case Dummy150(p) => p
+ case Dummy151(p) => p
+ case Dummy152(p) => p
+ case Dummy153(p) => p
+ case Dummy154(p) => p
+ case Dummy155(p) => p
+ case Dummy156(p) => p
+ case Dummy157(p) => p
+ case Dummy158(p) => p
+ case Dummy159(p) => p
+ case Dummy160(p) => p
+ case Dummy161(p) => p
+ case Dummy162(p) => p
+ case Dummy163(p) => p
+ case Dummy164(p) => p
+ case Dummy165(p) => p
+ case Dummy166(p) => p
+ case Dummy167(p) => p
+ case Dummy168(p) => p
+ case Dummy169(p) => p
+ case Dummy170(p) => p
+ case Dummy171(p) => p
+ case Dummy172(p) => p
+ case Dummy173(p) => p
+ case Dummy174(p) => p
+ case Dummy175(p) => p
+ case Dummy176(p) => p
+ case Dummy177(p) => p
+ case Dummy178(p) => p
+ case Dummy179(p) => p
+ case Dummy180(p) => p
+ case Dummy181(p) => p
+ case Dummy182(p) => p
+ case Dummy183(p) => p
+ case Dummy184(p) => p
+ case Dummy185(p) => p
+ case Dummy186(p) => p
+ case Dummy187(p) => p
+ case Dummy188(p) => p
+ case Dummy189(p) => p
+ case Dummy190(p) => p
+ case Dummy191(p) => p
+ case Dummy192(p) => p
+ case Dummy193(p) => p
+ case Dummy194(p) => p
+ case Dummy195(p) => p
+ case Dummy196(p) => p
+ case Dummy197(p) => p
+ case Dummy198(p) => p
+ case Dummy199(p) => p
+ case Dummy200(p) => p
+ case Dummy201(p) => p
+ case Dummy202(p) => p
+ case Dummy203(p) => p
+ case Dummy204(p) => p
+ case Dummy205(p) => p
+ case Dummy206(p) => p
+ case Dummy207(p) => p
+ case Dummy208(p) => p
+ case Dummy209(p) => p
+ case Dummy210(p) => p
+ case Dummy211(p) => p
+ case Dummy212(p) => p
+ case Dummy213(p) => p
+ case Dummy214(p) => p
+ case Dummy215(p) => p
+ case Dummy216(p) => p
+ case Dummy217(p) => p
+ case Dummy218(p) => p
+ case Dummy219(p) => p
+ case Dummy220(p) => p
+ case Dummy221(p) => p
+ case Dummy222(p) => p
+ case Dummy223(p) => p
+ case Dummy224(p) => p
+ case Dummy225(p) => p
+ case Dummy226(p) => p
+ case Dummy227(p) => p
+ case Dummy228(p) => p
+ case Dummy229(p) => p
+ case Dummy230(p) => p
+ case Dummy231(p) => p
+ case Dummy232(p) => p
+ case Dummy233(p) => p
+ case Dummy234(p) => p
+ case Dummy235(p) => p
+ case Dummy236(p) => p
+ case Dummy237(p) => p
+ case Dummy238(p) => p
+ case Dummy239(p) => p
+ case Dummy240(p) => p
+ case Dummy241(p) => p
+ case Dummy242(p) => p
+ case Dummy243(p) => p
+ case Dummy244(p) => p
+ case Dummy245(p) => p
+ case Dummy246(p) => p
+ case Dummy247(p) => p
+ case Dummy248(p) => p
+ case Dummy249(p) => p
+ case Dummy250(p) => p
+ case Dummy251(p) => p
+ case Dummy252(p) => p
+ case Dummy253(p) => p
+ case Dummy254(p) => p
+ case Dummy255(p) => p
+ case Dummy256(p) => p
+ case Dummy257(p) => p
+ case Dummy258(p) => p
+ case Dummy259(p) => p
+ case Dummy260(p) => p
+ case Dummy261(p) => p
+ case Dummy262(p) => p
+ case Dummy263(p) => p
+ case Dummy264(p) => p
+ case Dummy265(p) => p
+ case Dummy266(p) => p
+ case Dummy267(p) => p
+ case Dummy268(p) => p
+ case Dummy269(p) => p
+ case Dummy270(p) => p
+ case Dummy271(p) => p
+ case Dummy272(p) => p
+ case Dummy273(p) => p
+ case Dummy274(p) => p
+ case Dummy275(p) => p
+ case Dummy276(p) => p
+ case Dummy277(p) => p
+ case Dummy278(p) => p
+ case Dummy279(p) => p
+ case Dummy280(p) => p
+ case Dummy281(p) => p
+ case Dummy282(p) => p
+ case Dummy283(p) => p
+ case Dummy284(p) => p
+ case Dummy285(p) => p
+ case Dummy286(p) => p
+ case Dummy287(p) => p
+ case Dummy288(p) => p
+ case Dummy289(p) => p
+ case Dummy290(p) => p
+ case Dummy291(p) => p
+ case Dummy292(p) => p
+ case Dummy293(p) => p
+ case Dummy294(p) => p
+ case Dummy295(p) => p
+ case Dummy296(p) => p
+ case Dummy297(p) => p
+ case Dummy298(p) => p
+ case Dummy299(p) => p
+ case Dummy300(p) => p
+ case Dummy301(p) => p
+ case Dummy302(p) => p
+ case Dummy303(p) => p
+ case Dummy304(p) => p
+ case Dummy305(p) => p
+ case Dummy306(p) => p
+ case Dummy307(p) => p
+ case Dummy308(p) => p
+ case Dummy309(p) => p
+ case Dummy310(p) => p
+ case Dummy311(p) => p
+ case Dummy312(p) => p
+ case Dummy313(p) => p
+ case Dummy314(p) => p
+ case Dummy315(p) => p
+ case Dummy316(p) => p
+ case Dummy317(p) => p
+ case Dummy318(p) => p
+ case Dummy319(p) => p
+ case Dummy320(p) => p
+ case Dummy321(p) => p
+ case Dummy322(p) => p
+ case Dummy323(p) => p
+ case Dummy324(p) => p
+ case Dummy325(p) => p
+ case Dummy326(p) => p
+ case Dummy327(p) => p
+ case Dummy328(p) => p
+ case Dummy329(p) => p
+ case Dummy330(p) => p
+ case Dummy331(p) => p
+ case Dummy332(p) => p
+ case Dummy333(p) => p
+ case Dummy334(p) => p
+ case Dummy335(p) => p
+ case Dummy336(p) => p
+ case Dummy337(p) => p
+ case Dummy338(p) => p
+ case Dummy339(p) => p
+ case Dummy340(p) => p
+ case Dummy341(p) => p
+ case Dummy342(p) => p
+ case Dummy343(p) => p
+ case Dummy344(p) => p
+ case Dummy345(p) => p
+ case Dummy346(p) => p
+ case Dummy347(p) => p
+ case Dummy348(p) => p
+ case Dummy349(p) => p
+ case Dummy350(p) => p
+ case Dummy351(p) => p
+ case Dummy352(p) => p
+ case Dummy353(p) => p
+ case Dummy354(p) => p
+ case Dummy355(p) => p
+ case Dummy356(p) => p
+ case Dummy357(p) => p
+ case Dummy358(p) => p
+ case Dummy359(p) => p
+ case Dummy360(p) => p
+ case Dummy361(p) => p
+ case Dummy362(p) => p
+ case Dummy363(p) => p
+ case Dummy364(p) => p
+ case Dummy365(p) => p
+ case Dummy366(p) => p
+ case Dummy367(p) => p
+ case Dummy368(p) => p
+ case Dummy369(p) => p
+ case Dummy370(p) => p
+ case Dummy371(p) => p
+ case Dummy372(p) => p
+ case Dummy373(p) => p
+ case Dummy374(p) => p
+ case Dummy375(p) => p
+ case Dummy376(p) => p
+ case Dummy377(p) => p
+ case Dummy378(p) => p
+ case Dummy379(p) => p
+ case Dummy380(p) => p
+ case Dummy381(p) => p
+ case Dummy382(p) => p
+ case Dummy383(p) => p
+ case Dummy384(p) => p
+ case Dummy385(p) => p
+ case Dummy386(p) => p
+ case Dummy387(p) => p
+ case Dummy388(p) => p
+ case Dummy389(p) => p
+ case Dummy390(p) => p
+ case Dummy391(p) => p
+ case Dummy392(p) => p
+ case Dummy393(p) => p
+ case Dummy394(p) => p
+ case Dummy395(p) => p
+ case Dummy396(p) => p
+ case Dummy397(p) => p
+ case Dummy398(p) => p
+ case Dummy399(p) => p
+ case Dummy400(p) => p
+ case Dummy401(p) => p
+ case Dummy402(p) => p
+ case Dummy403(p) => p
+ case Dummy404(p) => p
+ case Dummy405(p) => p
+ case Dummy406(p) => p
+ case Dummy407(p) => p
+ case Dummy408(p) => p
+ case Dummy409(p) => p
+ case Dummy410(p) => p
+ case Dummy411(p) => p
+ case Dummy412(p) => p
+ case Dummy413(p) => p
+ case Dummy414(p) => p
+ case Dummy415(p) => p
+ case Dummy416(p) => p
+ case Dummy417(p) => p
+ case Dummy418(p) => p
+ case Dummy419(p) => p
+ case Dummy420(p) => p
+ case Dummy421(p) => p
+ case Dummy422(p) => p
+ case Dummy423(p) => p
+ case Dummy424(p) => p
+ case Dummy425(p) => p
+ case Dummy426(p) => p
+ case Dummy427(p) => p
+ case Dummy428(p) => p
+ case Dummy429(p) => p
+ case Dummy430(p) => p
+ case Dummy431(p) => p
+ case Dummy432(p) => p
+ case Dummy433(p) => p
+ case Dummy434(p) => p
+ case Dummy435(p) => p
+ case Dummy436(p) => p
+ case Dummy437(p) => p
+ case Dummy438(p) => p
+ case Dummy439(p) => p
+ case Dummy440(p) => p
+ case Dummy441(p) => p
+ case Dummy442(p) => p
+ case Dummy443(p) => p
+ case Dummy444(p) => p
+ case Dummy445(p) => p
+ case Dummy446(p) => p
+ case Dummy447(p) => p
+ case Dummy448(p) => p
+ case Dummy449(p) => p
+ case Dummy450(p) => p
+ case Dummy451(p) => p
+ case Dummy452(p) => p
+ case Dummy453(p) => p
+ case Dummy454(p) => p
+ case Dummy455(p) => p
+ case Dummy456(p) => p
+ case Dummy457(p) => p
+ case Dummy458(p) => p
+ case Dummy459(p) => p
+ case Dummy460(p) => p
+ case Dummy461(p) => p
+ case Dummy462(p) => p
+ case Dummy463(p) => p
+ case Dummy464(p) => p
+ case Dummy465(p) => p
+ case Dummy466(p) => p
+ case Dummy467(p) => p
+ case Dummy468(p) => p
+ case Dummy469(p) => p
+ case Dummy470(p) => p
+ case Dummy471(p) => p
+ case Dummy472(p) => p
+ case Dummy473(p) => p
+ case Dummy474(p) => p
+ case Dummy475(p) => p
+ case Dummy476(p) => p
+ case Dummy477(p) => p
+ case Dummy478(p) => p
+ case Dummy479(p) => p
+ case Dummy480(p) => p
+ case Dummy481(p) => p
+ case Dummy482(p) => p
+ case Dummy483(p) => p
+ case Dummy484(p) => p
+ case Dummy485(p) => p
+ case Dummy486(p) => p
+ case Dummy487(p) => p
+ case Dummy488(p) => p
+ case Dummy489(p) => p
+ case Dummy490(p) => p
+ case Dummy491(p) => p
+ case Dummy492(p) => p
+ case Dummy493(p) => p
+ case Dummy494(p) => p
+ case Dummy495(p) => p
+ case Dummy496(p) => p
+ case Dummy497(p) => p
+ case Dummy498(p) => p
+ case Dummy499(p) => p
+ case Dummy500(p) => p
+ case Dummy501(p) => p
+ case Dummy502(p) => p
+ case Dummy503(p) => p
+ case Dummy504(p) => p
+ case Dummy505(p) => p
+ case Dummy506(p) => p
+ case Dummy507(p) => p
+ case Dummy508(p) => p
+ case Dummy509(p) => p
+ case Dummy510(p) => p
+ case Dummy511(p) => p
+ case Dummy512(p) => p
+ case Dummy513(p) => p
+ case Dummy514(p) => p
+ case Dummy515(p) => p
+ case Dummy516(p) => p
+ case Dummy517(p) => p
+ case Dummy518(p) => p
+ case Dummy519(p) => p
+ case Dummy520(p) => p
+ case Dummy521(p) => p
+ case Dummy522(p) => p
+ case Dummy523(p) => p
+ case Dummy524(p) => p
+ case Dummy525(p) => p
+ case Dummy526(p) => p
+ case Dummy527(p) => p
+ case Dummy528(p) => p
+ case Dummy529(p) => p
+ case Dummy530(p) => p
+ case Dummy531(p) => p
+ case Dummy532(p) => p
+ case Dummy533(p) => p
+ case Dummy534(p) => p
+ case Dummy535(p) => p
+ case Dummy536(p) => p
+ case Dummy537(p) => p
+ case Dummy538(p) => p
+ case Dummy539(p) => p
+ case Dummy540(p) => p
+ case Dummy541(p) => p
+ case Dummy542(p) => p
+ case Dummy543(p) => p
+ case Dummy544(p) => p
+ case Dummy545(p) => p
+ case Dummy546(p) => p
+ case Dummy547(p) => p
+ case Dummy548(p) => p
+ case Dummy549(p) => p
+ case Dummy550(p) => p
+ case Dummy551(p) => p
+ case Dummy552(p) => p
+ case Dummy553(p) => p
+ case Dummy554(p) => p
+ case Dummy555(p) => p
+ case Dummy556(p) => p
+ case Dummy557(p) => p
+ case Dummy558(p) => p
+ case Dummy559(p) => p
+ case Dummy560(p) => p
+ case Dummy561(p) => p
+ case Dummy562(p) => p
+ case Dummy563(p) => p
+ case Dummy564(p) => p
+ case Dummy565(p) => p
+ case Dummy566(p) => p
+ case Dummy567(p) => p
+ case Dummy568(p) => p
+ case Dummy569(p) => p
+ case Dummy570(p) => p
+ case Dummy571(p) => p
+ case Dummy572(p) => p
+ case Dummy573(p) => p
+ case Dummy574(p) => p
+ case Dummy575(p) => p
+ case Dummy576(p) => p
+ case Dummy577(p) => p
+ case Dummy578(p) => p
+ case Dummy579(p) => p
+ case Dummy580(p) => p
+ case Dummy581(p) => p
+ case Dummy582(p) => p
+ case Dummy583(p) => p
+ case Dummy584(p) => p
+ case Dummy585(p) => p
+ case Dummy586(p) => p
+ case Dummy587(p) => p
+ case Dummy588(p) => p
+ case Dummy589(p) => p
+ case Dummy590(p) => p
+ case Dummy591(p) => p
+ case Dummy592(p) => p
+ case Dummy593(p) => p
+ case Dummy594(p) => p
+ case Dummy595(p) => p
+ case Dummy596(p) => p
+ case Dummy597(p) => p
+ case Dummy598(p) => p
+ case Dummy599(p) => p
+ case Dummy600(p) => p
+ case Dummy601(p) => p
+ case Dummy602(p) => p
+ case Dummy603(p) => p
+ case Dummy604(p) => p
+ case Dummy605(p) => p
+ case Dummy606(p) => p
+ case Dummy607(p) => p
+ case Dummy608(p) => p
+ case Dummy609(p) => p
+ case Dummy610(p) => p
+ case Dummy611(p) => p
+ case Dummy612(p) => p
+ case Dummy613(p) => p
+ case Dummy614(p) => p
+ case Dummy615(p) => p
+ case Dummy616(p) => p
+ case Dummy617(p) => p
+ case Dummy618(p) => p
+ case Dummy619(p) => p
+ case Dummy620(p) => p
+ case Dummy621(p) => p
+ case Dummy622(p) => p
+ case Dummy623(p) => p
+ case Dummy624(p) => p
+ case Dummy625(p) => p
+ case Dummy626(p) => p
+ case Dummy627(p) => p
+ case Dummy628(p) => p
+ case Dummy629(p) => p
+ case Dummy630(p) => p
+ case Dummy631(p) => p
+ case Dummy632(p) => p
+ case Dummy633(p) => p
+ case Dummy634(p) => p
+ case Dummy635(p) => p
+ case Dummy636(p) => p
+ case Dummy637(p) => p
+ case Dummy638(p) => p
+ case Dummy639(p) => p
+ case Dummy640(p) => p
+ case Dummy641(p) => p
+ case Dummy642(p) => p
+ case Dummy643(p) => p
+ case Dummy644(p) => p
+ case Dummy645(p) => p
+ case Dummy646(p) => p
+ case Dummy647(p) => p
+ case Dummy648(p) => p
+ case Dummy649(p) => p
+ case Dummy650(p) => p
+ case Dummy651(p) => p
+ case Dummy652(p) => p
+ case Dummy653(p) => p
+ case Dummy654(p) => p
+ case Dummy655(p) => p
+ case Dummy656(p) => p
+ case Dummy657(p) => p
+ case Dummy658(p) => p
+ case Dummy659(p) => p
+ case Dummy660(p) => p
+ case Dummy661(p) => p
+ case Dummy662(p) => p
+ case Dummy663(p) => p
+ case Dummy664(p) => p
+ case Dummy665(p) => p
+ case Dummy666(p) => p
+ case Dummy667(p) => p
+ case Dummy668(p) => p
+ case Dummy669(p) => p
+ case Dummy670(p) => p
+ case Dummy671(p) => p
+ case Dummy672(p) => p
+ case Dummy673(p) => p
+ case Dummy674(p) => p
+ case Dummy675(p) => p
+ case Dummy676(p) => p
+ case Dummy677(p) => p
+ case Dummy678(p) => p
+ case Dummy679(p) => p
+ case Dummy680(p) => p
+ case Dummy681(p) => p
+ case Dummy682(p) => p
+ case Dummy683(p) => p
+ case Dummy684(p) => p
+ case Dummy685(p) => p
+ case Dummy686(p) => p
+ case Dummy687(p) => p
+ case Dummy688(p) => p
+ case Dummy689(p) => p
+ case Dummy690(p) => p
+ case Dummy691(p) => p
+ case Dummy692(p) => p
+ case Dummy693(p) => p
+ case Dummy694(p) => p
+ case Dummy695(p) => p
+ case Dummy696(p) => p
+ case Dummy697(p) => p
+ case Dummy698(p) => p
+ case Dummy699(p) => p
+ case Dummy700(p) => p
+ case Dummy701(p) => p
+ case Dummy702(p) => p
+ case Dummy703(p) => p
+ case Dummy704(p) => p
+ case Dummy705(p) => p
+ case Dummy706(p) => p
+ case Dummy707(p) => p
+ case Dummy708(p) => p
+ case Dummy709(p) => p
+ case Dummy710(p) => p
+ case Dummy711(p) => p
+ case Dummy712(p) => p
+ case Dummy713(p) => p
+ case Dummy714(p) => p
+ case Dummy715(p) => p
+ case Dummy716(p) => p
+ case Dummy717(p) => p
+ case Dummy718(p) => p
+ case Dummy719(p) => p
+ case Dummy720(p) => p
+ case Dummy721(p) => p
+ case Dummy722(p) => p
+ case Dummy723(p) => p
+ case Dummy724(p) => p
+ case Dummy725(p) => p
+ case Dummy726(p) => p
+ case Dummy727(p) => p
+ case Dummy728(p) => p
+ case Dummy729(p) => p
+ case Dummy730(p) => p
+ case Dummy731(p) => p
+ case Dummy732(p) => p
+ case Dummy733(p) => p
+ case Dummy734(p) => p
+ case Dummy735(p) => p
+ case Dummy736(p) => p
+ case Dummy737(p) => p
+ case Dummy738(p) => p
+ case Dummy739(p) => p
+ case Dummy740(p) => p
+ case Dummy741(p) => p
+ case Dummy742(p) => p
+ case Dummy743(p) => p
+ case Dummy744(p) => p
+ case Dummy745(p) => p
+ case Dummy746(p) => p
+ case Dummy747(p) => p
+ case Dummy748(p) => p
+ case Dummy749(p) => p
+ case Dummy750(p) => p
+ case Dummy751(p) => p
+ case Dummy752(p) => p
+ case Dummy753(p) => p
+ case Dummy754(p) => p
+ case Dummy755(p) => p
+ case Dummy756(p) => p
+ case Dummy757(p) => p
+ case Dummy758(p) => p
+ case Dummy759(p) => p
+ case Dummy760(p) => p
+ case Dummy761(p) => p
+ case Dummy762(p) => p
+ case Dummy763(p) => p
+ case Dummy764(p) => p
+ case Dummy765(p) => p
+ case Dummy766(p) => p
+ case Dummy767(p) => p
+ case Dummy768(p) => p
+ case Dummy769(p) => p
+ case Dummy770(p) => p
+ case Dummy771(p) => p
+ case Dummy772(p) => p
+ case Dummy773(p) => p
+ case Dummy774(p) => p
+ case Dummy775(p) => p
+ case Dummy776(p) => p
+ case Dummy777(p) => p
+ case Dummy778(p) => p
+ case Dummy779(p) => p
+ case Dummy780(p) => p
+ case Dummy781(p) => p
+ case Dummy782(p) => p
+ case Dummy783(p) => p
+ case Dummy784(p) => p
+ case Dummy785(p) => p
+ case Dummy786(p) => p
+ case Dummy787(p) => p
+ case Dummy788(p) => p
+ case Dummy789(p) => p
+ case Dummy790(p) => p
+ case Dummy791(p) => p
+ case Dummy792(p) => p
+ case Dummy793(p) => p
+ case Dummy794(p) => p
+ case Dummy795(p) => p
+ case Dummy796(p) => p
+ case Dummy797(p) => p
+ case Dummy798(p) => p
+ case Dummy799(p) => p
+ case Dummy800(p) => p
+ case Dummy801(p) => p
+ case Dummy802(p) => p
+ case Dummy803(p) => p
+ case Dummy804(p) => p
+ case Dummy805(p) => p
+ case Dummy806(p) => p
+ case Dummy807(p) => p
+ case Dummy808(p) => p
+ case Dummy809(p) => p
+ case Dummy810(p) => p
+ case Dummy811(p) => p
+ case Dummy812(p) => p
+ case Dummy813(p) => p
+ case Dummy814(p) => p
+ case Dummy815(p) => p
+ case Dummy816(p) => p
+ case Dummy817(p) => p
+ case Dummy818(p) => p
+ case Dummy819(p) => p
+ case Dummy820(p) => p
+ case Dummy821(p) => p
+ case Dummy822(p) => p
+ case Dummy823(p) => p
+ case Dummy824(p) => p
+ case Dummy825(p) => p
+ case Dummy826(p) => p
+ case Dummy827(p) => p
+ case Dummy828(p) => p
+ case Dummy829(p) => p
+ case Dummy830(p) => p
+ case Dummy831(p) => p
+ case Dummy832(p) => p
+ case Dummy833(p) => p
+ case Dummy834(p) => p
+ case Dummy835(p) => p
+ case Dummy836(p) => p
+ case Dummy837(p) => p
+ case Dummy838(p) => p
+ case Dummy839(p) => p
+ case Dummy840(p) => p
+ case Dummy841(p) => p
+ case Dummy842(p) => p
+ case Dummy843(p) => p
+ case Dummy844(p) => p
+ case Dummy845(p) => p
+ case Dummy846(p) => p
+ case Dummy847(p) => p
+ case Dummy848(p) => p
+ case Dummy849(p) => p
+ case Dummy850(p) => p
+ case Dummy851(p) => p
+ case Dummy852(p) => p
+ case Dummy853(p) => p
+ case Dummy854(p) => p
+ case Dummy855(p) => p
+ case Dummy856(p) => p
+ case Dummy857(p) => p
+ case Dummy858(p) => p
+ case Dummy859(p) => p
+ case Dummy860(p) => p
+ case Dummy861(p) => p
+ case Dummy862(p) => p
+ case Dummy863(p) => p
+ case Dummy864(p) => p
+ case Dummy865(p) => p
+ case Dummy866(p) => p
+ case Dummy867(p) => p
+ case Dummy868(p) => p
+ case Dummy869(p) => p
+ case Dummy870(p) => p
+ case Dummy871(p) => p
+ case Dummy872(p) => p
+ case Dummy873(p) => p
+ case Dummy874(p) => p
+ case Dummy875(p) => p
+ case Dummy876(p) => p
+ case Dummy877(p) => p
+ case Dummy878(p) => p
+ case Dummy879(p) => p
+ case Dummy880(p) => p
+ case Dummy881(p) => p
+ case Dummy882(p) => p
+ case Dummy883(p) => p
+ case Dummy884(p) => p
+ case Dummy885(p) => p
+ case Dummy886(p) => p
+ case Dummy887(p) => p
+ case Dummy888(p) => p
+ case Dummy889(p) => p
+ case Dummy890(p) => p
+ case Dummy891(p) => p
+ case Dummy892(p) => p
+ case Dummy893(p) => p
+ case Dummy894(p) => p
+ case Dummy895(p) => p
+ case Dummy896(p) => p
+ case Dummy897(p) => p
+ case Dummy898(p) => p
+ case Dummy899(p) => p
+ case Dummy900(p) => p
+ case Dummy901(p) => p
+ case Dummy902(p) => p
+ case Dummy903(p) => p
+ case Dummy904(p) => p
+ case Dummy905(p) => p
+ case Dummy906(p) => p
+ case Dummy907(p) => p
+ case Dummy908(p) => p
+ case Dummy909(p) => p
+ case Dummy910(p) => p
+ case Dummy911(p) => p
+ case Dummy912(p) => p
+ case Dummy913(p) => p
+ case Dummy914(p) => p
+ case Dummy915(p) => p
+ case Dummy916(p) => p
+ case Dummy917(p) => p
+ case Dummy918(p) => p
+ case Dummy919(p) => p
+ case Dummy920(p) => p
+ case Dummy921(p) => p
+ case Dummy922(p) => p
+ case Dummy923(p) => p
+ case Dummy924(p) => p
+ case Dummy925(p) => p
+ case Dummy926(p) => p
+ case Dummy927(p) => p
+ case Dummy928(p) => p
+ case Dummy929(p) => p
+ case Dummy930(p) => p
+ case Dummy931(p) => p
+ case Dummy932(p) => p
+ case Dummy933(p) => p
+ case Dummy934(p) => p
+ case Dummy935(p) => p
+ case Dummy936(p) => p
+ case Dummy937(p) => p
+ case Dummy938(p) => p
+ case Dummy939(p) => p
+ case Dummy940(p) => p
+ case Dummy941(p) => p
+ case Dummy942(p) => p
+ case Dummy943(p) => p
+ case Dummy944(p) => p
+ case Dummy945(p) => p
+ case Dummy946(p) => p
+ case Dummy947(p) => p
+ case Dummy948(p) => p
+ case Dummy949(p) => p
+ case Dummy950(p) => p
+ case Dummy951(p) => p
+ case Dummy952(p) => p
+ case Dummy953(p) => p
+ case Dummy954(p) => p
+ case Dummy955(p) => p
+ case Dummy956(p) => p
+ case Dummy957(p) => p
+ case Dummy958(p) => p
+ case Dummy959(p) => p
+ case Dummy960(p) => p
+ case Dummy961(p) => p
+ case Dummy962(p) => p
+ case Dummy963(p) => p
+ case Dummy964(p) => p
+ case Dummy965(p) => p
+ case Dummy966(p) => p
+ case Dummy967(p) => p
+ case Dummy968(p) => p
+ case Dummy969(p) => p
+ case Dummy970(p) => p
+ case Dummy971(p) => p
+ case Dummy972(p) => p
+ case Dummy973(p) => p
+ case Dummy974(p) => p
+ case Dummy975(p) => p
+ case Dummy976(p) => p
+ case Dummy977(p) => p
+ case Dummy978(p) => p
+ case Dummy979(p) => p
+ case Dummy980(p) => p
+ case Dummy981(p) => p
+ case Dummy982(p) => p
+ case Dummy983(p) => p
+ case Dummy984(p) => p
+ case Dummy985(p) => p
+ case Dummy986(p) => p
+ case Dummy987(p) => p
+ case Dummy988(p) => p
+ case Dummy989(p) => p
+ case Dummy990(p) => p
+ case Dummy991(p) => p
+ case Dummy992(p) => p
+ case Dummy993(p) => p
+ case Dummy994(p) => p
+ case Dummy995(p) => p
+ case Dummy996(p) => p
+ case Dummy997(p) => p
+ case Dummy998(p) => p
+ case Dummy999(p) => p
+ case Dummy1000(p) => p
+ case Dummy1001(p) => p
+ case Dummy1002(p) => p
+ case Dummy1003(p) => p
+ case Dummy1004(p) => p
+ case Dummy1005(p) => p
+ case Dummy1006(p) => p
+ case Dummy1007(p) => p
+ case Dummy1008(p) => p
+ case Dummy1009(p) => p
+ case Dummy1010(p) => p
+ case Dummy1011(p) => p
+ case Dummy1012(p) => p
+ case Dummy1013(p) => p
+ case Dummy1014(p) => p
+ case Dummy1015(p) => p
+ case Dummy1016(p) => p
+ case Dummy1017(p) => p
+ case Dummy1018(p) => p
+ case Dummy1019(p) => p
+ case Dummy1020(p) => p
+ case Dummy1021(p) => p
+ case Dummy1022(p) => p
+ case Dummy1023(p) => p
+ case Dummy1024(p) => p
+ case Dummy1025(p) => p
+ case Dummy1026(p) => p
+ case Dummy1027(p) => p
+ case Dummy1028(p) => p
+ case Dummy1029(p) => p
+ case Dummy1030(p) => p
+ case Dummy1031(p) => p
+ case Dummy1032(p) => p
+ case Dummy1033(p) => p
+ case Dummy1034(p) => p
+ case Dummy1035(p) => p
+ case Dummy1036(p) => p
+ case Dummy1037(p) => p
+ case Dummy1038(p) => p
+ case Dummy1039(p) => p
+ case Dummy1040(p) => p
+ case Dummy1041(p) => p
+ case Dummy1042(p) => p
+ case Dummy1043(p) => p
+ case Dummy1044(p) => p
+ case Dummy1045(p) => p
+ case Dummy1046(p) => p
+ case Dummy1047(p) => p
+ case Dummy1048(p) => p
+ case Dummy1049(p) => p
+ case Dummy1050(p) => p
+ case Dummy1051(p) => p
+ case Dummy1052(p) => p
+ case Dummy1053(p) => p
+ case Dummy1054(p) => p
+ case Dummy1055(p) => p
+ case Dummy1056(p) => p
+ case Dummy1057(p) => p
+ case Dummy1058(p) => p
+ case Dummy1059(p) => p
+ case Dummy1060(p) => p
+ case Dummy1061(p) => p
+ case Dummy1062(p) => p
+ case Dummy1063(p) => p
+ case Dummy1064(p) => p
+ case Dummy1065(p) => p
+ case Dummy1066(p) => p
+ case Dummy1067(p) => p
+ case Dummy1068(p) => p
+ case Dummy1069(p) => p
+ case Dummy1070(p) => p
+ case Dummy1071(p) => p
+ case Dummy1072(p) => p
+ case Dummy1073(p) => p
+ case Dummy1074(p) => p
+ case Dummy1075(p) => p
+ case Dummy1076(p) => p
+ case Dummy1077(p) => p
+ case Dummy1078(p) => p
+ case Dummy1079(p) => p
+ case Dummy1080(p) => p
+ case Dummy1081(p) => p
+ case Dummy1082(p) => p
+ case Dummy1083(p) => p
+ case Dummy1084(p) => p
+ case Dummy1085(p) => p
+ case Dummy1086(p) => p
+ case Dummy1087(p) => p
+ case Dummy1088(p) => p
+ case Dummy1089(p) => p
+ case Dummy1090(p) => p
+ case Dummy1091(p) => p
+ case Dummy1092(p) => p
+ case Dummy1093(p) => p
+ case Dummy1094(p) => p
+ case Dummy1095(p) => p
+ case Dummy1096(p) => p
+ case Dummy1097(p) => p
+ case Dummy1098(p) => p
+ case Dummy1099(p) => p
+ case Dummy1100(p) => p
+ case Dummy1101(p) => p
+ case Dummy1102(p) => p
+ case Dummy1103(p) => p
+ case Dummy1104(p) => p
+ case Dummy1105(p) => p
+ case Dummy1106(p) => p
+ case Dummy1107(p) => p
+ case Dummy1108(p) => p
+ case Dummy1109(p) => p
+ case Dummy1110(p) => p
+ case Dummy1111(p) => p
+ case Dummy1112(p) => p
+ case Dummy1113(p) => p
+ case Dummy1114(p) => p
+ case Dummy1115(p) => p
+ case Dummy1116(p) => p
+ case Dummy1117(p) => p
+ case Dummy1118(p) => p
+ case Dummy1119(p) => p
+ case Dummy1120(p) => p
+ case Dummy1121(p) => p
+ case Dummy1122(p) => p
+ case Dummy1123(p) => p
+ case Dummy1124(p) => p
+ case Dummy1125(p) => p
+ case Dummy1126(p) => p
+ case Dummy1127(p) => p
+ case Dummy1128(p) => p
+ case Dummy1129(p) => p
+ case Dummy1130(p) => p
+ case Dummy1131(p) => p
+ case Dummy1132(p) => p
+ case Dummy1133(p) => p
+ case Dummy1134(p) => p
+ case Dummy1135(p) => p
+ case Dummy1136(p) => p
+ case Dummy1137(p) => p
+ case Dummy1138(p) => p
+ case Dummy1139(p) => p
+ case Dummy1140(p) => p
+ case Dummy1141(p) => p
+ case Dummy1142(p) => p
+ case Dummy1143(p) => p
+ case Dummy1144(p) => p
+ case Dummy1145(p) => p
+ case Dummy1146(p) => p
+ case Dummy1147(p) => p
+ case Dummy1148(p) => p
+ case Dummy1149(p) => p
+ case Dummy1150(p) => p
+ case Dummy1151(p) => p
+ case Dummy1152(p) => p
+ case Dummy1153(p) => p
+ case Dummy1154(p) => p
+ case Dummy1155(p) => p
+ case Dummy1156(p) => p
+ case Dummy1157(p) => p
+ case Dummy1158(p) => p
+ case Dummy1159(p) => p
+ case Dummy1160(p) => p
+ case Dummy1161(p) => p
+ case Dummy1162(p) => p
+ case Dummy1163(p) => p
+ case Dummy1164(p) => p
+ case Dummy1165(p) => p
+ case Dummy1166(p) => p
+ case Dummy1167(p) => p
+ case Dummy1168(p) => p
+ case Dummy1169(p) => p
+ case Dummy1170(p) => p
+ case Dummy1171(p) => p
+ case Dummy1172(p) => p
+ case Dummy1173(p) => p
+ case Dummy1174(p) => p
+ case Dummy1175(p) => p
+ case Dummy1176(p) => p
+ case Dummy1177(p) => p
+ case Dummy1178(p) => p
+ case Dummy1179(p) => p
+ case Dummy1180(p) => p
+ case Dummy1181(p) => p
+ case Dummy1182(p) => p
+ case Dummy1183(p) => p
+ case Dummy1184(p) => p
+ case Dummy1185(p) => p
+ case Dummy1186(p) => p
+ case Dummy1187(p) => p
+ case Dummy1188(p) => p
+ case Dummy1189(p) => p
+ case Dummy1190(p) => p
+ case Dummy1191(p) => p
+ case Dummy1192(p) => p
+ case Dummy1193(p) => p
+ case Dummy1194(p) => p
+ case Dummy1195(p) => p
+ case Dummy1196(p) => p
+ case Dummy1197(p) => p
+ case Dummy1198(p) => p
+ case Dummy1199(p) => p
+ case Dummy1200(p) => p
+ case Dummy1201(p) => p
+ case Dummy1202(p) => p
+ case Dummy1203(p) => p
+ case Dummy1204(p) => p
+ case Dummy1205(p) => p
+ case Dummy1206(p) => p
+ case Dummy1207(p) => p
+ case Dummy1208(p) => p
+ case Dummy1209(p) => p
+ case Dummy1210(p) => p
+ case Dummy1211(p) => p
+ case Dummy1212(p) => p
+ case Dummy1213(p) => p
+ case Dummy1214(p) => p
+ case Dummy1215(p) => p
+ case Dummy1216(p) => p
+ case Dummy1217(p) => p
+ case Dummy1218(p) => p
+ case Dummy1219(p) => p
+ case Dummy1220(p) => p
+ case Dummy1221(p) => p
+ case Dummy1222(p) => p
+ case Dummy1223(p) => p
+ case Dummy1224(p) => p
+ case Dummy1225(p) => p
+ case Dummy1226(p) => p
+ case Dummy1227(p) => p
+ case Dummy1228(p) => p
+ case Dummy1229(p) => p
+ case Dummy1230(p) => p
+ case Dummy1231(p) => p
+ case Dummy1232(p) => p
+ case Dummy1233(p) => p
+ case Dummy1234(p) => p
+ case Dummy1235(p) => p
+ case Dummy1236(p) => p
+ case Dummy1237(p) => p
+ case Dummy1238(p) => p
+ case Dummy1239(p) => p
+ case Dummy1240(p) => p
+ case Dummy1241(p) => p
+ case Dummy1242(p) => p
+ case Dummy1243(p) => p
+ case Dummy1244(p) => p
+ case Dummy1245(p) => p
+ case Dummy1246(p) => p
+ case Dummy1247(p) => p
+ case Dummy1248(p) => p
+ case Dummy1249(p) => p
+ case Dummy1250(p) => p
+ case Dummy1251(p) => p
+ case Dummy1252(p) => p
+ case Dummy1253(p) => p
+ case Dummy1254(p) => p
+ case Dummy1255(p) => p
+ case Dummy1256(p) => p
+ case Dummy1257(p) => p
+ case Dummy1258(p) => p
+ case Dummy1259(p) => p
+ case Dummy1260(p) => p
+ case Dummy1261(p) => p
+ case Dummy1262(p) => p
+ case Dummy1263(p) => p
+ case Dummy1264(p) => p
+ case Dummy1265(p) => p
+ case Dummy1266(p) => p
+ case Dummy1267(p) => p
+ case Dummy1268(p) => p
+ case Dummy1269(p) => p
+ case Dummy1270(p) => p
+ case Dummy1271(p) => p
+ case Dummy1272(p) => p
+ case Dummy1273(p) => p
+ case Dummy1274(p) => p
+ case Dummy1275(p) => p
+ case Dummy1276(p) => p
+ case Dummy1277(p) => p
+ case Dummy1278(p) => p
+ case Dummy1279(p) => p
+ case Dummy1280(p) => p
+ case Dummy1281(p) => p
+ case Dummy1282(p) => p
+ case Dummy1283(p) => p
+ case Dummy1284(p) => p
+ case Dummy1285(p) => p
+ case Dummy1286(p) => p
+ case Dummy1287(p) => p
+ case Dummy1288(p) => p
+ case Dummy1289(p) => p
+ case Dummy1290(p) => p
+ case Dummy1291(p) => p
+ case Dummy1292(p) => p
+ case Dummy1293(p) => p
+ case Dummy1294(p) => p
+ case Dummy1295(p) => p
+ case Dummy1296(p) => p
+ case Dummy1297(p) => p
+ case Dummy1298(p) => p
+ case Dummy1299(p) => p
+ case Dummy1300(p) => p
+ case Dummy1301(p) => p
+ case Dummy1302(p) => p
+ case Dummy1303(p) => p
+ case Dummy1304(p) => p
+ case Dummy1305(p) => p
+ case Dummy1306(p) => p
+ case Dummy1307(p) => p
+ case Dummy1308(p) => p
+ case Dummy1309(p) => p
+ case Dummy1310(p) => p
+ case Dummy1311(p) => p
+ case Dummy1312(p) => p
+ case Dummy1313(p) => p
+ case Dummy1314(p) => p
+ case Dummy1315(p) => p
+ case Dummy1316(p) => p
+ case Dummy1317(p) => p
+ case Dummy1318(p) => p
+ case Dummy1319(p) => p
+ case Dummy1320(p) => p
+ case Dummy1321(p) => p
+ case Dummy1322(p) => p
+ case Dummy1323(p) => p
+ case Dummy1324(p) => p
+ case Dummy1325(p) => p
+ case Dummy1326(p) => p
+ case Dummy1327(p) => p
+ case Dummy1328(p) => p
+ case Dummy1329(p) => p
+ case Dummy1330(p) => p
+ case Dummy1331(p) => p
+ case Dummy1332(p) => p
+ case Dummy1333(p) => p
+ case Dummy1334(p) => p
+ case Dummy1335(p) => p
+ case Dummy1336(p) => p
+ case Dummy1337(p) => p
+ case Dummy1338(p) => p
+ case Dummy1339(p) => p
+ case Dummy1340(p) => p
+ case Dummy1341(p) => p
+ case Dummy1342(p) => p
+ case Dummy1343(p) => p
+ case Dummy1344(p) => p
+ case Dummy1345(p) => p
+ case Dummy1346(p) => p
+ case Dummy1347(p) => p
+ case Dummy1348(p) => p
+ case Dummy1349(p) => p
+ case Dummy1350(p) => p
+ case Dummy1351(p) => p
+ case Dummy1352(p) => p
+ case Dummy1353(p) => p
+ case Dummy1354(p) => p
+ case Dummy1355(p) => p
+ case Dummy1356(p) => p
+ case Dummy1357(p) => p
+ case Dummy1358(p) => p
+ case Dummy1359(p) => p
+ case Dummy1360(p) => p
+ case Dummy1361(p) => p
+ case Dummy1362(p) => p
+ case Dummy1363(p) => p
+ case Dummy1364(p) => p
+ case Dummy1365(p) => p
+ case Dummy1366(p) => p
+ case Dummy1367(p) => p
+ case Dummy1368(p) => p
+ case Dummy1369(p) => p
+ case Dummy1370(p) => p
+ case Dummy1371(p) => p
+ case Dummy1372(p) => p
+ case Dummy1373(p) => p
+ case Dummy1374(p) => p
+ case Dummy1375(p) => p
+ case Dummy1376(p) => p
+ case Dummy1377(p) => p
+ case Dummy1378(p) => p
+ case Dummy1379(p) => p
+ case Dummy1380(p) => p
+ case Dummy1381(p) => p
+ case Dummy1382(p) => p
+ case Dummy1383(p) => p
+ case Dummy1384(p) => p
+ case Dummy1385(p) => p
+ case Dummy1386(p) => p
+ case Dummy1387(p) => p
+ case Dummy1388(p) => p
+ case Dummy1389(p) => p
+ case Dummy1390(p) => p
+ case Dummy1391(p) => p
+ case Dummy1392(p) => p
+ case Dummy1393(p) => p
+ case Dummy1394(p) => p
+ case Dummy1395(p) => p
+ case Dummy1396(p) => p
+ case Dummy1397(p) => p
+ case Dummy1398(p) => p
+ case Dummy1399(p) => p
+ case Dummy1400(p) => p
+ case Dummy1401(p) => p
+ case Dummy1402(p) => p
+ case Dummy1403(p) => p
+ case Dummy1404(p) => p
+ case Dummy1405(p) => p
+ case Dummy1406(p) => p
+ case Dummy1407(p) => p
+ case Dummy1408(p) => p
+ case Dummy1409(p) => p
+ case Dummy1410(p) => p
+ case Dummy1411(p) => p
+ case Dummy1412(p) => p
+ case Dummy1413(p) => p
+ case Dummy1414(p) => p
+ case Dummy1415(p) => p
+ case Dummy1416(p) => p
+ case Dummy1417(p) => p
+ case Dummy1418(p) => p
+ case Dummy1419(p) => p
+ case Dummy1420(p) => p
+ case Dummy1421(p) => p
+ case Dummy1422(p) => p
+ case Dummy1423(p) => p
+ case Dummy1424(p) => p
+ case Dummy1425(p) => p
+ case Dummy1426(p) => p
+ case Dummy1427(p) => p
+ case Dummy1428(p) => p
+ case Dummy1429(p) => p
+ case Dummy1430(p) => p
+ case Dummy1431(p) => p
+ case Dummy1432(p) => p
+ case Dummy1433(p) => p
+ case Dummy1434(p) => p
+ case Dummy1435(p) => p
+ case Dummy1436(p) => p
+ case Dummy1437(p) => p
+ case Dummy1438(p) => p
+ case Dummy1439(p) => p
+ case Dummy1440(p) => p
+ case Dummy1441(p) => p
+ case Dummy1442(p) => p
+ case Dummy1443(p) => p
+ case Dummy1444(p) => p
+ case Dummy1445(p) => p
+ case Dummy1446(p) => p
+ case Dummy1447(p) => p
+ case Dummy1448(p) => p
+ case Dummy1449(p) => p
+ case Dummy1450(p) => p
+ case Dummy1451(p) => p
+ case Dummy1452(p) => p
+ case Dummy1453(p) => p
+ case Dummy1454(p) => p
+ case Dummy1455(p) => p
+ case Dummy1456(p) => p
+ case Dummy1457(p) => p
+ case Dummy1458(p) => p
+ case Dummy1459(p) => p
+ case Dummy1460(p) => p
+ case Dummy1461(p) => p
+ case Dummy1462(p) => p
+ case Dummy1463(p) => p
+ case Dummy1464(p) => p
+ case Dummy1465(p) => p
+ case Dummy1466(p) => p
+ case Dummy1467(p) => p
+ case Dummy1468(p) => p
+ case Dummy1469(p) => p
+ case Dummy1470(p) => p
+ case Dummy1471(p) => p
+ case Dummy1472(p) => p
+ case Dummy1473(p) => p
+ case Dummy1474(p) => p
+ case Dummy1475(p) => p
+ case Dummy1476(p) => p
+ case Dummy1477(p) => p
+ case Dummy1478(p) => p
+ case Dummy1479(p) => p
+ case Dummy1480(p) => p
+ case Dummy1481(p) => p
+ case Dummy1482(p) => p
+ case Dummy1483(p) => p
+ case Dummy1484(p) => p
+ case Dummy1485(p) => p
+ case Dummy1486(p) => p
+ case Dummy1487(p) => p
+ case Dummy1488(p) => p
+ case Dummy1489(p) => p
+ case Dummy1490(p) => p
+ case Dummy1491(p) => p
+ case Dummy1492(p) => p
+ case Dummy1493(p) => p
+ case Dummy1494(p) => p
+ case Dummy1495(p) => p
+ case Dummy1496(p) => p
+ case Dummy1497(p) => p
+ case Dummy1498(p) => p
+ case Dummy1499(p) => p
+ case Dummy1500(p) => p
+ case Dummy1501(p) => p
+ case Dummy1502(p) => p
+ case Dummy1503(p) => p
+ case Dummy1504(p) => p
+ case Dummy1505(p) => p
+ case Dummy1506(p) => p
+ case Dummy1507(p) => p
+ case Dummy1508(p) => p
+ case Dummy1509(p) => p
+ case Dummy1510(p) => p
+ case Dummy1511(p) => p
+ case Dummy1512(p) => p
+ case Dummy1513(p) => p
+ case Dummy1514(p) => p
+ case Dummy1515(p) => p
+ case Dummy1516(p) => p
+ case Dummy1517(p) => p
+ case Dummy1518(p) => p
+ case Dummy1519(p) => p
+ case Dummy1520(p) => p
+ case Dummy1521(p) => p
+ case Dummy1522(p) => p
+ case Dummy1523(p) => p
+ case Dummy1524(p) => p
+ case Dummy1525(p) => p
+ case Dummy1526(p) => p
+ case Dummy1527(p) => p
+ case Dummy1528(p) => p
+ case Dummy1529(p) => p
+ case Dummy1530(p) => p
+ case Dummy1531(p) => p
+ case Dummy1532(p) => p
+ case Dummy1533(p) => p
+ case Dummy1534(p) => p
+ case Dummy1535(p) => p
+ case Dummy1536(p) => p
+ case Dummy1537(p) => p
+ case Dummy1538(p) => p
+ case Dummy1539(p) => p
+ case Dummy1540(p) => p
+ case Dummy1541(p) => p
+ case Dummy1542(p) => p
+ case Dummy1543(p) => p
+ case Dummy1544(p) => p
+ case Dummy1545(p) => p
+ case Dummy1546(p) => p
+ case Dummy1547(p) => p
+ case Dummy1548(p) => p
+ case Dummy1549(p) => p
+ case Dummy1550(p) => p
+ case Dummy1551(p) => p
+ case Dummy1552(p) => p
+ case Dummy1553(p) => p
+ case Dummy1554(p) => p
+ case Dummy1555(p) => p
+ case Dummy1556(p) => p
+ case Dummy1557(p) => p
+ case Dummy1558(p) => p
+ case Dummy1559(p) => p
+ case Dummy1560(p) => p
+ case Dummy1561(p) => p
+ case Dummy1562(p) => p
+ case Dummy1563(p) => p
+ case Dummy1564(p) => p
+ case Dummy1565(p) => p
+ case Dummy1566(p) => p
+ case Dummy1567(p) => p
+ case Dummy1568(p) => p
+ case Dummy1569(p) => p
+ case Dummy1570(p) => p
+ case Dummy1571(p) => p
+ case Dummy1572(p) => p
+ case Dummy1573(p) => p
+ case Dummy1574(p) => p
+ case Dummy1575(p) => p
+ case Dummy1576(p) => p
+ case Dummy1577(p) => p
+ case Dummy1578(p) => p
+ case Dummy1579(p) => p
+ case Dummy1580(p) => p
+ case Dummy1581(p) => p
+ case Dummy1582(p) => p
+ case Dummy1583(p) => p
+ case Dummy1584(p) => p
+ case Dummy1585(p) => p
+ case Dummy1586(p) => p
+ case Dummy1587(p) => p
+ case Dummy1588(p) => p
+ case Dummy1589(p) => p
+ case Dummy1590(p) => p
+ case Dummy1591(p) => p
+ case Dummy1592(p) => p
+ case Dummy1593(p) => p
+ case Dummy1594(p) => p
+ case Dummy1595(p) => p
+ case Dummy1596(p) => p
+ case Dummy1597(p) => p
+ case Dummy1598(p) => p
+ case Dummy1599(p) => p
+ case Dummy1600(p) => p
+ case Dummy1601(p) => p
+ case Dummy1602(p) => p
+ case Dummy1603(p) => p
+ case Dummy1604(p) => p
+ case Dummy1605(p) => p
+ case Dummy1606(p) => p
+ case Dummy1607(p) => p
+ case Dummy1608(p) => p
+ case Dummy1609(p) => p
+ case Dummy1610(p) => p
+ case Dummy1611(p) => p
+ case Dummy1612(p) => p
+ case Dummy1613(p) => p
+ case Dummy1614(p) => p
+ case Dummy1615(p) => p
+ case Dummy1616(p) => p
+ case Dummy1617(p) => p
+ case Dummy1618(p) => p
+ case Dummy1619(p) => p
+ case Dummy1620(p) => p
+ case Dummy1621(p) => p
+ case Dummy1622(p) => p
+ case Dummy1623(p) => p
+ case Dummy1624(p) => p
+ case Dummy1625(p) => p
+ case Dummy1626(p) => p
+ case Dummy1627(p) => p
+ case Dummy1628(p) => p
+ case Dummy1629(p) => p
+ case Dummy1630(p) => p
+ case Dummy1631(p) => p
+ case Dummy1632(p) => p
+ case Dummy1633(p) => p
+ case Dummy1634(p) => p
+ case Dummy1635(p) => p
+ case Dummy1636(p) => p
+ case Dummy1637(p) => p
+ case Dummy1638(p) => p
+ case Dummy1639(p) => p
+ case Dummy1640(p) => p
+ case Dummy1641(p) => p
+ case Dummy1642(p) => p
+ case Dummy1643(p) => p
+ case Dummy1644(p) => p
+ case Dummy1645(p) => p
+ case Dummy1646(p) => p
+ case Dummy1647(p) => p
+ case Dummy1648(p) => p
+ case Dummy1649(p) => p
+ case Dummy1650(p) => p
+ case Dummy1651(p) => p
+ case Dummy1652(p) => p
+ case Dummy1653(p) => p
+ case Dummy1654(p) => p
+ case Dummy1655(p) => p
+ case Dummy1656(p) => p
+ case Dummy1657(p) => p
+ case Dummy1658(p) => p
+ case Dummy1659(p) => p
+ case Dummy1660(p) => p
+ case Dummy1661(p) => p
+ case Dummy1662(p) => p
+ case Dummy1663(p) => p
+ case Dummy1664(p) => p
+ case Dummy1665(p) => p
+ case Dummy1666(p) => p
+ case Dummy1667(p) => p
+ case Dummy1668(p) => p
+ case Dummy1669(p) => p
+ case Dummy1670(p) => p
+ case Dummy1671(p) => p
+ case Dummy1672(p) => p
+ case Dummy1673(p) => p
+ case Dummy1674(p) => p
+ case Dummy1675(p) => p
+ case Dummy1676(p) => p
+ case Dummy1677(p) => p
+ case Dummy1678(p) => p
+ case Dummy1679(p) => p
+ case Dummy1680(p) => p
+ case Dummy1681(p) => p
+ case Dummy1682(p) => p
+ case Dummy1683(p) => p
+ case Dummy1684(p) => p
+ case Dummy1685(p) => p
+ case Dummy1686(p) => p
+ case Dummy1687(p) => p
+ case Dummy1688(p) => p
+ case Dummy1689(p) => p
+ case Dummy1690(p) => p
+ case Dummy1691(p) => p
+ case Dummy1692(p) => p
+ case Dummy1693(p) => p
+ case Dummy1694(p) => p
+ case Dummy1695(p) => p
+ case Dummy1696(p) => p
+ case Dummy1697(p) => p
+ case Dummy1698(p) => p
+ case Dummy1699(p) => p
+ case Dummy1700(p) => p
+ case Dummy1701(p) => p
+ case Dummy1702(p) => p
+ case Dummy1703(p) => p
+ case Dummy1704(p) => p
+ case Dummy1705(p) => p
+ case Dummy1706(p) => p
+ case Dummy1707(p) => p
+ case Dummy1708(p) => p
+ case Dummy1709(p) => p
+ case Dummy1710(p) => p
+ case Dummy1711(p) => p
+ case Dummy1712(p) => p
+ case Dummy1713(p) => p
+ case Dummy1714(p) => p
+ case Dummy1715(p) => p
+ case Dummy1716(p) => p
+ case Dummy1717(p) => p
+ case Dummy1718(p) => p
+ case Dummy1719(p) => p
+ case Dummy1720(p) => p
+ case Dummy1721(p) => p
+ case Dummy1722(p) => p
+ case Dummy1723(p) => p
+ case Dummy1724(p) => p
+ case Dummy1725(p) => p
+ case Dummy1726(p) => p
+ case Dummy1727(p) => p
+ case Dummy1728(p) => p
+ case Dummy1729(p) => p
+ case Dummy1730(p) => p
+ case Dummy1731(p) => p
+ case Dummy1732(p) => p
+ case Dummy1733(p) => p
+ case Dummy1734(p) => p
+ case Dummy1735(p) => p
+ case Dummy1736(p) => p
+ case Dummy1737(p) => p
+ case Dummy1738(p) => p
+ case Dummy1739(p) => p
+ case Dummy1740(p) => p
+ case Dummy1741(p) => p
+ case Dummy1742(p) => p
+ case Dummy1743(p) => p
+ case Dummy1744(p) => p
+ case Dummy1745(p) => p
+ case Dummy1746(p) => p
+ case Dummy1747(p) => p
+ case Dummy1748(p) => p
+ case Dummy1749(p) => p
+ case Dummy1750(p) => p
+ case Dummy1751(p) => p
+ case Dummy1752(p) => p
+ case Dummy1753(p) => p
+ case Dummy1754(p) => p
+ case Dummy1755(p) => p
+ case Dummy1756(p) => p
+ case Dummy1757(p) => p
+ case Dummy1758(p) => p
+ case Dummy1759(p) => p
+ case Dummy1760(p) => p
+ case Dummy1761(p) => p
+ case Dummy1762(p) => p
+ case Dummy1763(p) => p
+ case Dummy1764(p) => p
+ case Dummy1765(p) => p
+ case Dummy1766(p) => p
+ case Dummy1767(p) => p
+ case Dummy1768(p) => p
+ case Dummy1769(p) => p
+ case Dummy1770(p) => p
+ case Dummy1771(p) => p
+ case Dummy1772(p) => p
+ case Dummy1773(p) => p
+ case Dummy1774(p) => p
+ case Dummy1775(p) => p
+ case Dummy1776(p) => p
+ case Dummy1777(p) => p
+ case Dummy1778(p) => p
+ case Dummy1779(p) => p
+ case Dummy1780(p) => p
+ case Dummy1781(p) => p
+ case Dummy1782(p) => p
+ case Dummy1783(p) => p
+ case Dummy1784(p) => p
+ case Dummy1785(p) => p
+ case Dummy1786(p) => p
+ case Dummy1787(p) => p
+ case Dummy1788(p) => p
+ case Dummy1789(p) => p
+ case Dummy1790(p) => p
+ case Dummy1791(p) => p
+ case Dummy1792(p) => p
+ case Dummy1793(p) => p
+ case Dummy1794(p) => p
+ case Dummy1795(p) => p
+ case Dummy1796(p) => p
+ case Dummy1797(p) => p
+ case Dummy1798(p) => p
+ case Dummy1799(p) => p
+ case Dummy1800(p) => p
+ case Dummy1801(p) => p
+ case Dummy1802(p) => p
+ case Dummy1803(p) => p
+ case Dummy1804(p) => p
+ case Dummy1805(p) => p
+ case Dummy1806(p) => p
+ case Dummy1807(p) => p
+ case Dummy1808(p) => p
+ case Dummy1809(p) => p
+ case Dummy1810(p) => p
+ case Dummy1811(p) => p
+ case Dummy1812(p) => p
+ case Dummy1813(p) => p
+ case Dummy1814(p) => p
+ case Dummy1815(p) => p
+ case Dummy1816(p) => p
+ case Dummy1817(p) => p
+ case Dummy1818(p) => p
+ case Dummy1819(p) => p
+ case Dummy1820(p) => p
+ case Dummy1821(p) => p
+ case Dummy1822(p) => p
+ case Dummy1823(p) => p
+ case Dummy1824(p) => p
+ case Dummy1825(p) => p
+ case Dummy1826(p) => p
+ case Dummy1827(p) => p
+ case Dummy1828(p) => p
+ case Dummy1829(p) => p
+ case Dummy1830(p) => p
+ case Dummy1831(p) => p
+ case Dummy1832(p) => p
+ case Dummy1833(p) => p
+ case Dummy1834(p) => p
+ case Dummy1835(p) => p
+ case Dummy1836(p) => p
+ case Dummy1837(p) => p
+ case Dummy1838(p) => p
+ case Dummy1839(p) => p
+ case Dummy1840(p) => p
+ case Dummy1841(p) => p
+ case Dummy1842(p) => p
+ case Dummy1843(p) => p
+ case Dummy1844(p) => p
+ case Dummy1845(p) => p
+ case Dummy1846(p) => p
+ case Dummy1847(p) => p
+ case Dummy1848(p) => p
+ case Dummy1849(p) => p
+ case Dummy1850(p) => p
+ case Dummy1851(p) => p
+ case Dummy1852(p) => p
+ case Dummy1853(p) => p
+ case Dummy1854(p) => p
+ case Dummy1855(p) => p
+ case Dummy1856(p) => p
+ case Dummy1857(p) => p
+ case Dummy1858(p) => p
+ case Dummy1859(p) => p
+ case Dummy1860(p) => p
+ case Dummy1861(p) => p
+ case Dummy1862(p) => p
+ case Dummy1863(p) => p
+ case Dummy1864(p) => p
+ case Dummy1865(p) => p
+ case Dummy1866(p) => p
+ case Dummy1867(p) => p
+ case Dummy1868(p) => p
+ case Dummy1869(p) => p
+ case Dummy1870(p) => p
+ case Dummy1871(p) => p
+ case Dummy1872(p) => p
+ case Dummy1873(p) => p
+ case Dummy1874(p) => p
+ case Dummy1875(p) => p
+ case Dummy1876(p) => p
+ case Dummy1877(p) => p
+ case Dummy1878(p) => p
+ case Dummy1879(p) => p
+ case Dummy1880(p) => p
+ case Dummy1881(p) => p
+ case Dummy1882(p) => p
+ case Dummy1883(p) => p
+ case Dummy1884(p) => p
+ case Dummy1885(p) => p
+ case Dummy1886(p) => p
+ case Dummy1887(p) => p
+ case Dummy1888(p) => p
+ case Dummy1889(p) => p
+ case Dummy1890(p) => p
+ case Dummy1891(p) => p
+ case Dummy1892(p) => p
+ case Dummy1893(p) => p
+ case Dummy1894(p) => p
+ case Dummy1895(p) => p
+ case Dummy1896(p) => p
+ case Dummy1897(p) => p
+ case Dummy1898(p) => p
+ case Dummy1899(p) => p
+ case Dummy1900(p) => p
+ case Dummy1901(p) => p
+ case Dummy1902(p) => p
+ case Dummy1903(p) => p
+ case Dummy1904(p) => p
+ case Dummy1905(p) => p
+ case Dummy1906(p) => p
+ case Dummy1907(p) => p
+ case Dummy1908(p) => p
+ case Dummy1909(p) => p
+ case Dummy1910(p) => p
+ case Dummy1911(p) => p
+ case Dummy1912(p) => p
+ case Dummy1913(p) => p
+ case Dummy1914(p) => p
+ case Dummy1915(p) => p
+ case Dummy1916(p) => p
+ case Dummy1917(p) => p
+ case Dummy1918(p) => p
+ case Dummy1919(p) => p
+ case Dummy1920(p) => p
+ case Dummy1921(p) => p
+ case Dummy1922(p) => p
+ case Dummy1923(p) => p
+ case Dummy1924(p) => p
+ case Dummy1925(p) => p
+ case Dummy1926(p) => p
+ case Dummy1927(p) => p
+ case Dummy1928(p) => p
+ case Dummy1929(p) => p
+ case Dummy1930(p) => p
+ case Dummy1931(p) => p
+ case Dummy1932(p) => p
+ case Dummy1933(p) => p
+ case Dummy1934(p) => p
+ case Dummy1935(p) => p
+ case Dummy1936(p) => p
+ case Dummy1937(p) => p
+ case Dummy1938(p) => p
+ case Dummy1939(p) => p
+ case Dummy1940(p) => p
+ case Dummy1941(p) => p
+ case Dummy1942(p) => p
+ case Dummy1943(p) => p
+ case Dummy1944(p) => p
+ case Dummy1945(p) => p
+ case Dummy1946(p) => p
+ case Dummy1947(p) => p
+ case Dummy1948(p) => p
+ case Dummy1949(p) => p
+ case Dummy1950(p) => p
+ case Dummy1951(p) => p
+ case Dummy1952(p) => p
+ case Dummy1953(p) => p
+ case Dummy1954(p) => p
+ case Dummy1955(p) => p
+ case Dummy1956(p) => p
+ case Dummy1957(p) => p
+ case Dummy1958(p) => p
+ case Dummy1959(p) => p
+ case Dummy1960(p) => p
+ case Dummy1961(p) => p
+ case Dummy1962(p) => p
+ case Dummy1963(p) => p
+ case Dummy1964(p) => p
+ case Dummy1965(p) => p
+ case Dummy1966(p) => p
+ case Dummy1967(p) => p
+ case Dummy1968(p) => p
+ case Dummy1969(p) => p
+ case Dummy1970(p) => p
+ case Dummy1971(p) => p
+ case Dummy1972(p) => p
+ case Dummy1973(p) => p
+ case Dummy1974(p) => p
+ case Dummy1975(p) => p
+ case Dummy1976(p) => p
+ case Dummy1977(p) => p
+ case Dummy1978(p) => p
+ case Dummy1979(p) => p
+ case Dummy1980(p) => p
+ case Dummy1981(p) => p
+ case Dummy1982(p) => p
+ case Dummy1983(p) => p
+ case Dummy1984(p) => p
+ case Dummy1985(p) => p
+ case Dummy1986(p) => p
+ case Dummy1987(p) => p
+ case Dummy1988(p) => p
+ case Dummy1989(p) => p
+ case Dummy1990(p) => p
+ case Dummy1991(p) => p
+ case Dummy1992(p) => p
+ case Dummy1993(p) => p
+ case Dummy1994(p) => p
+ case Dummy1995(p) => p
+ case Dummy1996(p) => p
+ case Dummy1997(p) => p
+ case Dummy1998(p) => p
+ case Dummy1999(p) => p
+}
+}
diff --git a/test/disabled/run/t6987.check b/test/disabled/run/t6987.check
new file mode 100644
index 0000000000..86fc96c679
--- /dev/null
+++ b/test/disabled/run/t6987.check
@@ -0,0 +1 @@
+got successful verbose results!
diff --git a/test/disabled/run/t6987.scala b/test/disabled/run/t6987.scala
new file mode 100644
index 0000000000..37e91d61ae
--- /dev/null
+++ b/test/disabled/run/t6987.scala
@@ -0,0 +1,43 @@
+import java.io._
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+ val startupLatch = new CountDownLatch(1)
+ // we have to explicitly launch our server because when the client launches a server it uses
+ // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+ // happens to be in the path gets used
+ val t = new Thread(new Runnable {
+ def run() = {
+ CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+ }
+ })
+ t setDaemon true
+ t.start()
+ if (!startupLatch.await(2, TimeUnit.MINUTES))
+ sys error "Timeout waiting for server to start"
+
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ val success = (scala.Console withOut ps) {
+ // shut down the server via the client using the verbose flag
+ CompileClient.process(Array("-shutdown", "-verbose"))
+ }
+
+ // now make sure we got success and a verbose result
+ val msg = baos.toString()
+
+ if (success) {
+ if (msg contains "Settings after normalizing paths") {
+ println("got successful verbose results!")
+ } else {
+ println("did not get the string expected, full results were:")
+ println(msg)
+ }
+ } else {
+ println("got a failure. Full results were:")
+ println(msg)
+ }
+ scala.Console.flush
+}
diff --git a/test/files/disabled/run/t4602.scala b/test/files/disabled/run/t4602.scala
new file mode 100644
index 0000000000..73ba231ccf
--- /dev/null
+++ b/test/files/disabled/run/t4602.scala
@@ -0,0 +1,57 @@
+import java.io.{File, FileOutputStream, BufferedOutputStream, FileWriter, ByteArrayOutputStream, PrintStream}
+import tools.nsc.{CompileClient, CompileServer}
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+
+object Test extends App {
+ val startupLatch = new CountDownLatch(1)
+ // we have to explicitly launch our server because when the client launches a server it uses
+ // the "scala" shell command meaning whatever version of scala (and whatever version of libraries)
+ // happens to be in the path gets used
+ val t = new Thread(new Runnable {
+ def run() = {
+ CompileServer.execute(() => startupLatch.countDown(), Array[String]())
+ }
+ })
+ t setDaemon true
+ t.start()
+ if (!startupLatch.await(2, TimeUnit.MINUTES))
+ sys error "Timeout waiting for server to start"
+
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ val outdir = scala.reflect.io.Directory(sys.props("partest.output"))
+
+ val dirNameAndPath = (1 to 2).toList map {number =>
+ val name = s"Hello${number}"
+ val dir = outdir / number.toString
+ (dir, name, dir / s"${name}.scala")
+ }
+
+ dirNameAndPath foreach {case (dir, name, path) =>
+ dir.createDirectory()
+ val file = path.jfile
+ val out = new FileWriter(file)
+ try
+ out.write(s"object ${name}\n")
+ finally
+ out.close
+ }
+
+ val success = (scala.Console withOut ps) {
+ dirNameAndPath foreach {case (path, name, _) =>
+ CompileClient.process(Array("-verbose", "-current-dir", path.toString, s"${name}.scala"))
+ }
+
+ CompileClient.process(Array("-shutdown"))
+ }
+
+ // now make sure we got success and the correct normalized paths
+ val msg = baos.toString()
+
+ assert(success, s"got a failure. Full results were: \n${msg}")
+ dirNameAndPath foreach {case (_, _, path) =>
+ val expected = s"Input files after normalizing paths: ${path}"
+ assert(msg contains expected, s"could not find '${expected}' in output. Full results were: \n${msg}")
+ }
+}
diff --git a/test/files/instrumented/t6611.check b/test/files/instrumented/t6611.check
new file mode 100644
index 0000000000..5cd691e93a
--- /dev/null
+++ b/test/files/instrumented/t6611.check
@@ -0,0 +1 @@
+Method call statistics:
diff --git a/test/files/instrumented/t6611.scala b/test/files/instrumented/t6611.scala
new file mode 100644
index 0000000000..4c52f8a5ef
--- /dev/null
+++ b/test/files/instrumented/t6611.scala
@@ -0,0 +1,35 @@
+import scala.tools.partest.instrumented.Instrumentation._
+
+object Test {
+ def main(args: Array[String]) {
+ startProfiling()
+
+ // tests optimization in Cleanup for varargs reference arrays
+ Array("")
+
+
+ Array(true)
+ Array(true, false)
+ Array(1: Byte)
+ Array(1: Byte, 2: Byte)
+ Array(1: Short)
+ Array(1: Short, 2: Short)
+ Array(1)
+ Array(1, 2)
+ Array(1L)
+ Array(1L, 2L)
+ Array(1d)
+ Array(1d, 2d)
+ Array(1f)
+ Array(1f, 2f)
+
+ /* Not currently optimized:
+ Array[Int](1, 2) etc
+ Array(())
+ Array((), ())
+ */
+
+ stopProfiling()
+ printStatistics()
+ }
+}
diff --git a/test/files/jvm/bytecode-test-example.check b/test/files/jvm/bytecode-test-example.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/jvm/bytecode-test-example/Foo_1.scala b/test/files/jvm/bytecode-test-example/Foo_1.scala
new file mode 100644
index 0000000000..4f679d156f
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Foo_1.scala
@@ -0,0 +1,9 @@
+class Foo_1 {
+ def foo(x: AnyRef): Int = {
+ val bool = x == null
+ if (x != null)
+ 1
+ else
+ 0
+ }
+}
diff --git a/test/files/jvm/bytecode-test-example/Test.scala b/test/files/jvm/bytecode-test-example/Test.scala
new file mode 100644
index 0000000000..d668059cb7
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Test.scala
@@ -0,0 +1,32 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("Foo_1")
+ val methodNode = getMethod(classNode, "foo")
+ println(countNullChecks(methodNode.instructions))
+ }
+
+ def countNullChecks(insnList: InsnList): Int = {
+ /** Is the given instruction a null check?
+ * NOTE
+ * This will detect a direct null comparison as in
+ * if (x == null) ...
+ * and not an indirect one as in
+ * val foo = null
+ * if (x == foo) ...
+ */
+ def isNullCheck(node: asm.tree.AbstractInsnNode): Boolean = {
+ val opcode = node.getOpcode
+ (opcode == asm.Opcodes.IFNULL) || (opcode == asm.Opcodes.IFNONNULL)
+ }
+ insnList.iterator.asScala.count(isNullCheck)
+ }
+}
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index 8674be168c..0efa83fbd9 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -70,7 +70,19 @@ object FutureTests extends MinimalScalaTest {
//FIXME should check
}
}
-
+
+ "The default ExecutionContext" should {
+ "report uncaught exceptions" in {
+ val p = Promise[Throwable]()
+ val logThrowable: Throwable => Unit = p.trySuccess(_)
+ val ec: ExecutionContext = ExecutionContext.fromExecutor(null, logThrowable)
+
+ val t = new NotImplementedError("foo")
+ val f = Future(throw t)(ec)
+ Await.result(p.future, 2.seconds) mustBe t
+ }
+ }
+
"A future with global ExecutionContext" should {
import ExecutionContext.Implicits._
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.check b/test/files/jvm/patmat_opt_ignore_underscore.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_ignore_underscore.flags b/test/files/jvm/patmat_opt_ignore_underscore.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
new file mode 100644
index 0000000000..fa3639380d
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore/Analyzed_1.scala
@@ -0,0 +1,29 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+// this is not the best test for shielding against regressing on this particular issue,
+// but it sets the stage for checking the bytecode emitted by the pattern matcher and
+// comparing it to manually tuned code using if/then/else etc.
+class SameBytecode {
+ case class Foo(x: Any, y: String)
+
+ def a =
+ Foo(1, "a") match {
+ case Foo(_: String, y) => y
+ }
+
+ // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
+ // the test checks that bytecode for a and b is identical (modulo line numbers)
+ // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
+ // note that the actual tree is quite bad: we do an unnecessary null check, isInstanceOf and local val (x3)
+ // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
+ def b: String = {
+ val x1 = Foo(1, "a")
+ if (x1.ne(null)) {
+ if (x1.x.isInstanceOf[String]) {
+ return x1.y
+ }
+ }
+
+ throw new MatchError(x1)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_ignore_underscore/test.scala b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
new file mode 100644
index 0000000000..6179101a7e
--- /dev/null
+++ b/test/files/jvm/patmat_opt_ignore_underscore/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.check b/test/files/jvm/patmat_opt_no_nullcheck.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_no_nullcheck.flags b/test/files/jvm/patmat_opt_no_nullcheck.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
new file mode 100644
index 0000000000..3a594c401e
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck/Analyzed_1.scala
@@ -0,0 +1,24 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+case class Foo(x: Any)
+
+class SameBytecode {
+ def a =
+ (Foo(1): Any) match {
+ case Foo(_: String) =>
+ }
+
+ // there's no null check
+ def b: Unit = {
+ val x1: Any = Foo(1)
+ if (x1.isInstanceOf[Foo]) {
+ val x3 = x1.asInstanceOf[Foo]
+ if (x3.x.isInstanceOf[String]) {
+ val x = ()
+ return
+ }
+ }
+
+ throw new MatchError(x1)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_no_nullcheck/test.scala b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
new file mode 100644
index 0000000000..2927e763d5
--- /dev/null
+++ b/test/files/jvm/patmat_opt_no_nullcheck/test.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.check b/test/files/jvm/patmat_opt_primitive_typetest.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/patmat_opt_primitive_typetest.flags b/test/files/jvm/patmat_opt_primitive_typetest.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
new file mode 100644
index 0000000000..e5db6c4dd0
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest/Analyzed_1.scala
@@ -0,0 +1,24 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+class SameBytecode {
+ case class Foo(x: Int, y: String)
+
+ def a =
+ Foo(1, "a") match {
+ case Foo(_: Int, y) => y
+ }
+
+ // this method's body holds the tree that should be generated by the pattern matcher for method a (-Xprint:patmat)
+ // the test checks that bytecode for a and b is identical (modulo line numbers)
+ // we can't diff trees as they are quite different (patmat uses jumps to labels that cannot be expressed in source, for example)
+ // note that the actual tree is quite bad: we do an unnecessary null check, and local val (x3)
+ // some of these will be fixed soon (the initial null check is for the scrutinee, which is harder to fix in patmat)
+ def b: String = {
+ val x1 = Foo(1, "a")
+ if (x1.ne(null)) {
+ return x1.y
+ }
+
+ throw new MatchError(x1)
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/patmat_opt_primitive_typetest/test.scala b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
new file mode 100644
index 0000000000..2927e763d5
--- /dev/null
+++ b/test/files/jvm/patmat_opt_primitive_typetest/test.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.BytecodeTest
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ sameBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"))
+ }
+}
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index b529bca38a..b2b4183564 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -134,6 +134,12 @@ trait FutureCallbacks extends TestBase {
assert(false)
}
}
+
+ def testThatNestedCallbacksDoNotYieldStackOverflow(): Unit = {
+ val promise = Promise[Int]
+ (0 to 10000).map(Future(_)).foldLeft(promise.future)((f1, f2) => f2.flatMap(i => f1))
+ promise.success(-1)
+ }
testOnSuccess()
testOnSuccessWhenCompleted()
@@ -143,6 +149,7 @@ trait FutureCallbacks extends TestBase {
// testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
//TODO: this test is currently problematic, because NonFatal does not match InterruptedException
//testOnFailureWhenSpecialThrowable(7, new InterruptedException)
+ testThatNestedCallbacksDoNotYieldStackOverflow()
testOnFailureWhenTimeoutException()
}
diff --git a/test/files/jvm/t6941.check b/test/files/jvm/t6941.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/t6941.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/t6941.flags b/test/files/jvm/t6941.flags
new file mode 100644
index 0000000000..49d036a887
--- /dev/null
+++ b/test/files/jvm/t6941.flags
@@ -0,0 +1 @@
+-optimize
diff --git a/test/files/jvm/t6941/Analyzed_1.scala b/test/files/jvm/t6941/Analyzed_1.scala
new file mode 100644
index 0000000000..549abd5e64
--- /dev/null
+++ b/test/files/jvm/t6941/Analyzed_1.scala
@@ -0,0 +1,11 @@
+// this class's bytecode, compiled under -optimize, is analyzed by the test
+// method a's bytecode should be identical to method b's bytecode
+class SameBytecode {
+ def a(xs: List[Int]) = xs match {
+ case x :: _ => x
+ }
+
+ def b(xs: List[Int]) = xs match {
+ case xs: ::[Int] => xs.hd$1
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/t6941/test.scala b/test/files/jvm/t6941/test.scala
new file mode 100644
index 0000000000..248617f71f
--- /dev/null
+++ b/test/files/jvm/t6941/test.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ def show: Unit = {
+ val classNode = loadClassNode("SameBytecode")
+ similarBytecode(getMethod(classNode, "a"), getMethod(classNode, "b"), equalsModuloVar)
+ }
+}
diff --git a/test/files/jvm/throws-annot-from-java.check b/test/files/jvm/throws-annot-from-java.check
new file mode 100644
index 0000000000..be3ba412f8
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java.check
@@ -0,0 +1,47 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> :paste
+// Entering paste mode (ctrl-D to finish)
+
+{
+ val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2"));
+ {
+ val method = clazz.info.member(newTermName("foo"))
+ val throwsAnn = method.annotations.head
+ val atp = throwsAnn.atp
+ println("foo")
+ println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+ println
+
+ {
+ val method = clazz.info.member(newTermName("bar"))
+ val throwsAnn = method.annotations.head
+ val Literal(const) = throwsAnn.args.head
+ val tp = const.typeValue
+ println("bar")
+ println("tp.typeParams.isEmpty: " + tp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+}
+
+// Exiting paste mode, now interpreting.
+
+foo
+atp.typeParams.isEmpty: true
+throws[IllegalStateException](classOf[java.lang.IllegalStateException])
+
+bar
+tp.typeParams.isEmpty: true
+throws[test.PolymorphicException[_]](classOf[test.PolymorphicException])
+
+scala>
diff --git a/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala b/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala
new file mode 100644
index 0000000000..58fa536f0b
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/PolymorphicException_1.scala
@@ -0,0 +1,3 @@
+package test
+
+class PolymorphicException[T] extends Exception
diff --git a/test/files/jvm/throws-annot-from-java/Test_3.scala b/test/files/jvm/throws-annot-from-java/Test_3.scala
new file mode 100644
index 0000000000..de1d984573
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/Test_3.scala
@@ -0,0 +1,29 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """:power
+:paste
+{
+ val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2"));
+ {
+ val method = clazz.info.member(newTermName("foo"))
+ val throwsAnn = method.annotations.head
+ val atp = throwsAnn.atp
+ println("foo")
+ println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+ println
+
+ {
+ val method = clazz.info.member(newTermName("bar"))
+ val throwsAnn = method.annotations.head
+ val Literal(const) = throwsAnn.args.head
+ val tp = const.typeValue
+ println("bar")
+ println("tp.typeParams.isEmpty: " + tp.typeParams.isEmpty)
+ println(throwsAnn)
+ }
+}
+"""
+}
diff --git a/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java b/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java
new file mode 100644
index 0000000000..3708fe626b
--- /dev/null
+++ b/test/files/jvm/throws-annot-from-java/ThrowsDeclaration_2.java
@@ -0,0 +1,6 @@
+package test;
+
+public class ThrowsDeclaration_2 {
+ public void foo() throws IllegalStateException {};
+ public void bar() throws PolymorphicException {};
+}
diff --git a/test/files/lib/javac-artifacts.jar.desired.sha1 b/test/files/lib/javac-artifacts.jar.desired.sha1
deleted file mode 100644
index a49c986386..0000000000
--- a/test/files/lib/javac-artifacts.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-61931a51bb1a2d308d214b96d06e9a8808515dcf ?javac-artifacts.jar
diff --git a/test/files/neg/anyval-anyref-parent.check b/test/files/neg/anyval-anyref-parent.check
index 8c2aa36583..fe20e5de81 100644
--- a/test/files/neg/anyval-anyref-parent.check
+++ b/test/files/neg/anyval-anyref-parent.check
@@ -3,7 +3,7 @@ trait Foo2 extends AnyVal // fail
^
anyval-anyref-parent.scala:5: error: Any does not have a constructor
class Bar1 extends Any // fail
- ^
+ ^
anyval-anyref-parent.scala:6: error: value class needs to have exactly one public val parameter
class Bar2(x: Int) extends AnyVal // fail
^
diff --git a/test/files/neg/cyclics-import.check b/test/files/neg/cyclics-import.check
index be09fca374..ef355fab0a 100644
--- a/test/files/neg/cyclics-import.check
+++ b/test/files/neg/cyclics-import.check
@@ -3,4 +3,13 @@ Note: this is often due in part to a class depending on a definition nested with
If applicable, you may wish to try moving some members into another object.
import User.UserStatus._
^
-one error found
+cyclics-import.scala:12: error: not found: type Value
+ type UserStatus = Value
+ ^
+cyclics-import.scala:14: error: not found: value Value
+ val Active = Value("1")
+ ^
+cyclics-import.scala:15: error: not found: value Value
+ val Disabled = Value("2")
+ ^
+four errors found
diff --git a/test/files/neg/macro-false-deprecation-warning.check b/test/files/neg/macro-false-deprecation-warning.check
new file mode 100644
index 0000000000..7d56505ec4
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning.check
@@ -0,0 +1,4 @@
+Impls_Macros_1.scala:5: error: illegal start of simple expression
+}
+^
+one error found
diff --git a/test/files/neg/macro-false-deprecation-warning.flags b/test/files/neg/macro-false-deprecation-warning.flags
new file mode 100644
index 0000000000..59af162db6
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning.flags
@@ -0,0 +1 @@
+-language:experimental.macros -deprecation \ No newline at end of file
diff --git a/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
new file mode 100644
index 0000000000..6dc2ea114b
--- /dev/null
+++ b/test/files/neg/macro-false-deprecation-warning/Impls_Macros_1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.macros.Context
+
+object Helper {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] =
+}
+
+object Macros {
+ def impl[T: c.WeakTypeTag](c: Context)(x: c.Expr[List[T]]) = {
+ c.universe.reify(Helper.unapplySeq(x.splice))
+ }
+
+ object UnapplyMacro {
+ def unapplySeq[T](x: List[T]): Option[Seq[T]] = macro impl[T]
+ }
+}
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index f6bd703e1f..ea7c323b74 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -100,7 +100,7 @@ Error occurred in an application involving default arguments.
^
names-defaults-neg.scala:86: error: module extending its companion class cannot use default constructor arguments
object C extends C()
- ^
+ ^
names-defaults-neg.scala:90: error: deprecated parameter name x has to be distinct from any other parameter name (deprecated or not).
def deprNam1(x: Int, @deprecatedName('x) y: String) = 0
^
diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check
index e295917050..f137158ed6 100644
--- a/test/files/neg/protected-constructors.check
+++ b/test/files/neg/protected-constructors.check
@@ -19,4 +19,7 @@ protected-constructors.scala:15: error: class Foo3 in object Ding cannot be acce
object Ding in package dingus where target is defined
class Bar3 extends Ding.Foo3("abc")
^
-four errors found
+protected-constructors.scala:15: error: too many arguments for constructor Object: ()Object
+ class Bar3 extends Ding.Foo3("abc")
+ ^
+5 errors found
diff --git a/test/files/neg/t2148.check b/test/files/neg/t2148.check
index 27b5dce507..5113b48e51 100644
--- a/test/files/neg/t2148.check
+++ b/test/files/neg/t2148.check
@@ -1,4 +1,4 @@
-t2148.scala:9: error: A is not a legal prefix for a constructor
+t2148.scala:9: error: type A is not a stable prefix
val b = new A with A#A1
^
one error found
diff --git a/test/files/neg/t2968.check b/test/files/neg/t2968.check
new file mode 100644
index 0000000000..5d2387f98c
--- /dev/null
+++ b/test/files/neg/t2968.check
@@ -0,0 +1,10 @@
+t2968.scala:8: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+t2968.scala:17: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+t2968.scala:26: error: Missing closing brace `}' assumed here
+} // missing brace
+^
+three errors found
diff --git a/test/files/neg/t2968.scala b/test/files/neg/t2968.scala
new file mode 100644
index 0000000000..41c3a798a5
--- /dev/null
+++ b/test/files/neg/t2968.scala
@@ -0,0 +1,26 @@
+object t1 {
+ case object Const {
+ }
+
+ class Var
+ {
+
+} // missing brace
+
+object t2 {
+ case class Const() {
+ }
+
+ class Var
+ {
+
+} // missing brace
+
+object t3 {
+ final case class Const() {
+ }
+
+ class Var
+ {
+
+} // missing brace
diff --git a/test/files/neg/t2968b.check b/test/files/neg/t2968b.check
new file mode 100644
index 0000000000..36d25a2d12
--- /dev/null
+++ b/test/files/neg/t2968b.check
@@ -0,0 +1,4 @@
+t2968b.scala:7: error: '}' expected but eof found.
+// missing brace
+ ^
+one error found
diff --git a/test/files/neg/t2968b.scala b/test/files/neg/t2968b.scala
new file mode 100644
index 0000000000..422b618aba
--- /dev/null
+++ b/test/files/neg/t2968b.scala
@@ -0,0 +1,7 @@
+case class Const()
+{
+}
+
+class Var
+{
+// missing brace
diff --git a/test/files/neg/t409.check b/test/files/neg/t409.check
index 0edc0d03cd..433d64d25d 100644
--- a/test/files/neg/t409.check
+++ b/test/files/neg/t409.check
@@ -1,4 +1,4 @@
-t409.scala:6: error: class Case1 needs to be a trait to be mixed in
+t409.scala:6: error: traits or objects may not have parameters
class Toto extends Expr with Case1(12);
- ^
+ ^
one error found
diff --git a/test/files/neg/t5353.check b/test/files/neg/t5353.check
new file mode 100644
index 0000000000..75e2435600
--- /dev/null
+++ b/test/files/neg/t5353.check
@@ -0,0 +1,4 @@
+t5353.scala:2: error: this type parameter must be specified
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+ ^
+one error found
diff --git a/test/files/neg/t5353.scala b/test/files/neg/t5353.scala
new file mode 100644
index 0000000000..1ee869aac1
--- /dev/null
+++ b/test/files/neg/t5353.scala
@@ -0,0 +1,3 @@
+class A {
+ def f(x: Boolean) = if (x) Array("abc") else Array()
+}
diff --git a/test/files/neg/t5378.check b/test/files/neg/t5378.check
new file mode 100644
index 0000000000..c1460083f6
--- /dev/null
+++ b/test/files/neg/t5378.check
@@ -0,0 +1,31 @@
+t5378.scala:7: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains = new { def apply[T1 <: T](value: T1) = ??? }
+ ^
+t5378.scala:8: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains1 = new { def apply[T1 <: A1](value: T1) = ??? }
+ ^
+t5378.scala:9: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def contains2 = new { def apply[T1 <: A2](value: T1) = ??? }
+ ^
+t5378.scala:15: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: T](value: T1) = ??? }
+ ^
+t5378.scala:16: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: B1](value: T1) = ??? }
+ ^
+t5378.scala:17: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ new Bippy { def apply[T1 <: B2](value: T1) = ??? }
+ ^
+t5378.scala:21: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply1[T1 <: B3](value: T1) = ???
+ ^
+t5378.scala:23: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply3(value: B3) = ???
+ ^
+t5378.scala:28: error: Parameter type in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply1(s: String)(x: Int)(value: T) = ???
+ ^
+t5378.scala:29: error: Type bound in structural refinement may not refer to an abstract type defined outside that refinement
+ def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ???
+ ^
+10 errors found
diff --git a/test/files/neg/t5378.scala b/test/files/neg/t5378.scala
new file mode 100644
index 0000000000..fa6afa02be
--- /dev/null
+++ b/test/files/neg/t5378.scala
@@ -0,0 +1,54 @@
+import scala.language.reflectiveCalls
+
+class Coll[+T] {
+ type A1 <: T
+ type A2 <: A1
+
+ def contains = new { def apply[T1 <: T](value: T1) = ??? }
+ def contains1 = new { def apply[T1 <: A1](value: T1) = ??? }
+ def contains2 = new { def apply[T1 <: A2](value: T1) = ??? }
+ def contains3 = {
+ trait Bippy {
+ type B1 <: T
+ type B2 <: B1
+ }
+ new Bippy { def apply[T1 <: T](value: T1) = ??? }
+ new Bippy { def apply[T1 <: B1](value: T1) = ??? }
+ new Bippy { def apply[T1 <: B2](value: T1) = ??? }
+ new Bippy {
+ type B3 = B2
+ type B4 = List[B2]
+ def apply1[T1 <: B3](value: T1) = ???
+ def apply2[T1 <: B4](value: T1) = ???
+ def apply3(value: B3) = ???
+ def apply4(value: B4) = value.head
+ }
+ }
+ def contains4 = new {
+ def apply1(s: String)(x: Int)(value: T) = ???
+ def apply2[T1 <: T](s: String)(x: Int)(value: T1) = ???
+ }
+ def containsOk = {
+ trait Bippy {
+ type B1 <: AnyRef
+ type B2 <: B1
+ }
+ new Bippy { def apply[T1 <: AnyRef](value: T1) = ??? }
+ new Bippy { type B1 = String ; def apply[T1 <: B1](value: T1) = ??? }
+ new Bippy { type B2 = String ; def apply[T1 <: B2](value: T1) = ??? }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val xs = new Coll[List[String]]
+ val ys: Coll[Traversable[String]] = xs
+
+ println(ys contains Nil)
+ // java.lang.NoSuchMethodException: Coll$$anon$1.apply(scala.collection.Traversable)
+ // at java.lang.Class.getMethod(Class.java:1605)
+ // at Test$.reflMethod$Method1(a.scala:14)
+ // at Test$.main(a.scala:14)
+ // at Test.main(a.scala)
+ }
+}
diff --git a/test/files/neg/t5529.check b/test/files/neg/t5529.check
index da3f84e1ec..5d2175fa79 100644
--- a/test/files/neg/t5529.check
+++ b/test/files/neg/t5529.check
@@ -4,4 +4,7 @@ t5529.scala:12: error: File is already defined as class File
t5529.scala:10: error: class type required but test.Test.File found
sealed class Dir extends File { }
^
-two errors found
+t5529.scala:10: error: test.Test.File does not have a constructor
+ sealed class Dir extends File { }
+ ^
+three errors found
diff --git a/test/files/neg/t5543.check b/test/files/neg/t5543.check
new file mode 100644
index 0000000000..b61de0f78b
--- /dev/null
+++ b/test/files/neg/t5543.check
@@ -0,0 +1,10 @@
+t5543.scala:3: error: not found: type T
+ def this(x: T) { this() }
+ ^
+t5543.scala:11: error: not found: value x
+ def this(a: Int, b: Int = x) {
+ ^
+t5543.scala:18: error: not found: value x
+ def this(a: Int = x) { this() }
+ ^
+three errors found
diff --git a/test/files/neg/t5543.scala b/test/files/neg/t5543.scala
new file mode 100644
index 0000000000..4e03e6e114
--- /dev/null
+++ b/test/files/neg/t5543.scala
@@ -0,0 +1,19 @@
+class C1 {
+ type T
+ def this(x: T) { this() }
+}
+
+class C1a[T] {
+ def this(x: T) { this() } // works, no error here
+}
+
+class C2(x: Int) {
+ def this(a: Int, b: Int = x) {
+ this(b)
+ }
+}
+
+class C3 {
+ val x = 0
+ def this(a: Int = x) { this() }
+}
diff --git a/test/files/neg/t5692a.check b/test/files/neg/t5692a.check
index ded95a8820..7fbfb5dba7 100644
--- a/test/files/neg/t5692a.check
+++ b/test/files/neg/t5692a.check
@@ -1,4 +1,4 @@
-Test_2.scala:2: error: type parameter not specified
+Test_2.scala:2: error: this type parameter must be specified
def x = Macros.foo
^
one error found
diff --git a/test/files/neg/t5692b.check b/test/files/neg/t5692b.check
index e453870ec8..16796826b4 100644
--- a/test/files/neg/t5692b.check
+++ b/test/files/neg/t5692b.check
@@ -1,4 +1,4 @@
-Test_2.scala:2: error: type parameters not specified
+Test_2.scala:2: error: these type parameters must be specified
def x = Macros.foo
^
one error found
diff --git a/test/files/neg/t5696.check b/test/files/neg/t5696.check
index e0fb61b839..72b7781fc4 100644
--- a/test/files/neg/t5696.check
+++ b/test/files/neg/t5696.check
@@ -15,5 +15,5 @@ t5696.scala:38: error: too many argument lists for constructor invocation
^
t5696.scala:46: error: too many argument lists for constructor invocation
object x extends G(1)(2) {}
- ^
+ ^
6 errors found
diff --git a/test/files/neg/t5753/Impls$class.class b/test/files/neg/t5753/Impls$class.class
deleted file mode 100644
index 476329174e..0000000000
--- a/test/files/neg/t5753/Impls$class.class
+++ /dev/null
Binary files differ
diff --git a/test/files/neg/t5753/Impls.class b/test/files/neg/t5753/Impls.class
deleted file mode 100644
index dfcf89ed44..0000000000
--- a/test/files/neg/t5753/Impls.class
+++ /dev/null
Binary files differ
diff --git a/test/files/neg/t5954.check b/test/files/neg/t5954.check
index 3ca47cd430..ed10658b24 100644
--- a/test/files/neg/t5954.check
+++ b/test/files/neg/t5954.check
@@ -1,16 +1,16 @@
-t5954.scala:36: error: implementation restriction: package object A cannot contain case class D. Instead, class D should be placed directly in package A.
+t5954.scala:36: error: class D should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
case class D()
^
-t5954.scala:35: error: implementation restriction: package object A cannot contain companion object C. Instead, object C should be placed directly in package A.
+t5954.scala:35: error: object C should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
object C
^
-t5954.scala:34: error: implementation restriction: package object A cannot contain companion trait C. Instead, trait C should be placed directly in package A.
+t5954.scala:34: error: trait C should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
trait C
^
-t5954.scala:33: error: implementation restriction: package object A cannot contain companion object B. Instead, object B should be placed directly in package A.
+t5954.scala:33: error: object B should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
object B
^
-t5954.scala:32: error: implementation restriction: package object A cannot contain companion class B. Instead, class B should be placed directly in package A.
+t5954.scala:32: error: class B should be placed directly in package A instead of package object A. Under some circumstances companion objects and case classes in package objects can fail to recompile. See https://issues.scala-lang.org/browse/SI-5954.
class B
^
5 errors found
diff --git a/test/files/neg/t5954.flags b/test/files/neg/t5954.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/neg/t5954.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/neg/t5954.scala b/test/files/neg/t5954.scala
index 9e6f5392c7..3ccb5ed3ff 100644
--- a/test/files/neg/t5954.scala
+++ b/test/files/neg/t5954.scala
@@ -1,4 +1,4 @@
-// if you ever think you've fixed the underlying reason for the implementation restrictions
+// if you ever think you've fixed the underlying reason for the warning
// imposed by SI-5954, then here's a test that should pass with two "success"es
//
//import scala.tools.partest._
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
index f6757f97e3..f91df0c46d 100644
--- a/test/files/neg/t6040.check
+++ b/test/files/neg/t6040.check
@@ -1,7 +1,9 @@
-error: extension of type scala.Dynamic needs to be enabled
+t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled
by making the implicit value language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
why the feature needs to be explicitly enabled.
+class X extends Dynamic
+ ^
one error found
diff --git a/test/files/neg/t6231.check b/test/files/neg/t6231.check
new file mode 100644
index 0000000000..b27961d393
--- /dev/null
+++ b/test/files/neg/t6231.check
@@ -0,0 +1,6 @@
+t6231.scala:4: error: Implementation restriction: local trait Bug$X$1 is unable to automatically capture the
+free variable value ev$1 on behalf of anonymous class anonfun$qux$1. You can manually assign it to a val inside the trait,
+and refer that that val in anonymous class anonfun$qux$1. For more details, see SI-6231.
+ def qux = { () => ev }
+ ^
+one error found
diff --git a/test/files/neg/t6231.scala b/test/files/neg/t6231.scala
new file mode 100644
index 0000000000..1e5b4e0e1a
--- /dev/null
+++ b/test/files/neg/t6231.scala
@@ -0,0 +1,15 @@
+object Bug {
+ def bar(ev: Any) = {
+ trait X {
+ def qux = { () => ev }
+ }
+ new X {}.qux()
+
+ // workaround
+ trait Y {
+ val ev2 = ev // manually capture `ev` so that `ev2` is added to the trait interface.
+ def qux = { () => ev2 }
+ }
+ }
+}
+
diff --git a/test/files/neg/t6426.check b/test/files/neg/t6426.check
new file mode 100644
index 0000000000..149f74c4de
--- /dev/null
+++ b/test/files/neg/t6426.check
@@ -0,0 +1,7 @@
+t6426.scala:4: error: wildcard invalid as backquoted identifier
+ println(`_`.Buffer(0))
+ ^
+t6426.scala:5: error: ')' expected but '}' found.
+}
+^
+two errors found
diff --git a/test/files/neg/t6426.scala b/test/files/neg/t6426.scala
new file mode 100644
index 0000000000..a27d18eb58
--- /dev/null
+++ b/test/files/neg/t6426.scala
@@ -0,0 +1,5 @@
+class A {
+ import collection.{mutable => _, _}
+
+ println(`_`.Buffer(0))
+}
diff --git a/test/files/neg/t6443c.check b/test/files/neg/t6443c.check
new file mode 100644
index 0000000000..7cf8d23f4b
--- /dev/null
+++ b/test/files/neg/t6443c.check
@@ -0,0 +1,7 @@
+t6443c.scala:16: error: double definition:
+method foo:(d: B.D)(a: Any)(d2: d.type)Unit and
+method foo:(d: B.D)(a: Any, d2: d.type)Unit at line 11
+have same type after erasure: (d: B.D, a: Object, d2: B.D)Unit
+ def foo(d: D)(a: Any)(d2: d.type): Unit = ()
+ ^
+one error found
diff --git a/test/files/neg/t6443c.scala b/test/files/neg/t6443c.scala
new file mode 100644
index 0000000000..817224e043
--- /dev/null
+++ b/test/files/neg/t6443c.scala
@@ -0,0 +1,21 @@
+trait A {
+ type D >: Null <: C
+ def foo(d: D)(a: Any, d2: d.type): Unit
+ trait C {
+ def bar: Unit = foo(null)(null, null)
+ }
+}
+object B extends A {
+ class D extends C
+
+ def foo(d: D)(a: Any, d2: d.type): Unit = () // Bridge method required here!
+
+ // No bridge method should be added, but we'll be happy enough if
+ // the "same type after erasure" error kicks in before the duplicated
+ // bridge causes a problem.
+ def foo(d: D)(a: Any)(d2: d.type): Unit = ()
+}
+
+object Test extends App {
+ new B.D().bar
+}
diff --git a/test/files/neg/t6539/Macro_1.scala b/test/files/neg/t6539/Macro_1.scala
index ed52776d95..4f7d289e2e 100644
--- a/test/files/neg/t6539/Macro_1.scala
+++ b/test/files/neg/t6539/Macro_1.scala
@@ -5,6 +5,6 @@ object M {
def m(a: Any, b: Any): Any = macro mImpl
def mImpl(c: Context)(a: c.Expr[Any], b: c.Expr[Any]) = a
- @reflect.macros.compileTimeOnly("cto may only be used as an argument to " + "m")
+ @reflect.internal.annotations.compileTimeOnly("cto may only be used as an argument to " + "m")
def cto = 0
}
diff --git a/test/files/neg/t6539/Test_2.scala b/test/files/neg/t6539/Test_2.scala
index 5a602879ec..26f4504222 100644
--- a/test/files/neg/t6539/Test_2.scala
+++ b/test/files/neg/t6539/Test_2.scala
@@ -3,4 +3,10 @@ object Test {
M.m(M.cto, ()) // error
M.m((), M.cto) // okay
M.cto // error
+
+ locally {
+ val expr = scala.reflect.runtime.universe.reify(2)
+ val splice = expr.splice
+ val value = expr.value
+ }
}
diff --git a/test/files/neg/t6567.check b/test/files/neg/t6567.check
new file mode 100644
index 0000000000..4c513e64cd
--- /dev/null
+++ b/test/files/neg/t6567.check
@@ -0,0 +1,7 @@
+t6567.scala:8: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+ Option[B](a)
+ ^
+t6567.scala:10: error: Suspicious application of an implicit view (Test.this.a2b) in the argument to Option.apply.
+ val b: Option[B] = Option(a)
+ ^
+two errors found
diff --git a/test/files/neg/t6567.flags b/test/files/neg/t6567.flags
new file mode 100644
index 0000000000..e93641e931
--- /dev/null
+++ b/test/files/neg/t6567.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t6567.scala b/test/files/neg/t6567.scala
new file mode 100644
index 0000000000..650e5e39ae
--- /dev/null
+++ b/test/files/neg/t6567.scala
@@ -0,0 +1,11 @@
+class A
+class B
+
+object Test {
+ val a: A = null
+ implicit def a2b(a: A) = new B
+
+ Option[B](a)
+
+ val b: Option[B] = Option(a)
+}
diff --git a/test/files/neg/t6601.check b/test/files/neg/t6601.check
deleted file mode 100644
index 1410e1b11a..0000000000
--- a/test/files/neg/t6601.check
+++ /dev/null
@@ -1,4 +0,0 @@
-AccessPrivateConstructor_2.scala:2: error: constructor PrivateConstructor in class PrivateConstructor cannot be accessed in class AccessPrivateConstructor
- new PrivateConstructor("") // Scalac should forbid accessing to the private constructor!
- ^
-one error found
diff --git a/test/files/neg/t6601/AccessPrivateConstructor_2.scala b/test/files/neg/t6601/AccessPrivateConstructor_2.scala
deleted file mode 100644
index 816bc10d79..0000000000
--- a/test/files/neg/t6601/AccessPrivateConstructor_2.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-class AccessPrivateConstructor {
- new PrivateConstructor("") // Scalac should forbid accessing to the private constructor!
-}
diff --git a/test/files/neg/t6601/PrivateConstructor_1.scala b/test/files/neg/t6601/PrivateConstructor_1.scala
deleted file mode 100644
index f09d7ad068..0000000000
--- a/test/files/neg/t6601/PrivateConstructor_1.scala
+++ /dev/null
@@ -1 +0,0 @@
-class PrivateConstructor private(val s: String) extends AnyVal
diff --git a/test/files/neg/t6667.check b/test/files/neg/t6667.check
index 43313fa4fe..b04251d7c1 100644
--- a/test/files/neg/t6667.check
+++ b/test/files/neg/t6667.check
@@ -1,4 +1,5 @@
-t6667.scala:8: error: ambiguous implicit values:
+t6667.scala:8: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667.
+ambiguous implicit values:
both value inScope1 in object Test of type => C
and value inScope2 in object Test of type => C
match expected type C
diff --git a/test/files/neg/t6667.flags b/test/files/neg/t6667.flags
new file mode 100644
index 0000000000..6c1dd108ae
--- /dev/null
+++ b/test/files/neg/t6667.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint \ No newline at end of file
diff --git a/test/files/neg/t6667b.check b/test/files/neg/t6667b.check
index 99cea9a47c..5d56e776c3 100644
--- a/test/files/neg/t6667b.check
+++ b/test/files/neg/t6667b.check
@@ -4,7 +4,8 @@ t6667b.scala:16: error: ambiguous implicit values:
match expected type Test.Box
new Test()
^
-t6667b.scala:19: error: ambiguous implicit values:
+t6667b.scala:19: error: Search of in-scope implicits was ambiguous, and the implicit scope was searched. In Scala 2.11.0, this code will not compile. See SI-6667.
+ambiguous implicit values:
both value a in object Test of type => Test.Box
and value b of type Test.Box
match expected type Test.Box
diff --git a/test/files/neg/t6667b.flags b/test/files/neg/t6667b.flags
new file mode 100644
index 0000000000..6c1dd108ae
--- /dev/null
+++ b/test/files/neg/t6667b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint \ No newline at end of file
diff --git a/test/files/neg/t667.check b/test/files/neg/t667.check
index e68c6dea00..d4367bc87b 100644
--- a/test/files/neg/t667.check
+++ b/test/files/neg/t667.check
@@ -1,4 +1,4 @@
-t667.scala:8: error: illegal cyclic reference involving class Ni
+t667.scala:8: error: class Ni inherits itself
class Ni extends super.Ni with Ni;
- ^
+ ^
one error found
diff --git a/test/files/neg/t6728.check b/test/files/neg/t6728.check
new file mode 100644
index 0000000000..d853d6f724
--- /dev/null
+++ b/test/files/neg/t6728.check
@@ -0,0 +1,4 @@
+t6728.scala:4: error: '(' expected but '}' found.
+ }
+ ^
+one error found
diff --git a/test/files/neg/t6728.scala b/test/files/neg/t6728.scala
new file mode 100644
index 0000000000..ba0b1a0fdf
--- /dev/null
+++ b/test/files/neg/t6728.scala
@@ -0,0 +1,5 @@
+object X {
+ while(true) {
+ for
+ }
+}
diff --git a/test/files/neg/t6829.check b/test/files/neg/t6829.check
index 8ee6d182eb..7c3c66e0f2 100644
--- a/test/files/neg/t6829.check
+++ b/test/files/neg/t6829.check
@@ -1,6 +1,6 @@
t6829.scala:35: error: type mismatch;
found : AgentSimulation.this.state.type (with underlying type G#State)
- required: _10.State
+ required: _9.State
lazy val actions: Map[G#Agent,G#Action] = agents.map(a => a -> a.chooseAction(state)).toMap
^
t6829.scala:45: error: trait AgentSimulation takes type parameters
@@ -17,12 +17,12 @@ t6829.scala:49: error: not found: value nextState
^
t6829.scala:50: error: type mismatch;
found : s.type (with underlying type Any)
- required: _54.State where val _54: G
+ required: _53.State where val _53: G
val r = rewards(agent).r(s,a,s2)
^
t6829.scala:51: error: type mismatch;
found : s.type (with underlying type Any)
- required: _51.State
+ required: _50.State
agent.learn(s,a,s2,r): G#Agent
^
t6829.scala:53: error: not found: value nextState
diff --git a/test/files/neg/t6902.check b/test/files/neg/t6902.check
new file mode 100644
index 0000000000..8ad7fd37f9
--- /dev/null
+++ b/test/files/neg/t6902.check
@@ -0,0 +1,10 @@
+t6902.scala:4: error: unreachable code
+ case Some(b) => 3 // no warning was emitted
+ ^
+t6902.scala:9: error: unreachable code
+ case Some(b) => 3 // no warning was emitted
+ ^
+t6902.scala:21: error: unreachable code
+ case 1 => 3 // crash
+ ^
+three errors found
diff --git a/test/files/neg/t6902.flags b/test/files/neg/t6902.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t6902.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t6902.scala b/test/files/neg/t6902.scala
new file mode 100644
index 0000000000..ce5ff8b6fb
--- /dev/null
+++ b/test/files/neg/t6902.scala
@@ -0,0 +1,23 @@
+object Test {
+ Some(Some(1)) collect {
+ case Some(a) => 2
+ case Some(b) => 3 // no warning was emitted
+ }
+
+ (Some(1): @ unchecked) match {
+ case Some(a) => 2
+ case Some(b) => 3 // no warning was emitted
+ }
+
+ // A variation of SI-6011, which eluded the fix
+ // in 2.10.0.
+ //
+ // duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
+ // at scala.tools.nsc.Global.abort(Global.scala:249)
+ // at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
+ ((1: Byte): @unchecked @annotation.switch) match {
+ case 1 => 2
+ case 1 => 3 // crash
+ }
+}
diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check
new file mode 100644
index 0000000000..f1e1881404
--- /dev/null
+++ b/test/files/neg/t6952.check
@@ -0,0 +1,13 @@
+t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value language.dynamics visible.
+This can be achieved by adding the import clause 'import scala.language.dynamics'
+or by setting the compiler option -language:dynamics.
+See the Scala docs for value scala.language.dynamics for a discussion
+why the feature needs to be explicitly enabled.
+trait B extends Dynamic
+ ^
+t6952.scala:3: error: extension of type scala.Dynamic needs to be enabled
+by making the implicit value language.dynamics visible.
+trait C extends A with Dynamic
+ ^
+two errors found
diff --git a/test/files/neg/t6952.scala b/test/files/neg/t6952.scala
new file mode 100644
index 0000000000..257ea3be68
--- /dev/null
+++ b/test/files/neg/t6952.scala
@@ -0,0 +1,4 @@
+trait A
+trait B extends Dynamic
+trait C extends A with Dynamic
+trait D extends B
diff --git a/test/files/neg/t6963.check b/test/files/neg/t6963.check
deleted file mode 100644
index 41cb796b0b..0000000000
--- a/test/files/neg/t6963.check
+++ /dev/null
@@ -1,2 +0,0 @@
-error: -Xmigration is deprecated: This setting is no longer useful and will be removed. Please remove it from your build.
-one error found
diff --git a/test/files/neg/t6963.flags b/test/files/neg/t6963.flags
deleted file mode 100644
index 0b6d71496a..0000000000
--- a/test/files/neg/t6963.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xmigration -deprecation -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t6963.scala b/test/files/neg/t6963.scala
deleted file mode 100644
index 4da52764f5..0000000000
--- a/test/files/neg/t6963.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-
-object test {
-}
diff --git a/test/files/neg/t6963a.check b/test/files/neg/t6963a.check
new file mode 100644
index 0000000000..159896fd10
--- /dev/null
+++ b/test/files/neg/t6963a.check
@@ -0,0 +1,5 @@
+t6963a.scala:4: error: method scanRight in trait TraversableLike has changed semantics in version 2.9.0:
+The behavior of `scanRight` has changed. The previous behavior can be reproduced with scanRight.reverse.
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ ^
+one error found
diff --git a/test/files/neg/t6963a.flags b/test/files/neg/t6963a.flags
new file mode 100644
index 0000000000..4c61ed9430
--- /dev/null
+++ b/test/files/neg/t6963a.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xmigration:2.7
diff --git a/test/files/neg/t6963a.scala b/test/files/neg/t6963a.scala
new file mode 100644
index 0000000000..b3366b2557
--- /dev/null
+++ b/test/files/neg/t6963a.scala
@@ -0,0 +1,5 @@
+object Test {
+ import scala.collection.mutable._
+
+ List(1,2,3,4,5).scanRight(0)(_+_)
+}
diff --git a/test/files/neg/t6963b.check b/test/files/neg/t6963b.check
new file mode 100644
index 0000000000..7e205a41d0
--- /dev/null
+++ b/test/files/neg/t6963b.check
@@ -0,0 +1,13 @@
+t6963b.scala:2: error: An Array will no longer match as Seq[_].
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ ^
+t6963b.scala:4: error: An Array will no longer match as Seq[_].
+ case _: Seq[_] => true
+ ^
+t6963b.scala:16: error: An Array will no longer match as Seq[_].
+ case (Some(_: Seq[_]), Nil, _) => 1
+ ^
+t6963b.scala:17: error: An Array will no longer match as Seq[_].
+ case (None, List(_: List[_], _), _) => 2
+ ^
+four errors found
diff --git a/test/files/neg/t6963b.flags b/test/files/neg/t6963b.flags
new file mode 100644
index 0000000000..83caa2b147
--- /dev/null
+++ b/test/files/neg/t6963b.flags
@@ -0,0 +1 @@
+-Xmigration:2.7 -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t6963b.scala b/test/files/neg/t6963b.scala
new file mode 100644
index 0000000000..3cfa8f0dca
--- /dev/null
+++ b/test/files/neg/t6963b.scala
@@ -0,0 +1,20 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+}
diff --git a/test/files/neg/t877.check b/test/files/neg/t877.check
index c3d4ab6584..5f25bd439c 100644
--- a/test/files/neg/t877.check
+++ b/test/files/neg/t877.check
@@ -1,7 +1,7 @@
t877.scala:3: error: Invalid literal number
trait Foo extends A(22A, Bug!) {}
^
-t877.scala:3: error: ')' expected but eof found.
+t877.scala:3: error: parents of traits may not have parameters
trait Foo extends A(22A, Bug!) {}
- ^
+ ^
two errors found
diff --git a/test/files/pos/SI-7060.flags b/test/files/pos/SI-7060.flags
new file mode 100644
index 0000000000..c926ad6493
--- /dev/null
+++ b/test/files/pos/SI-7060.flags
@@ -0,0 +1 @@
+-Yinline -Ydead-code
diff --git a/test/files/pos/SI-7060.scala b/test/files/pos/SI-7060.scala
new file mode 100644
index 0000000000..c87620e020
--- /dev/null
+++ b/test/files/pos/SI-7060.scala
@@ -0,0 +1,11 @@
+object Test {
+
+ @inline final def mbarray_apply_minibox(array: Any, tag: Byte): Long =
+ if (tag == 0) {
+ array.asInstanceOf[Array[Long]](0)
+ } else
+ array.asInstanceOf[Array[Byte]](0).toLong
+
+ def crash_method(): Unit =
+ mbarray_apply_minibox(null, 0)
+}
diff --git a/test/files/pos/SI-7100.scala b/test/files/pos/SI-7100.scala
new file mode 100644
index 0000000000..7cb6356ec8
--- /dev/null
+++ b/test/files/pos/SI-7100.scala
@@ -0,0 +1,6 @@
+class Buffer {
+ def f[@specialized(Int) T](): T = 0 match {
+ case 0 => 0.asInstanceOf[T]
+ case 1 => 0.asInstanceOf[T]
+ }
+}
diff --git a/test/files/pos/lubs.scala b/test/files/pos/lubs.scala
new file mode 100644
index 0000000000..d7651f86b0
--- /dev/null
+++ b/test/files/pos/lubs.scala
@@ -0,0 +1,3 @@
+object Test {
+ List(new { def f = 1; def g = 1}, new { def f = 2}).map(_.f)
+}
diff --git a/test/files/pos/package-case.flags b/test/files/pos/package-case.flags
deleted file mode 100644
index 2f174c4732..0000000000
--- a/test/files/pos/package-case.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycompanions-in-pkg-objs
diff --git a/test/files/pos/presuperContext.scala b/test/files/pos/presuperContext.scala
new file mode 100644
index 0000000000..cc34263073
--- /dev/null
+++ b/test/files/pos/presuperContext.scala
@@ -0,0 +1,13 @@
+class A {
+ class C extends { val x: A = this } with AnyRef
+}
+
+class B(x: Int)
+
+class D {
+ class C(x: Int) extends B({val test: D = this; x}) {
+ def this() {
+ this({val test: D = this; 1})
+ }
+ }
+}
diff --git a/test/files/pos/t1014.scala b/test/files/pos/t1014.scala
index 1ac87b225b..3fc10d10dc 100644
--- a/test/files/pos/t1014.scala
+++ b/test/files/pos/t1014.scala
@@ -1,6 +1,8 @@
import scala.xml.{NodeSeq, Elem}
-class EO extends App with Moo{
+class EO extends App with Moo {
+ // return type is Flog, inherited from overridden method.
+ // implicit conversions are applied because expected type `pt` is `Flog` when `computeType(rhs, pt)`.
def cat = <cat>dog</cat>
implicit def nodeSeqToFlog(in: Elem): Flog = new Flog(in)
diff --git a/test/files/pos/t1803.flags b/test/files/pos/t1803.flags
new file mode 100644
index 0000000000..d1a8244169
--- /dev/null
+++ b/test/files/pos/t1803.flags
@@ -0,0 +1 @@
+-Yinfer-argument-types \ No newline at end of file
diff --git a/test/files/pos/t1803.scala b/test/files/pos/t1803.scala
new file mode 100644
index 0000000000..42f4e784a3
--- /dev/null
+++ b/test/files/pos/t1803.scala
@@ -0,0 +1,2 @@
+class A { def foo[A](a: A) = a }
+class B extends A { override def foo[A](b) = b }
diff --git a/test/files/pos/t2130-1.flags b/test/files/pos/t2130-1.flags
deleted file mode 100644
index 2f174c4732..0000000000
--- a/test/files/pos/t2130-1.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycompanions-in-pkg-objs
diff --git a/test/files/pos/t2130-2.flags b/test/files/pos/t2130-2.flags
deleted file mode 100644
index 2f174c4732..0000000000
--- a/test/files/pos/t2130-2.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycompanions-in-pkg-objs
diff --git a/test/files/pos/t3577.scala b/test/files/pos/t3577.scala
new file mode 100644
index 0000000000..80a280f67a
--- /dev/null
+++ b/test/files/pos/t3577.scala
@@ -0,0 +1,29 @@
+case class Check[A](val value: A)
+
+case class C2(checks: Check[_]*);
+
+object C {
+ def m(x : C2): Any = (null: Any) match {
+ case C2(_, rest @ _*) => {
+ rest.map(_.value)
+ }
+ }
+}
+
+///////////////////
+
+object Container {
+ trait Exp[+T]
+ abstract class FuncExp[-S, +T]
+
+ sealed abstract class FoundNode[T, Repr] {
+ def optimize[TupleT, U, That](parentNode: FlatMap[T, Repr, U, That]): Any
+ def optimize2[TupleT, U, That](parentNode: Any): Any
+ }
+
+ class FlatMap[T, Repr, U, That]
+
+ val Seq(fn: FoundNode[t, repr]) = Seq[FoundNode[_, _]]()
+ fn.optimize(null) // was: scala.MatchError: ? (of class BoundedWildcardType) @ Variances#varianceInType
+ fn.optimize2(null) // was: fatal error: bad type: ?(class scala.reflect.internal.Types$BoundedWildcardType) @ Pickle.putType
+}
diff --git a/test/files/pos/t3999b.flags b/test/files/pos/t3999b.flags
deleted file mode 100644
index 2f174c4732..0000000000
--- a/test/files/pos/t3999b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycompanions-in-pkg-objs
diff --git a/test/files/pos/t4052.flags b/test/files/pos/t4052.flags
deleted file mode 100644
index 2f174c4732..0000000000
--- a/test/files/pos/t4052.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycompanions-in-pkg-objs
diff --git a/test/files/pos/t5082.scala b/test/files/pos/t5082.scala
new file mode 100644
index 0000000000..63eeda38ba
--- /dev/null
+++ b/test/files/pos/t5082.scala
@@ -0,0 +1,14 @@
+trait Something[T]
+object Test { class A }
+case class Test() extends Something[Test.A]
+
+object User {
+ val Test() = Test()
+}
+
+object Wrap {
+ trait Something[T]
+ object Test { class A }
+ case class Test(a: Int, b: Int)(c: String) extends Something[Test.A]
+ val Test(x, y) = Test(1, 2)(""); (x + y).toString
+}
diff --git a/test/files/pos/t5130.scala b/test/files/pos/t5130.scala
new file mode 100644
index 0000000000..676d3c7050
--- /dev/null
+++ b/test/files/pos/t5130.scala
@@ -0,0 +1,46 @@
+import scala.language.reflectiveCalls
+
+class A {
+ this_a =>
+
+ def b = new B
+ class B { def a: this_a.type = this_a }
+}
+trait A2 { def c = () }
+
+object Test {
+ val v1 = new A { def c = () }
+ val v2 = new A with A2 { }
+ val v3: A { def c: Unit } = null
+ def d1 = new A { def c = () }
+ def d2 = new A with A2 { }
+ def d3: A { def c: Unit } = null
+ var x1 = new A { def c = () }
+ var x2 = new A with A2 { }
+ var x3: A { def c: Unit } = null
+
+ def main(args: Array[String]): Unit = {
+ val mv1 = new A { def c = () }
+ val mv2 = new A with A2 { }
+ val mv3: A { def c: Unit } = null
+ def md1 = new A { def c = () }
+ def md2 = new A with A2 { }
+ def md3: A { def c: Unit } = null
+
+ v1.b.a.c
+ v2.b.a.c
+ v3.b.a.c
+ d1.b.a.c
+ d2.b.a.c
+ d3.b.a.c
+ x1.b.a.c
+ x2.b.a.c
+ x3.b.a.c
+ mv1.b.a.c
+ mv2.b.a.c
+ mv3.b.a.c
+ md1.b.a.c
+ md2.b.a.c
+ md3.b.a.c
+ }
+}
diff --git a/test/files/pos/t5604b/T_1.scala b/test/files/pos/t5604b/T_1.scala
new file mode 100644
index 0000000000..179dcb10c6
--- /dev/null
+++ b/test/files/pos/t5604b/T_1.scala
@@ -0,0 +1,6 @@
+// sandbox/t5604/T.scala
+package t6504
+
+trait T {
+ def foo: Boolean = false
+}
diff --git a/test/files/pos/t5604b/T_2.scala b/test/files/pos/t5604b/T_2.scala
new file mode 100644
index 0000000000..179dcb10c6
--- /dev/null
+++ b/test/files/pos/t5604b/T_2.scala
@@ -0,0 +1,6 @@
+// sandbox/t5604/T.scala
+package t6504
+
+trait T {
+ def foo: Boolean = false
+}
diff --git a/test/files/pos/t5604b/Test_1.scala b/test/files/pos/t5604b/Test_1.scala
new file mode 100644
index 0000000000..f7c58ebe83
--- /dev/null
+++ b/test/files/pos/t5604b/Test_1.scala
@@ -0,0 +1,7 @@
+// sandbox/t5604/Test.scala
+package t6504
+
+object Test {
+ def blerg1(a: Any): Any = if (foo) blerg1(0)
+ def blerg2(a: Any): Any = if (t6504.foo) blerg2(0)
+}
diff --git a/test/files/pos/t5604b/Test_2.scala b/test/files/pos/t5604b/Test_2.scala
new file mode 100644
index 0000000000..f7c58ebe83
--- /dev/null
+++ b/test/files/pos/t5604b/Test_2.scala
@@ -0,0 +1,7 @@
+// sandbox/t5604/Test.scala
+package t6504
+
+object Test {
+ def blerg1(a: Any): Any = if (foo) blerg1(0)
+ def blerg2(a: Any): Any = if (t6504.foo) blerg2(0)
+}
diff --git a/test/files/pos/t5604b/pack_1.scala b/test/files/pos/t5604b/pack_1.scala
new file mode 100644
index 0000000000..f50d568bfa
--- /dev/null
+++ b/test/files/pos/t5604b/pack_1.scala
@@ -0,0 +1,5 @@
+// sandbox/t5604/pack.scala
+package t6504
+
+object `package` extends T {
+}
diff --git a/test/files/pos/t5859.scala b/test/files/pos/t5859.scala
new file mode 100644
index 0000000000..2a31e68ee5
--- /dev/null
+++ b/test/files/pos/t5859.scala
@@ -0,0 +1,15 @@
+
+class A {
+ def f(xs: List[Int], ys: AnyRef*) = ()
+ def f(xs: AnyRef*) = ()
+
+ f()
+ f(List[AnyRef](): _*)
+ f(List(): _*)
+ f(Nil: _*)
+ f(Array(): _*)
+ f(Array[AnyRef](): _*)
+ f(List(1))
+ f(List(1), Nil: _*)
+ f(List(1), Array(): _*)
+}
diff --git a/test/files/pos/t6072.scala b/test/files/pos/t6072.scala
new file mode 100644
index 0000000000..e25ebbffc5
--- /dev/null
+++ b/test/files/pos/t6072.scala
@@ -0,0 +1,3 @@
+class A {
+ object B { def eq(lvl: Int) = ??? }
+}
diff --git a/test/files/pos/t6146.flags b/test/files/pos/t6146.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6146.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6146.scala b/test/files/pos/t6146.scala
new file mode 100644
index 0000000000..b5bde826b1
--- /dev/null
+++ b/test/files/pos/t6146.scala
@@ -0,0 +1,60 @@
+// No unreachable or exhaustiveness warnings, please.
+
+//
+// The reported bug
+//
+
+trait AxisCompanion {
+ sealed trait Format
+ object Format {
+ case object Decimal extends Format
+ case object Integer extends Format
+ // Gives an unrelated warning: The outer reference in this type test cannot be checked at run time.
+ //final case class Time( hours: Boolean = false, millis: Boolean = true ) extends Format
+ }
+}
+object Axis extends AxisCompanion
+class Axis {
+ import Axis._
+ def test( f: Format ) = f match {
+ case Format.Integer => "Int"
+ // case Format.Time( hours, millis ) => "Time"
+ case Format.Decimal => "Dec"
+ }
+}
+
+
+//
+// Some tricksier variations
+//
+
+trait T1[X] {
+ trait T2[Y] {
+ sealed trait Format
+ object Format {
+ case object Decimal extends Format
+ case object Integer extends Format
+ }
+ }
+}
+
+object O1 extends T1[Any] {
+ object O2 extends T2[Any] {
+
+ }
+}
+
+case object Shorty extends O1.O2.Format
+
+class Test1 {
+ import O1.O2._
+ val FI: Format.Integer.type = Format.Integer
+ def test( f: Format ) = {
+ val ff: f.type = f
+ ff match {
+ case FI => "Int"
+ case Format.Decimal => "Dec"
+ case Shorty => "Sho"
+ }
+ }
+}
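
Editorial sketch (not part of the patch): t6146 asserts that matches over the nested sealed Format hierarchy pass the exhaustiveness check under -Xfatal-warnings. The underlying feature in its simplest form, with invented names (Light, Red, Green):

sealed trait Light
case object Red extends Light
case object Green extends Light

object LightSketch {
  // Covers every case object of the sealed trait, so no exhaustiveness warning is issued.
  def describe(l: Light): String = l match {
    case Red   => "stop"
    case Green => "go"
  }

  def main(args: Array[String]): Unit =
    println(describe(Green)) // prints: go
}
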
diff --git a/test/files/pos/t6482.scala b/test/files/pos/t6482.scala
new file mode 100644
index 0000000000..24ea38e519
--- /dev/null
+++ b/test/files/pos/t6482.scala
@@ -0,0 +1,11 @@
+final class TraversableOnceOps[+A](val collection: TraversableOnce[A]) extends AnyVal {
+ def reduceLeftOption[B >: A](op: (B, A) => B): Option[B] =
+ if (collection.isEmpty) None else Some(collection.reduceLeft[B](op))
+}
+// error: type arguments [B] do not conform to method reduceLeft's type parameter bounds [B >: A]
+// if (collection.isEmpty) None else Some(collection.reduceLeft[B](op))
+// ^
+
+class Foo[+A <: AnyRef](val xs: List[A]) extends AnyVal {
+ def baz[B >: A](x: B): List[B] = x :: xs
+}
diff --git a/test/files/pos/t6516.scala b/test/files/pos/t6516.scala
new file mode 100644
index 0000000000..c004055de2
--- /dev/null
+++ b/test/files/pos/t6516.scala
@@ -0,0 +1,19 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+import scala.collection.TraversableLike
+
+// This one compiles
+object Test {
+ type Alias[T, CC[_]] = Context { type PrefixType = TraversableLike[T, CC[T]] }
+ def f() = macro f_impl
+ def f_impl(c: Alias[Int, List])() = ???
+}
+
+// This one doesn't
+object Test2 {
+ type Ctx = scala.reflect.macros.Context
+ type Alias[T, CC[_]] = Ctx { type PrefixType = TraversableLike[T, CC[T]] }
+
+ def f() = macro f_impl
+ def f_impl(c: Alias[Int, List])() = ???
+}
diff --git a/test/files/pos/t6595.flags b/test/files/pos/t6595.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t6595.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t6595.scala b/test/files/pos/t6595.scala
new file mode 100644
index 0000000000..437c0bcf05
--- /dev/null
+++ b/test/files/pos/t6595.scala
@@ -0,0 +1,18 @@
+import scala.annotation.switch
+
+class Foo extends {
+ final val b0 = 5
+} with AnyRef {
+ final val b1 = 10
+
+ // Using the @switch annotation as a means of testing that the
+ // type inferred for b0 is Int(5) and not Int. Only in the former
+ // case can a switch be generated.
+ def f(p: Int) = (p: @switch) match {
+ case `b0` => 1
+ case `b1` => 2
+ case 15 => 3
+ case 20 => 4
+ case _ => 5
+ }
+}
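
Editorial sketch (not part of the patch): the comment in t6595 explains that b0 must be inferred as the constant type Int(5), not Int, for a switch to be emitted. A standalone illustration of that behaviour, with invented names (SwitchSketch, Small, classify):

import scala.annotation.switch

object SwitchSketch {
  final val Small = 3 // literal right-hand side, so the inferred type is the constant type Int(3)

  // Constant-typed patterns let the compiler emit a table switch;
  // @switch asks it to warn if it cannot.
  def classify(p: Int): String = (p: @switch) match {
    case Small => "small"
    case 10    => "ten"
    case _     => "other"
  }

  def main(args: Array[String]): Unit =
    println(classify(3)) // prints: small
}
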
diff --git a/test/files/pos/t6601/PrivateValueClass_1.scala b/test/files/pos/t6601/PrivateValueClass_1.scala
new file mode 100644
index 0000000000..85c3687137
--- /dev/null
+++ b/test/files/pos/t6601/PrivateValueClass_1.scala
@@ -0,0 +1 @@
+class V private (val a: Any) extends AnyVal \ No newline at end of file
diff --git a/test/files/pos/t6601/UsePrivateValueClass_2.scala b/test/files/pos/t6601/UsePrivateValueClass_2.scala
new file mode 100644
index 0000000000..461b8397b2
--- /dev/null
+++ b/test/files/pos/t6601/UsePrivateValueClass_2.scala
@@ -0,0 +1,10 @@
+object Test {
+ // After the first attempt to make separately compiled value
+ // classes respect the privacy of constructors, we got:
+ //
+ // exception when typing v.a().==(v.a())/class scala.reflect.internal.Trees$Apply
+ // constructor V in class V cannot be accessed in object Test in file test/files/pos/t6601/UsePrivateValueClass_2.scala
+ // scala.reflect.internal.Types$TypeError: constructor V in class V cannot be accessed in object Test
+ def foo(v: V) = v.a == v.a
+ def bar(v: V) = v == v
+}
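
Editorial sketch (not part of the patch): t6601 exercises a value class whose constructor is private, used from a separately compiled file. The usual companion-factory pairing for such a class, sketched with invented names (Meters, MetersDemo):

class Meters private (val value: Double) extends AnyVal {
  // The private constructor remains accessible inside the class and its companion.
  def +(other: Meters): Meters = new Meters(value + other.value)
}

object Meters {
  def apply(value: Double): Meters = new Meters(value) // factory hides the constructor from clients
}

object MetersDemo {
  def main(args: Array[String]): Unit =
    println((Meters(1.5) + Meters(2.5)).value) // prints: 4.0
}
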
diff --git a/test/files/pos/t6651.scala b/test/files/pos/t6651.scala
new file mode 100644
index 0000000000..55a3b74e4c
--- /dev/null
+++ b/test/files/pos/t6651.scala
@@ -0,0 +1,33 @@
+class YouAreYourself[A <: AnyRef](val you: A) extends AnyVal {
+ def yourself: you.type = you
+}
+
+object Test {
+ val s = ""
+ val s1: s.type = new YouAreYourself[s.type](s).yourself
+}
+
+trait Path {
+ type Dep <: AnyRef
+}
+
+final class ValueClass[P <: Path](val path: P) extends AnyVal {
+ import path.Dep
+
+ def apply(dep: Dep)(d2: dep.type, foo: Int): (Dep, d2.type) = (d2, d2)
+
+ // This generates dodgy code; note `ValueClass.this`:
+ //
+ // final def bounds$extension[D >: Nothing <: ValueClass.this.path.Dep,
+ // P >: Nothing <: Path]
+ // ($this: ValueClass[P])
+ // (dep: D)
+ // (d2: dep.type, foo: Int): (D, d2.type) = scala.Tuple2.apply[D, d2.type](d2, d2);
+ //
+ // Nothing crashes down the line, but it certainly doesn't conform to best practices.
+ //
+ // A better alternative would be to add a type parameter for the (singleton) type of
+ // the wrapped value.
+ def bounds[D <: Dep](dep: D)(d2: dep.type, foo: Int): (D, d2.type) = (d2, d2)
+}
+
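
Editorial sketch (not part of the patch): the comment in t6651 shows the bounds$extension method derived for a value-class method. A rough hand-written analogue of that encoding, with invented names (Wrapper, firstCharExtension) that are not compiler output:

final class Wrapper(val underlying: String) extends AnyVal {
  def firstChar: Option[Char] = underlying.headOption
}

object WrapperEncodingSketch {
  // Roughly what the extension-method phase produces for Wrapper.firstChar:
  // the receiver becomes an explicit parameter, so the call needs no Wrapper allocation.
  def firstCharExtension(underlying: String): Option[Char] =
    underlying.headOption

  def main(args: Array[String]): Unit = {
    println(new Wrapper("scala").firstChar) // Some(s)
    println(firstCharExtension("scala"))    // same result via the hand-written analogue
  }
}
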
diff --git a/test/files/pos/t6891.flags b/test/files/pos/t6891.flags
new file mode 100644
index 0000000000..fe048006aa
--- /dev/null
+++ b/test/files/pos/t6891.flags
@@ -0,0 +1 @@
+-Ycheck:extmethods -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6891.scala b/test/files/pos/t6891.scala
new file mode 100644
index 0000000000..bed2d0d777
--- /dev/null
+++ b/test/files/pos/t6891.scala
@@ -0,0 +1,26 @@
+object O {
+ implicit class Foo[A](val value: String) extends AnyVal {
+ def bippy() = {
+ @annotation.tailrec def loop(x: A): Unit = loop(x)
+ ()
+ }
+
+ def boppy() = {
+ @annotation.tailrec def loop(x: value.type): Unit = loop(x)
+ ()
+ }
+
+ def beppy[C](c: => C) = {
+ () => c
+ @annotation.tailrec def loop(x: value.type): Unit = loop(x)
+ () => c
+ ()
+ }
+ }
+ // uncaught exception during compilation: Types$TypeError("type mismatch;
+ // found : A(in method bippy$extension)
+ // required: A(in class Foo)") @ scala.tools.nsc.typechecker.Contexts$Context.issueCommon(Contexts.scala:396)
+ // error: scala.reflect.internal.Types$TypeError: type mismatch;
+ // found : A(in method bippy$extension)
+ // required: A(in class Foo)
+}
diff --git a/test/files/pos/t6942.flags b/test/files/pos/t6942.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6942.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6942/Bar.java b/test/files/pos/t6942/Bar.java
new file mode 100644
index 0000000000..592f62efb4
--- /dev/null
+++ b/test/files/pos/t6942/Bar.java
@@ -0,0 +1,235 @@
+package foo;
+
+public enum Bar {
+ ANGUILLA /*("US")*/,
+ ANTIGUA_AND_BARBUDA /*("US")*/,
+ ARGENTINA /*("US")*/,
+ ARUBA /*("US")*/,
+ BAHAMAS /*("US")*/,
+ BARBADOS /*("US")*/,
+ BELIZE /*("US")*/,
+ BERMUDA /*("US")*/,
+ BOLIVIA /*("US")*/,
+ BRAZIL /*("US")*/,
+ BRITISH_VIRGIN_ISLANDS /*("US")*/,
+ CANADA /*("US")*/,
+ CAYMAN_ISLANDS /*("US")*/,
+ CHILE /*("US")*/,
+ CHRISTMAS_ISLANDS /*("US")*/,
+ COCOS /*("US")*/,
+ COLOMBIA /*("US")*/,
+ COSTA_RICA /*("US")*/,
+ CUBA /*("US")*/,
+ DOMINICA /*("US")*/,
+ DOMINICAN_REPUBLIC /*("US")*/,
+ ECUADOR /*("US")*/,
+ EL_SALVADOR /*("US")*/,
+ FALKLAND_ISLANDS /*("US")*/,
+ GRENADA /*("US")*/,
+ GUADALOUPE /*("US")*/,
+ GUATEMALA /*("US")*/,
+ HAITI /*("US")*/,
+ HONDURAS /*("US")*/,
+ NETHERLANDS_ANTILLES /*("US")*/,
+ NICARAGUA /*("US")*/,
+ PANAMA /*("US")*/,
+ PARAGUAY /*("US")*/,
+ PERU /*("US")*/,
+ PUERTO_RICO /*("US")*/,
+ JAMAICA /*("US")*/,
+ MARTINIQUE /*("US")*/,
+ MEXICO /*("US")*/,
+ MONTSERRAT /*("US")*/,
+ ST_KITTS /*("US")*/,
+ ST_LUCIA /*("US")*/,
+ ST_VINCENT /*("US")*/,
+ SUPRA_NATIONAL /*("US")*/,
+ TRINIDAD /*("US")*/,
+ TURKS_AND_CAICOS /*("US")*/,
+ UNITED_STATES /*("US")*/,
+ URUGUAY /*("US")*/,
+ VENEZUELA /*("US")*/,
+ VIRGIN_ISLANDS /*("US")*/,
+
+ AUSTRALIA /*("AP")*/,
+ BANGLADESH /*("AP")*/,
+ BHUTAN /*("AP")*/,
+ CAMBODIA /*("AP")*/,
+ CHINA /*("AP")*/,
+ COOK_ISLANDS /*("AP")*/,
+ EAST_TIMOR /*("AP")*/,
+ FIJI /*("AP")*/,
+ GUAM /*("AP")*/,
+ HONG_KONG /*("AP")*/,
+ INDIA /*("AP")*/,
+ INDONESIA /*("AP")*/,
+ JAPAN /*("AP")*/,
+ KIRIBATI /*("AP")*/,
+ LAOS /*("AP")*/,
+ MACAU /*("AP")*/,
+ MALAYSIA /*("AP")*/,
+ MICRONESIA /*("AP")*/,
+ MONGOLIA /*("AP")*/,
+ MYANMAR /*("AP")*/,
+ NEPAL /*("AP")*/,
+ NEW_CALEDONIA /*("AP")*/,
+ NEW_ZEALAND /*("AP")*/,
+ NORFOLK_ISLAND /*("AP")*/,
+ NORTH_KOREA /*("AP")*/,
+ PAKISTAN /*("AP")*/,
+ PALAU /*("AP")*/,
+ PAPUA_NEW_GUINEA /*("AP")*/,
+ PHILIPPINES /*("AP")*/,
+ PITCAIRN_ISLANDS /*("AP")*/,
+ SAMOA /*("AP")*/,
+ WEST_SAMOA /*("AP")*/,
+ SINGAPORE /*("AP")*/,
+ SOUTH_KOREA /*("AP")*/,
+ SRI_LANKA /*("AP")*/,
+ TAIWAN /*("AP")*/,
+ THAILAND /*("AP")*/,
+ TOKELAU /*("AP")*/,
+ TONGA /*("AP")*/,
+ TUVALU /*("AP")*/,
+ VANUATU /*("AP")*/,
+ VIETNAM /*("AP")*/,
+
+ AFGHANISTAN /*("EU")*/,
+ ALBANIA /*("EU")*/,
+ ALGERIA /*("EU")*/,
+ ANDORRA /*("EU")*/,
+ ANGOLA /*("EU")*/,
+ ARMENIA /*("EU")*/,
+ AUSTRIA /*("EU")*/,
+ AZERBAIJAN /*("EU")*/,
+ BAHRAIN /*("EU")*/,
+ BELARUS /*("EU")*/,
+ BELGIUM /*("EU")*/,
+ BENIN /*("EU")*/,
+ BOSNIA_AND_HERZEGOVINA /*("EU")*/,
+ BOTSWANA /*("EU")*/,
+ BOUVET_ISLAND /*("EU")*/,
+ BRUNEI /*("EU")*/,
+ BULGARIA /*("EU")*/,
+ BURKINA_FASO /*("EU")*/,
+ BURUNDI /*("EU")*/,
+ CAMEROON /*("EU")*/,
+ CAPE_VERDE /*("EU")*/,
+ CHAD /*("EU")*/,
+ COMOROS /*("EU")*/,
+ CONGO /*("EU")*/,
+ CROATIA /*("EU")*/,
+ CYPRUS /*("EU")*/,
+ CZECH_REPUBLIC /*("EU")*/,
+ DR_CONGO /*("EU")*/,
+ DENMARK /*("EU")*/,
+ DJIBOUTI /*("EU")*/,
+ EGYPT /*("EU")*/,
+ EQUATORIAL_GUINEA /*("EU")*/,
+ ERITREA /*("EU")*/,
+ ESTONIA /*("EU")*/,
+ ETHIOPIA /*("EU")*/,
+ FAEROE_ISLANDS /*("EU")*/,
+ FINLAND /*("EU")*/,
+ FRANCE /*("EU")*/,
+ FRENCH_GUIANA /*("EU")*/,
+ GABON /*("EU")*/,
+ GAMBIA /*("EU")*/,
+ GEORGIA /*("EU")*/,
+ GERMANY /*("EU")*/,
+ GHANA /*("EU")*/,
+ GIBRALTAR /*("EU")*/,
+ GREAT_BRITAIN /*("EU")*/,
+ GREECE /*("EU")*/,
+ GREENLAND /*("EU")*/,
+ GUINEA /*("EU")*/,
+ GUINEA_BISSAU /*("EU")*/,
+ GUYANA /*("EU")*/,
+ HUNGARY /*("EU")*/,
+ ICELAND /*("EU")*/,
+ IRAN /*("EU")*/,
+ IRAQ /*("EU")*/,
+ IRELAND /*("EU")*/,
+ ISLE_OF_MAN /*("EU")*/,
+ ISRAEL /*("EU")*/,
+ ITALY /*("EU")*/,
+ IVORY_COAST /*("EU")*/,
+ JERSEY /*("EU")*/,
+ JORDAN /*("EU")*/,
+ KAZAKHSTAN /*("EU")*/,
+ KENYA /*("EU")*/,
+ KUWAIT /*("EU")*/,
+ KYRGYZSTAN /*("EU")*/,
+ LATVIA /*("EU")*/,
+ LEBANON /*("EU")*/,
+ LESOTHO /*("EU")*/,
+ LIBERIA /*("EU")*/,
+ LIBYA /*("EU")*/,
+ LIECHTENSTEIN /*("EU")*/,
+ LITHUANIA /*("EU")*/,
+ LUXEMBOURG /*("EU")*/,
+ MACEDONIA /*("EU")*/,
+ MADAGASCAR /*("EU")*/,
+ MALAWI /*("EU")*/,
+ MALDIVES /*("EU")*/,
+ MALI /*("EU")*/,
+ MALTA /*("EU")*/,
+ MARSHALL_ISLAND /*("EU")*/,
+ MAURITANIA /*("EU")*/,
+ MAURITIUS /*("EU")*/,
+ MAYOTTE /*("EU")*/,
+ MOLDOVA /*("EU")*/,
+ MONACO /*("EU")*/,
+ MOROCCO /*("EU")*/,
+ MOZAMBIQUE /*("EU")*/,
+ NAMIBIA /*("EU")*/,
+ NETHERLANDS /*("EU")*/,
+ NIGER_REPUBLIC /*("EU")*/,
+ NIGERIA /*("EU")*/,
+ NORWAY /*("EU")*/,
+ OMAN /*("EU")*/,
+ PALESTINE /*("EU")*/,
+ POLAND /*("EU")*/,
+ PORTUGAL /*("EU")*/,
+ QATAR /*("EU")*/,
+ REUNION /*("EU")*/,
+ ROMANIA /*("EU")*/,
+ RUSSIA /*("EU")*/,
+ RWANDA /*("EU")*/,
+ SAN_MARINO /*("EU")*/,
+ SAO_TOME /*("EU")*/,
+ SAUDI_ARABIA /*("EU")*/,
+ SENEGAL /*("EU")*/,
+ SERBIA /*("EU")*/,
+ SEYCHELLES /*("EU")*/,
+ SEIRRA_LEONE /*("EU")*/,
+ SLOVAKIA /*("EU")*/,
+ SLOVENIA /*("EU")*/,
+ SOMALIA /*("EU")*/,
+ SOUTH_AFRICA /*("EU")*/,
+ SPAIN /*("EU")*/,
+ ST_HELENA /*("EU")*/,
+ SUDAN /*("EU")*/,
+ SURINAME /*("EU")*/,
+ SVALBARD /*("EU")*/,
+ SWAZILAND /*("EU")*/,
+ SWEDEN /*("EU")*/,
+ SWITZERLAND /*("EU")*/,
+ SYRIA /*("EU")*/,
+ TAJIKSTAN /*("EU")*/,
+ TANZANIA /*("EU")*/,
+ TOGO /*("EU")*/,
+ TUNISIA /*("EU")*/,
+ TURKEY /*("EU")*/,
+ TURKMENISTAN /*("EU")*/,
+ UAE /*("EU")*/,
+ UGANDA /*("EU")*/,
+ UKRAINE /*("EU")*/,
+ UZBEKISTAN /*("EU")*/,
+ VATICAN_CITY /*("EU")*/,
+ WESTERN_SAHARA /*("EU")*/,
+ YEMEN /*("EU")*/,
+ ZAMBIA /*("EU")*/,
+ ZIMBABWE /*("EU")*/;
+
+} \ No newline at end of file
diff --git a/test/files/pos/t6942/t6942.scala b/test/files/pos/t6942/t6942.scala
new file mode 100644
index 0000000000..77963d2634
--- /dev/null
+++ b/test/files/pos/t6942/t6942.scala
@@ -0,0 +1,64 @@
+// not a peep out of the pattern matcher's unreachability analysis
+// its budget should suffice for these simple matches (they do have a large search space)
+class Test {
+ import foo.Bar // a large enum
+ def exhaustUnreachabilitysStack_ENUM_STYLE = (null: Bar) match {
+ case Bar.BULGARIA =>
+ case _ =>
+ }
+
+ // lots of strings
+ def exhaustUnreachabilitysStack_StringStyle = "foo" match {
+ case "a" =>
+ case "b" =>
+ case "c" =>
+ case "d" =>
+ case "e" =>
+ case "f" =>
+ case "aa" =>
+ case "ba" =>
+ case "ca" =>
+ case "da" =>
+ case "ea" =>
+ case "f1a" =>
+ case "a1a" =>
+ case "b1a" =>
+ case "c1a" =>
+ case "d1a" =>
+ case "e1a" =>
+ case "f1a2" =>
+ case "f1a0" =>
+ case "a1a2" =>
+ case "b1a2" =>
+ case "c1a2" =>
+ case "d1a2" =>
+ case "e1a2" =>
+ case "f1a3" =>
+ case "_a" =>
+ case "_b" =>
+ case "_c" =>
+ case "_d" =>
+ case "_e" =>
+ case "_f" =>
+ case "_aa" =>
+ case "_ba" =>
+ case "_ca" =>
+ case "_da" =>
+ case "_ea" =>
+ case "_f1a" =>
+ case "_a1a" =>
+ case "_b1a" =>
+ case "_c1a" =>
+ case "_d1a" =>
+ case "_e1a" =>
+ case "_f1a0" =>
+ case "_f1a2" =>
+ case "_a1a2" =>
+ case "_b1a2" =>
+ case "_c1a2" =>
+ case "_d1a2" =>
+ case "_e1a2" =>
+ case "_f1a3" =>
+ case _ =>
+ }
+}
diff --git a/test/files/pos/t6963c.flags b/test/files/pos/t6963c.flags
new file mode 100644
index 0000000000..4d6e04914f
--- /dev/null
+++ b/test/files/pos/t6963c.flags
@@ -0,0 +1 @@
+-Xmigration:2.9 -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6963c.scala b/test/files/pos/t6963c.scala
new file mode 100644
index 0000000000..0b6b5c757f
--- /dev/null
+++ b/test/files/pos/t6963c.scala
@@ -0,0 +1,25 @@
+object Test {
+ def f1(x: Any) = x.isInstanceOf[Seq[_]]
+ def f2(x: Any) = x match {
+ case _: Seq[_] => true
+ case _ => false
+ }
+
+ def f3(x: Any) = x match {
+ case _: Array[_] => true
+ case _ => false
+ }
+
+ def f4(x: Any) = x.isInstanceOf[Traversable[_]]
+
+ def f5(x1: Any, x2: Any, x3: AnyRef) = (x1, x2, x3) match {
+ case (Some(_: Seq[_]), Nil, _) => 1
+ case (None, List(_: List[_], _), _) => 2
+ case _ => 3
+ }
+
+ def f5: Unit = {
+ import scala.collection.mutable._
+ List(1,2,3,4,5).scanRight(0)(_+_)
+ }
+}
diff --git a/test/files/pos/t6976/Exts_1.scala b/test/files/pos/t6976/Exts_1.scala
new file mode 100644
index 0000000000..9b3a69edd9
--- /dev/null
+++ b/test/files/pos/t6976/Exts_1.scala
@@ -0,0 +1,10 @@
+object Exts {
+ implicit class AnyExts[T](val o: T) extends AnyVal {
+ def moo = "moo!"
+ }
+}
+
+trait Exts {
+ import language.implicitConversions
+ implicit def AnyExts[T](o: T) = Exts.AnyExts(o)
+}
diff --git a/test/files/pos/t6976/ImplicitBug_1.scala b/test/files/pos/t6976/ImplicitBug_1.scala
new file mode 100644
index 0000000000..c9031bab2e
--- /dev/null
+++ b/test/files/pos/t6976/ImplicitBug_1.scala
@@ -0,0 +1,27 @@
+// This one is weird and nasty. It is not clear whether this is a scalac or an sbt
+// (tried with 0.12 & 0.12.2-RC2) bug.
+//
+// A level of indirection is required to trigger this bug.
+// Exts seems to need to be defined in a separate file.
+//
+// Steps to reproduce:
+// 1. sbt clean
+// 2. sbt run (it works)
+// 3. Comment A & uncomment B.
+// 4. sbt run (it fails)
+// 5. Switch it back & sbt run. It still fails.
+//
+// In this project sbt clean helps. However, in a large project where this
+// bug was found, the compiler crashed even after doing sbt clean. The only
+// way to work around this was to reference the Exts object explicitly (C) in
+// the source file using its implicit classes.
+
+// Let's suppose this is a mega-trait combining all sorts of helper
+// functionality.
+trait Support extends Exts
+
+object ImplicitsBug extends App with Support { // A
+// object ImplicitsBug extends App with Exts { // B
+ //Exts // C) this reference helped in the large project.
+ println(3.moo)
+}
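
Editorial sketch (not part of the patch): t6976 revolves around an implicit value class (AnyExts) adding the moo extension method, reached through a trait mix-in or an explicit reference. The basic import-based form of the pattern, with invented names (StringExts, RichStr, shout):

object StringExts {
  implicit class RichStr(val self: String) extends AnyVal {
    def shout: String = self.toUpperCase + "!"
  }
}

object StringExtsDemo {
  import StringExts._ // makes the extension method visible without a trait mix-in

  def main(args: Array[String]): Unit =
    println("moo".shout) // prints: MOO!
}
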
diff --git a/test/files/pos/t6976/ImplicitBug_2.scala b/test/files/pos/t6976/ImplicitBug_2.scala
new file mode 100644
index 0000000000..2fea5e2993
--- /dev/null
+++ b/test/files/pos/t6976/ImplicitBug_2.scala
@@ -0,0 +1,7 @@
+trait Support extends Exts
+
+// object ImplicitsBug extends App with Support { // A
+object ImplicitsBug extends App with Exts { // B
+ //Exts // C) this reference helped in the large project.
+ println(3.moo)
+}
diff --git a/test/files/pos/t6994.flags b/test/files/pos/t6994.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6994.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6994.scala b/test/files/pos/t6994.scala
new file mode 100644
index 0000000000..d707196423
--- /dev/null
+++ b/test/files/pos/t6994.scala
@@ -0,0 +1,8 @@
+object Test {
+ object NF {
+ def unapply(t: Throwable): Option[Throwable] = None
+ }
+ val x = (try { None } catch { case NF(ex) => None }) getOrElse 0
+ // Was emitting a spurious warning post typer:
+ // "This catches all Throwables. If this is really intended, use `case ex6 : Throwable` to clear this warning."
+}
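
Editorial sketch (not part of the patch): the standard library's scala.util.control.NonFatal extractor is used in catch clauses the same way as the NF extractor in t6994, and it is the usual way to avoid a bare catch-all in real code. A minimal illustration, with invented names (CatchExtractorSketch, parseOrZero):

import scala.util.control.NonFatal

object CatchExtractorSketch {
  def parseOrZero(s: String): Int =
    try s.trim.toInt
    catch { case NonFatal(_) => 0 } // extractor-based catch; no bare `case _: Throwable`

  def main(args: Array[String]): Unit = {
    println(parseOrZero("42"))   // prints: 42
    println(parseOrZero("oops")) // prints: 0
  }
}
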
diff --git a/test/files/pos/t7011.flags b/test/files/pos/t7011.flags
new file mode 100644
index 0000000000..a4c161553e
--- /dev/null
+++ b/test/files/pos/t7011.flags
@@ -0,0 +1 @@
+-Ydebug -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7011.scala b/test/files/pos/t7011.scala
new file mode 100644
index 0000000000..539f662bc0
--- /dev/null
+++ b/test/files/pos/t7011.scala
@@ -0,0 +1,7 @@
+object bar {
+ def foo {
+ lazy val x = 42
+
+ {()=>x}
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7022.scala b/test/files/pos/t7022.scala
new file mode 100644
index 0000000000..0609e2d250
--- /dev/null
+++ b/test/files/pos/t7022.scala
@@ -0,0 +1,9 @@
+class Catch[+T] {
+ def either[U >: T](body: => U): Either[Throwable, U] = ???
+}
+
+object Test {
+ implicit class RichCatch[T](val c: Catch[T]) extends AnyVal {
+ def validation[U >: T](u: => U): Either[Throwable, U] = c.either(u)
+ }
+}
diff --git a/test/files/pos/t7033.scala b/test/files/pos/t7033.scala
new file mode 100644
index 0000000000..a4d256673b
--- /dev/null
+++ b/test/files/pos/t7033.scala
@@ -0,0 +1,15 @@
+import language.higherKinds
+object Wrap {
+ implicit class X[X](val a: X)
+
+ X[Int](0)
+}
+
+class Wrap {
+ implicit class Y[Y](val a: Y)
+ Y[Int](0)
+ implicit class Z[Z[_]](val a: Z[Wrap.this.Z[Z]])
+ Z[List](List(new Z[List](null)))
+}
+
+case class X[X](val a: X)
diff --git a/test/files/pos/t7035.scala b/test/files/pos/t7035.scala
new file mode 100644
index 0000000000..f45bd0a878
--- /dev/null
+++ b/test/files/pos/t7035.scala
@@ -0,0 +1,15 @@
+case class Y(final var x: Int, final private var y: String, final val z1: Boolean, final private val z2: Any) {
+
+ import Test.{y => someY}
+ List(someY.x: Int, someY.y: String, someY.z1: Boolean, someY.z2: Any)
+ someY.y = ""
+}
+
+object Test {
+ val y = Y(0, "", true, new {})
+ val unapp: Option[(Int, String, Boolean, Any)] = // was (Int, Boolean, String, Any) !!
+ Y.unapply(y)
+
+ val Y(a, b, c, d) = y
+ List(a: Int, b: String, c: Boolean, d: Any)
+}
diff --git a/test/files/presentation/doc.check b/test/files/presentation/doc.check
index 62b3bfc2e3..e33756773d 100755..100644
--- a/test/files/presentation/doc.check
+++ b/test/files/presentation/doc.check
@@ -1,3 +1,4 @@
+reload: Test.scala
body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
))))))
@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
diff --git a/test/files/presentation/doc.scala b/test/files/presentation/doc/doc.scala
index 4b0d6baa1f..475d92b861 100755
--- a/test/files/presentation/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -1,5 +1,5 @@
import scala.tools.nsc.doc
-import scala.tools.nsc.doc.base.LinkTo
+import scala.tools.nsc.doc.base._
import scala.tools.nsc.doc.base.comment._
import scala.tools.nsc.interactive._
import scala.tools.nsc.interactive.tests._
@@ -28,12 +28,33 @@ object Test extends InteractiveTest {
|trait Commented {}
|class User(c: %sCommented)""".stripMargin.format(comment, tags take nTags mkString "\n", caret)
- override def main(args: Array[String]) {
- val documenter = new Doc(settings) {
- val global: compiler.type = compiler
-
+ override lazy val compiler = {
+ new {
+ override val settings = {
+ prepareSettings(Test.this.settings)
+ Test.this.settings
+ }
+ } with Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
+ val global: this.type = this
def chooseLink(links: List[LinkTo]): LinkTo = links.head
+ def internalLink(sym: Symbol, site: Symbol) = None
+ def toString(link: LinkTo) = link.toString
+
+ def getComment(sym: Symbol, source: SourceFile) = {
+ val docResponse = new Response[(String, String, Position)]
+ askDocComment(sym, sym.owner, source, docResponse)
+ docResponse.get.left.toOption flatMap {
+ case (expanded, raw, pos) =>
+ if (expanded.isEmpty)
+ None
+ else
+ Some(ask { () => parseAtSymbol(expanded, raw, pos, Some(sym.owner)) })
+ }
+ }
}
+ }
+
+ override def runDefaultTests() {
for (i <- 1 to tags.length) {
val markedText = text(i)
val idx = markedText.indexOf(caret)
@@ -52,18 +73,17 @@ object Test extends InteractiveTest {
treeResponse.get.left.toOption match {
case Some(tree) =>
val sym = tree.tpe.typeSymbol
- documenter.retrieve(sym, sym.owner) match {
- case Some(HtmlResult(comment)) =>
- import comment._
- val tags: List[(String, Iterable[Body])] =
- List(("@example", example), ("@version", version), ("@since", since.toList), ("@todo", todo), ("@note", note), ("@see", see))
- val str = ("body:" + body + "\n") +
- tags.map{ case (name, bodies) => name + ":" + bodies.mkString("\n") }.mkString("\n")
- reporter.println(str)
- case Some(_) => reporter.println("Got unexpected result")
- case None => reporter.println("Got no result")
+ compiler.getComment(sym, batch) match {
+ case None => println("Got no doc comment")
+ case Some(comment) =>
+ import comment._
+ val tags: List[(String, Iterable[Body])] =
+ List(("@example", example), ("@version", version), ("@since", since.toList), ("@todo", todo), ("@note", note), ("@see", see))
+ val str = ("body:" + body + "\n") +
+ tags.map{ case (name, bodies) => name + ":" + bodies.mkString("\n") }.mkString("\n")
+ println(str)
}
- case None => reporter.println("Couldn't find a typedTree")
+ case None => println("Couldn't find a typedTree")
}
}
}
diff --git a/test/files/presentation/ide-t1001326.check b/test/files/presentation/ide-t1001326.check
new file mode 100644
index 0000000000..0ac15faed4
--- /dev/null
+++ b/test/files/presentation/ide-t1001326.check
@@ -0,0 +1,4 @@
+Unique OK
+Unattributed OK
+NeverModify OK
+AlwaysParseTree OK \ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326/Test.scala b/test/files/presentation/ide-t1001326/Test.scala
new file mode 100644
index 0000000000..3091da4b40
--- /dev/null
+++ b/test/files/presentation/ide-t1001326/Test.scala
@@ -0,0 +1,91 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+import scala.reflect.internal.util.SourceFile
+import scala.tools.nsc.interactive.Response
+
+object Test extends InteractiveTest {
+
+ override def execute(): Unit = {
+ val sf = sourceFiles.find(_.file.name == "A.scala").head
+ uniqueParseTree_t1001326(sf)
+ unattributedParseTree_t1001326(sf)
+ neverModifyParseTree_t1001326(sf)
+ shouldAlwaysReturnParseTree_t1001326(sf)
+ }
+
+ /**
+ * Asking twice for a parseTree on the same source should always return a new tree
+ */
+ private def uniqueParseTree_t1001326(sf: SourceFile) {
+ val parseTree1 = compiler.parseTree(sf)
+ val parseTree2 = compiler.parseTree(sf)
+ if (parseTree1 != parseTree2) {
+ reporter.println("Unique OK")
+ } else {
+ reporter.println("Unique FAILED")
+ }
+ }
+
+ /**
+ * A parseTree should never contain any symbols or types
+ */
+ private def unattributedParseTree_t1001326(sf: SourceFile) {
+ if (noSymbolsOrTypes(compiler.parseTree(sf))) {
+ reporter.println("Unattributed OK")
+ } else {
+ reporter.println("Unattributed FAILED")
+ }
+ }
+
+ /**
+ * Once you have obtained a parseTree it should never change
+ */
+ private def neverModifyParseTree_t1001326(sf: SourceFile) {
+ val parsedTree = compiler.parseTree(sf)
+ loadSourceAndWaitUntilTypechecked(sf)
+ if (noSymbolsOrTypes(parsedTree)) {
+ reporter.println("NeverModify OK")
+ } else {
+ reporter.println("NeverModify FAILED")
+ }
+ }
+
+ /**
+ * Should always return a parse tree
+ */
+ private def shouldAlwaysReturnParseTree_t1001326(sf: SourceFile) {
+ loadSourceAndWaitUntilTypechecked(sf)
+ if (noSymbolsOrTypes(compiler.parseTree(sf))) {
+ reporter.println("AlwaysParseTree OK")
+ } else {
+ reporter.println("AlwaysParseTree FAILED")
+ }
+ }
+
+ /**
+ * Load a source and block while it is type-checking.
+ */
+ private def loadSourceAndWaitUntilTypechecked(sf: SourceFile): Unit = {
+ compiler.askToDoFirst(sf)
+ val res = new Response[Unit]
+ compiler.askReload(List(sf), res)
+ res.get
+ askLoadedTyped(sf).get
+ }
+
+ /**
+ * Traverses a tree and makes sure that there are no types or symbols present in the tree with
+ * the exception of the symbol for the package 'scala'. This is because that symbol will be
+ * present in some of the nodes that the compiler generates.
+ */
+ private def noSymbolsOrTypes(tree: compiler.Tree): Boolean = {
+ tree.forAll { t =>
+ (t.symbol == null ||
+ t.symbol == compiler.NoSymbol ||
+ t.symbol == compiler.definitions.ScalaPackage // ignore the symbol for the scala package for now
+ ) && (
+ t.tpe == null ||
+ t.tpe == compiler.NoType)
+ }
+ }
+
+} \ No newline at end of file
diff --git a/test/files/presentation/ide-t1001326/src/a/A.scala b/test/files/presentation/ide-t1001326/src/a/A.scala
new file mode 100644
index 0000000000..c82ca02231
--- /dev/null
+++ b/test/files/presentation/ide-t1001326/src/a/A.scala
@@ -0,0 +1,5 @@
+package a
+
+class A {
+ def foo(s: String) = s + s
+} \ No newline at end of file
diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check
new file mode 100644
index 0000000000..7d8d181306
--- /dev/null
+++ b/test/files/run/analyzerPlugins.check
@@ -0,0 +1,196 @@
+adaptBoundsToAnnots(List( <: Int), List(type T), List(Int @testAnn)) [2]
+annotationsConform(Boolean @testAnn, Boolean) [1]
+annotationsConform(Boolean(false), Boolean @testAnn) [1]
+annotationsConform(Int @testAnn, ?A) [1]
+annotationsConform(Int @testAnn, Any) [1]
+annotationsConform(Int @testAnn, Int) [2]
+annotationsConform(Int(1) @testAnn, Int) [1]
+annotationsConform(Int(1), Int @testAnn) [1]
+annotationsConform(Nothing, Int @testAnn) [2]
+annotationsConform(String @testAnn, String) [1]
+canAdaptAnnotations(Trees$Ident, String) [1]
+canAdaptAnnotations(Trees$Select, ?) [1]
+canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1]
+canAdaptAnnotations(Trees$Select, Boolean) [1]
+canAdaptAnnotations(Trees$Select, String @testAnn) [1]
+canAdaptAnnotations(Trees$TypeTree, ?) [10]
+canAdaptAnnotations(Trees$Typed, ?) [3]
+canAdaptAnnotations(Trees$Typed, Any) [1]
+canAdaptAnnotations(Trees$Typed, Int) [1]
+lub(List(Int @testAnn, Int)) [1]
+pluginsPt(?, Trees$Annotated) [7]
+pluginsPt(?, Trees$Apply) [8]
+pluginsPt(?, Trees$ApplyImplicitView) [2]
+pluginsPt(?, Trees$Assign) [7]
+pluginsPt(?, Trees$Block) [7]
+pluginsPt(?, Trees$ClassDef) [2]
+pluginsPt(?, Trees$DefDef) [14]
+pluginsPt(?, Trees$Ident) [49]
+pluginsPt(?, Trees$If) [2]
+pluginsPt(?, Trees$Literal) [20]
+pluginsPt(?, Trees$New) [5]
+pluginsPt(?, Trees$PackageDef) [1]
+pluginsPt(?, Trees$Return) [1]
+pluginsPt(?, Trees$Select) [51]
+pluginsPt(?, Trees$Super) [2]
+pluginsPt(?, Trees$This) [20]
+pluginsPt(?, Trees$TypeApply) [3]
+pluginsPt(?, Trees$TypeBoundsTree) [2]
+pluginsPt(?, Trees$TypeDef) [1]
+pluginsPt(?, Trees$TypeTree) [37]
+pluginsPt(?, Trees$Typed) [1]
+pluginsPt(?, Trees$ValDef) [23]
+pluginsPt(Any, Trees$Literal) [2]
+pluginsPt(Any, Trees$Typed) [1]
+pluginsPt(Array[Any], Trees$ArrayValue) [1]
+pluginsPt(Boolean @testAnn, Trees$Literal) [1]
+pluginsPt(Boolean @testAnn, Trees$Select) [1]
+pluginsPt(Boolean, Trees$Apply) [1]
+pluginsPt(Boolean, Trees$Ident) [1]
+pluginsPt(Boolean, Trees$Literal) [1]
+pluginsPt(Double, Trees$Select) [1]
+pluginsPt(Int @testAnn, Trees$Literal) [1]
+pluginsPt(Int, Trees$Apply) [1]
+pluginsPt(Int, Trees$Ident) [2]
+pluginsPt(Int, Trees$If) [1]
+pluginsPt(Int, Trees$Literal) [6]
+pluginsPt(Int, Trees$Select) [3]
+pluginsPt(List, Trees$Apply) [1]
+pluginsPt(List[Any], Trees$Select) [1]
+pluginsPt(String @testAnn, Trees$Select) [1]
+pluginsPt(String, Trees$Apply) [1]
+pluginsPt(String, Trees$Block) [2]
+pluginsPt(String, Trees$Ident) [4]
+pluginsPt(String, Trees$Literal) [1]
+pluginsPt(String, Trees$Select) [1]
+pluginsPt(String, Trees$Typed) [1]
+pluginsPt(Unit, Trees$Assign) [1]
+pluginsPt(scala.annotation.Annotation, Trees$Apply) [5]
+pluginsTypeSig(<none>, Trees$Template) [2]
+pluginsTypeSig(class A, Trees$ClassDef) [1]
+pluginsTypeSig(class testAnn, Trees$ClassDef) [1]
+pluginsTypeSig(constructor A, Trees$DefDef) [2]
+pluginsTypeSig(constructor testAnn, Trees$DefDef) [1]
+pluginsTypeSig(method foo, Trees$DefDef) [1]
+pluginsTypeSig(method method, Trees$DefDef) [1]
+pluginsTypeSig(method nested, Trees$DefDef) [1]
+pluginsTypeSig(type T, Trees$TypeDef) [2]
+pluginsTypeSig(value annotField, Trees$ValDef) [2]
+pluginsTypeSig(value f, Trees$ValDef) [1]
+pluginsTypeSig(value inferField, Trees$ValDef) [2]
+pluginsTypeSig(value lub1, Trees$ValDef) [2]
+pluginsTypeSig(value lub2, Trees$ValDef) [2]
+pluginsTypeSig(value param, Trees$ValDef) [2]
+pluginsTypeSig(value str, Trees$ValDef) [1]
+pluginsTypeSig(value x, Trees$ValDef) [5]
+pluginsTypeSig(value y, Trees$ValDef) [5]
+pluginsTypeSig(variable count, Trees$ValDef) [3]
+pluginsTypeSigAccessor(value annotField) [1]
+pluginsTypeSigAccessor(value inferField) [1]
+pluginsTypeSigAccessor(value lub1) [1]
+pluginsTypeSigAccessor(value lub2) [1]
+pluginsTypeSigAccessor(value x) [1]
+pluginsTypeSigAccessor(value y) [1]
+pluginsTypeSigAccessor(variable count) [2]
+pluginsTyped( <: Int, Trees$TypeBoundsTree) [2]
+pluginsTyped(()Object, Trees$Select) [1]
+pluginsTyped(()String, Trees$Ident) [1]
+pluginsTyped(()String, Trees$TypeApply) [1]
+pluginsTyped(()scala.annotation.Annotation, Trees$Select) [1]
+pluginsTyped(()testAnn, Trees$Select) [10]
+pluginsTyped((str: String)A <and> (param: Double)A, Trees$Select) [1]
+pluginsTyped((x$1: Any)Boolean <and> (x: Double)Boolean <and> (x: Float)Boolean <and> (x: Long)Boolean <and> (x: Int)Boolean <and> (x: Char)Boolean <and> (x: Short)Boolean <and> (x: Byte)Boolean, Trees$Select) [1]
+pluginsTyped((x$1: Int)Unit, Trees$Select) [1]
+pluginsTyped((x: Double)Double <and> (x: Float)Float <and> (x: Long)Long <and> (x: Int)Int <and> (x: Char)Int <and> (x: Short)Int <and> (x: Byte)Int <and> (x: String)String, Trees$Select) [1]
+pluginsTyped((x: String)scala.collection.immutable.StringOps, Trees$Select) [2]
+pluginsTyped((xs: Array[Any])scala.collection.mutable.WrappedArray[Any], Trees$TypeApply) [1]
+pluginsTyped(<empty>.type, Trees$Ident) [1]
+pluginsTyped(<error>, Trees$Select) [1]
+pluginsTyped(<notype>, Trees$ClassDef) [2]
+pluginsTyped(<notype>, Trees$DefDef) [14]
+pluginsTyped(<notype>, Trees$PackageDef) [1]
+pluginsTyped(<notype>, Trees$TypeDef) [1]
+pluginsTyped(<notype>, Trees$ValDef) [23]
+pluginsTyped(<root>, Trees$Ident) [1]
+pluginsTyped(=> Boolean @testAnn, Trees$Select) [1]
+pluginsTyped(=> Double, Trees$Select) [4]
+pluginsTyped(=> Int, Trees$Select) [5]
+pluginsTyped(=> Int, Trees$TypeApply) [1]
+pluginsTyped(=> String @testAnn, Trees$Select) [1]
+pluginsTyped(A, Trees$Apply) [1]
+pluginsTyped(A, Trees$Ident) [2]
+pluginsTyped(A, Trees$This) [8]
+pluginsTyped(A, Trees$TypeTree) [4]
+pluginsTyped(A.super.type, Trees$Super) [1]
+pluginsTyped(A.this.type, Trees$This) [11]
+pluginsTyped(Any, Trees$TypeTree) [1]
+pluginsTyped(AnyRef, Trees$Select) [2]
+pluginsTyped(Array[Any], Trees$ArrayValue) [1]
+pluginsTyped(Boolean @testAnn, Trees$Select) [1]
+pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4]
+pluginsTyped(Boolean(false), Trees$Literal) [2]
+pluginsTyped(Boolean, Trees$Apply) [1]
+pluginsTyped(Boolean, Trees$Select) [4]
+pluginsTyped(Char('c'), Trees$Literal) [2]
+pluginsTyped(Double, Trees$Select) [6]
+pluginsTyped(Int @testAnn, Trees$TypeTree) [2]
+pluginsTyped(Int @testAnn, Trees$Typed) [2]
+pluginsTyped(Int(0), Trees$Literal) [3]
+pluginsTyped(Int(1) @testAnn, Trees$Typed) [1]
+pluginsTyped(Int(1), Trees$Literal) [9]
+pluginsTyped(Int(2), Trees$Literal) [1]
+pluginsTyped(Int, Trees$Apply) [1]
+pluginsTyped(Int, Trees$Ident) [2]
+pluginsTyped(Int, Trees$If) [2]
+pluginsTyped(Int, Trees$Select) [17]
+pluginsTyped(Int, Trees$TypeTree) [13]
+pluginsTyped(List, Trees$Apply) [1]
+pluginsTyped(List, Trees$Select) [1]
+pluginsTyped(List[Any], Trees$Apply) [1]
+pluginsTyped(List[Any], Trees$Select) [1]
+pluginsTyped(List[Any], Trees$TypeTree) [3]
+pluginsTyped(Nothing, Trees$Return) [1]
+pluginsTyped(Nothing, Trees$Select) [2]
+pluginsTyped(Object, Trees$Apply) [1]
+pluginsTyped(String @testAnn, Trees$Ident) [1]
+pluginsTyped(String @testAnn, Trees$Select) [1]
+pluginsTyped(String @testAnn, Trees$TypeTree) [4]
+pluginsTyped(String(""), Trees$Literal) [2]
+pluginsTyped(String("huhu"), Trees$Literal) [1]
+pluginsTyped(String("str") @testAnn, Trees$Typed) [1]
+pluginsTyped(String("str"), Trees$Literal) [1]
+pluginsTyped(String("str"), Trees$Typed) [1]
+pluginsTyped(String("two"), Trees$Literal) [3]
+pluginsTyped(String, Trees$Apply) [2]
+pluginsTyped(String, Trees$Block) [2]
+pluginsTyped(String, Trees$Ident) [1]
+pluginsTyped(String, Trees$Select) [9]
+pluginsTyped(String, Trees$TypeTree) [8]
+pluginsTyped(Unit, Trees$Apply) [2]
+pluginsTyped(Unit, Trees$Assign) [8]
+pluginsTyped(Unit, Trees$Block) [7]
+pluginsTyped(Unit, Trees$If) [1]
+pluginsTyped(Unit, Trees$Literal) [8]
+pluginsTyped(Unit, Trees$TypeTree) [1]
+pluginsTyped([A](xs: A*)List[A], Trees$Select) [1]
+pluginsTyped([T <: Int]=> Int, Trees$Select) [1]
+pluginsTyped([T0 >: ? <: ?]()T0, Trees$Select) [1]
+pluginsTyped([T](xs: Array[T])scala.collection.mutable.WrappedArray[T], Trees$Select) [1]
+pluginsTyped(annotation.type, Trees$Select) [2]
+pluginsTyped(math.type, Trees$Select) [9]
+pluginsTyped(scala.annotation.Annotation, Trees$Apply) [1]
+pluginsTyped(scala.annotation.TypeConstraint, Trees$Select) [4]
+pluginsTyped(scala.collection.immutable.List.type, Trees$Select) [2]
+pluginsTyped(scala.collection.immutable.StringOps, Trees$ApplyImplicitView) [2]
+pluginsTyped(scala.collection.mutable.WrappedArray[Any], Trees$Apply) [1]
+pluginsTyped(scala.type, Trees$Ident) [1]
+pluginsTyped(scala.type, Trees$Select) [1]
+pluginsTyped(str.type, Trees$Ident) [3]
+pluginsTyped(testAnn, Trees$Apply) [5]
+pluginsTyped(testAnn, Trees$Ident) [5]
+pluginsTyped(testAnn, Trees$New) [5]
+pluginsTyped(testAnn, Trees$This) [1]
+pluginsTyped(testAnn, Trees$TypeTree) [2]
+pluginsTyped(testAnn.super.type, Trees$Super) [1]
+pluginsTyped(type, Trees$Select) [1]
+pluginsTypedReturn(return f, String) [1]
diff --git a/test/files/run/analyzerPlugins.scala b/test/files/run/analyzerPlugins.scala
new file mode 100644
index 0000000000..daef83fa30
--- /dev/null
+++ b/test/files/run/analyzerPlugins.scala
@@ -0,0 +1,121 @@
+import scala.tools.partest._
+import scala.tools.nsc._
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp"
+
+ def code = """
+ class testAnn extends annotation.TypeConstraint
+
+ class A(param: Double) extends { val x: Int = 1; val y = "two"; type T = A } with AnyRef {
+ val inferField = ("str": @testAnn)
+ val annotField: Boolean @testAnn = false
+
+ val lub1 = List('c', (1: Int @testAnn), "")
+ val lub2 = if (annotField) (1: @testAnn) else 2
+
+ def foo[T <: Int] = 0
+ foo[Int @testAnn]
+
+ var count = 0
+
+ math.random // some statement
+
+ def method: String = {
+ math.random
+ val f = inferField
+
+ def nested(): String = {
+ if(count == 1)
+ return f
+ "huhu"
+ }
+ nested()
+ }
+
+ def this(str: String) {
+ this(str.toDouble)
+ math.random
+ count += 1
+ }
+ }
+ """.trim
+
+
+ def show() {
+ val global = newCompiler()
+ import global._
+ import analyzer._
+
+ val output = collection.mutable.ListBuffer[String]()
+
+ object annotChecker extends AnnotationChecker {
+ def hasTestAnn(tps: Type*) = {
+ tps exists (_.annotations.map(_.toString) contains "testAnn")
+ }
+
+ def annotationsConform(tpe1: Type, tpe2: Type): Boolean = {
+ if (hasTestAnn(tpe1, tpe2))
+ output += s"annotationsConform($tpe1, $tpe2)"
+ true
+ }
+
+ override def annotationsLub(tp: Type, ts: List[Type]): Type = {
+ if (hasTestAnn(ts: _*))
+ output += s"lub($ts)"
+ tp
+ }
+
+ override def adaptBoundsToAnnotations(bounds: List[TypeBounds], tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = {
+ if (hasTestAnn(targs: _*))
+ output += s"adaptBoundsToAnnots($bounds, $tparams, $targs)"
+ bounds
+ }
+ }
+
+ object analyzerPlugin extends AnalyzerPlugin {
+ def treeClass(t: Tree) = t.getClass.toString.split('.').last
+
+ override def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Int): Type = {
+ output += s"pluginsPt($pt, ${treeClass(tree)})"
+ pt
+ }
+
+ override def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Int, pt: Type): Type = {
+ output += s"pluginsTyped($tpe, ${treeClass(tree)})"
+ tpe
+ }
+
+ override def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = {
+ output += s"pluginsTypeSig(${defTree.symbol}, ${treeClass(defTree)})"
+ tpe
+ }
+
+ override def pluginsTypeSigAccessor(tpe: Type, typer: Typer, tree: ValDef, sym: Symbol): Type = {
+ output += s"pluginsTypeSigAccessor(${tree.symbol})"
+ tpe
+ }
+
+
+ override def canAdaptAnnotations(tree: Tree, typer: Typer, mode: Int, pt: Type): Boolean = {
+ output += s"canAdaptAnnotations(${treeClass(tree)}, $pt)"
+ false
+ }
+
+ override def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = {
+ output += s"pluginsTypedReturn($tree, $pt)"
+ tpe
+ }
+
+ }
+
+ addAnnotationChecker(annotChecker)
+ addAnalyzerPlugin(analyzerPlugin)
+ compileString(global)(code)
+
+ val res = output.groupBy(identity).mapValues(_.size).map { case (k,v) => s"$k [$v]" }.toList.sorted
+ println(res.mkString("\n"))
+ }
+
+}
diff --git a/test/files/run/array-addition.check b/test/files/run/array-addition.check
deleted file mode 100644
index 7bfbd9c711..0000000000
--- a/test/files/run/array-addition.check
+++ /dev/null
@@ -1,4 +0,0 @@
-Array(1, 2, 3, 4)
-Array(1, 2, 3, 4)
-Array(1)
-Array(1)
diff --git a/test/files/run/array-addition.scala b/test/files/run/array-addition.scala
deleted file mode 100644
index 8def48e85c..0000000000
--- a/test/files/run/array-addition.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-object Test {
- def prettyPrintArray(x: Array[_]) = println("Array(" + x.mkString(", ") + ")")
-
- def main(args: Array[String]): Unit = {
- prettyPrintArray(Array(1,2,3) :+ 4)
- prettyPrintArray(1 +: Array(2,3,4))
- prettyPrintArray(Array() :+ 1)
- prettyPrintArray(1 +: Array())
- }
-}
-
diff --git a/test/files/run/idempotency-case-classes.check b/test/files/run/idempotency-case-classes.check
index 700af3b81b..80d178cba3 100644
--- a/test/files/run/idempotency-case-classes.check
+++ b/test/files/run/idempotency-case-classes.check
@@ -42,7 +42,7 @@ C(2,3)
C.super.<init>();
()
};
- final override def toString(): String = "C";
+ final override <synthetic> def toString(): String = "C";
case <synthetic> def apply(x: Int, y: Int): C = new C(x, y);
case <synthetic> def unapply(x$0: C): Option[(Int, Int)] = if (x$0.==(null))
scala.this.None
diff --git a/test/files/run/idempotency-partial-functions.scala b/test/files/run/idempotency-partial-functions.scala
deleted file mode 100644
index dd5f1167f1..0000000000
--- a/test/files/run/idempotency-partial-functions.scala
+++ /dev/null
@@ -1,25 +0,0 @@
-import scala.reflect.runtime.universe._
-import scala.reflect.runtime.{currentMirror => cm}
-import scala.tools.reflect.{ToolBox, ToolBoxError}
-import scala.tools.reflect.Eval
-
-object Test extends App {
- val partials = reify {
- List((false,true)) collect { case (x,true) => x }
- }
- try {
- println(partials.eval)
- } catch {
- case _: ToolBoxError => println("error!!")
- }
- try {
- val tb = cm.mkToolBox()
- val tpartials = tb.typeCheck(partials.tree)
- println(tpartials)
- val rtpartials = tb.resetAllAttrs(tpartials)
- println(tb.eval(rtpartials))
- } catch {
- // this is the current behaviour, rather than the desired behavior; see SI-6187
- case _: ToolBoxError => println("error!")
- }
-} \ No newline at end of file
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index 282542a732..f2f0b60687 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -21,59 +21,60 @@
< 92 JUMP 7
<
< 7:
-395c391
+391c387
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, value x4, value x5, value x
-397c393
-< blocks: [1,2,3,4,5,8,11,13,14,16]
+393c389
+< blocks: [1,2,3,4,5,8,10,11,13]
---
-> blocks: [1,2,3,5,8,11,13,14,16,17]
-421c417,426
+> blocks: [1,2,3,5,8,10,11,13,14]
+417c413,422
< 103 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 17
+> ? JUMP 14
>
-> 17:
+> 14:
> 101 LOAD_LOCAL(value ex6)
> 101 STORE_LOCAL(value x4)
> 101 SCOPE_ENTER value x4
> 106 LOAD_LOCAL(value x4)
> 106 IS_INSTANCE REF(class MyException)
-> 106 CZJUMP (BOOL)NE ? 5 : 11
-434,436d438
+> 106 CZJUMP (BOOL)NE ? 5 : 8
+430,432d434
< 101 JUMP 4
<
< 4:
-450,453d451
+442,445d443
< 106 LOAD_LOCAL(value x5)
< 106 CALL_METHOD MyException.message (dynamic)
< 106 STORE_LOCAL(value message)
< 106 SCOPE_ENTER value message
-455c453,454
+447c445,446
< 106 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 106 CALL_METHOD MyException.message (dynamic)
-527c526
+519c518
< blocks: [1,2,3,4,6,7,8,9,10]
---
> blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
-556c555,560
+548c547
< 306 THROW(MyException)
---
> ? JUMP 11
->
+549a549,553
> 11:
> ? LOAD_LOCAL(variable monitor4)
> 305 MONITOR_EXIT
> ? JUMP 12
-562c566
+>
+554c558
< ? THROW(Throwable)
---
> ? JUMP 12
-568c572,579
+560c564,571
< ? THROW(Throwable)
---
> ? STORE_LOCAL(value t)
@@ -84,7 +85,7 @@
> 304 MONITOR_EXIT
> ? STORE_LOCAL(value t)
> ? JUMP 13
-583a595,606
+575a587,598
> 13:
> 310 LOAD_MODULE object Predef
> 310 CALL_PRIMITIVE(StartConcat)
@@ -97,35 +98,35 @@
> 310 CALL_METHOD scala.Predef.println (dynamic)
> 310 JUMP 2
>
-592c615
+584c607
< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
---
> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
-595c618
+587c610
< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
---
> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
-627c650
+619c642
< blocks: [1,2,3,4,5,6,7,9,10]
---
> blocks: [1,2,3,4,5,6,7,9,10,11,12]
-651c674,675
+643c666,667
< 78 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 11
-652a677,681
+644a669,673
> 11:
> 81 LOAD_LOCAL(value e)
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
>
-680c709,710
+672c701,702
< 81 THROW(Exception)
---
> ? STORE_LOCAL(variable exc1)
> ? JUMP 12
-696a727,739
+688a719,731
> 12:
> 83 LOAD_MODULE object Predef
> 83 CONSTANT("finally")
@@ -139,88 +140,88 @@
> 84 LOAD_LOCAL(variable exc1)
> 84 THROW(Throwable)
>
-702c745
+694c737
< catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
---
> catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
-726c769
+718c761
< locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
-728c771
-< blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31]
+720c763
+< blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
---
-> blocks: [1,2,3,4,5,6,9,12,14,17,18,19,22,25,27,28,30,31,32,33,34]
-752c795,802
+> blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
+744c787,794
< 172 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 32
+> ? JUMP 26
>
-> 32:
+> 26:
> 170 LOAD_LOCAL(value ex6)
> 170 STORE_LOCAL(value x4)
> 170 SCOPE_ENTER value x4
-> 170 JUMP 18
-799,802d848
+> 170 JUMP 15
+787,790d836
< 175 LOAD_LOCAL(value x5)
< 175 CALL_METHOD MyException.message (dynamic)
< 175 STORE_LOCAL(value message)
< 175 SCOPE_ENTER value message
-804c850,851
+792c838,839
< 176 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 176 CALL_METHOD MyException.message (dynamic)
-808c855,856
+796c843,844
< 177 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 177 CALL_METHOD MyException.message (dynamic)
-810c858,859
+798c846,847
< 177 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 33
-814c863,864
+> ? JUMP 27
+802c851,852
< 170 THROW(Throwable)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 33
-823a874,879
-> 33:
+> ? JUMP 27
+811a862,867
+> 27:
> 169 LOAD_LOCAL(value ex6)
> 169 STORE_LOCAL(value x4)
> 169 SCOPE_ENTER value x4
> 169 JUMP 5
>
-838,841d893
+822,825d877
< 180 LOAD_LOCAL(value x5)
< 180 CALL_METHOD MyException.message (dynamic)
< 180 STORE_LOCAL(value message)
< 180 SCOPE_ENTER value message
-843c895,896
+827c879,880
< 181 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 181 CALL_METHOD MyException.message (dynamic)
-847c900,901
+831c884,885
< 182 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 182 CALL_METHOD MyException.message (dynamic)
-849c903,904
+833c887,888
< 182 THROW(MyException)
---
> ? STORE_LOCAL(variable exc2)
-> ? JUMP 34
-853c908,909
+> ? JUMP 28
+837c892,893
< 169 THROW(Throwable)
---
> ? STORE_LOCAL(variable exc2)
-> ? JUMP 34
-869a926,938
-> 34:
+> ? JUMP 28
+853a910,922
+> 28:
> 184 LOAD_MODULE object Predef
> 184 CONSTANT("finally")
> 184 CALL_METHOD scala.Predef.println (dynamic)
@@ -233,159 +234,158 @@
> 185 LOAD_LOCAL(variable exc2)
> 185 THROW(Throwable)
>
-875c944
-< catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30) starting at: 4
+859c928
+< catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
---
-> catch (Throwable) in ArrayBuffer(17, 18, 19, 22, 25, 27, 28, 30, 32) starting at: 4
-878c947
-< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30) starting at: 3
+> catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
+862c931
+< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
---
-> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 12, 17, 18, 19, 22, 25, 27, 28, 30, 32, 33) starting at: 3
-902c971
+> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
+886c955
< locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value e, value ex6, value x4, value x5, value x
-904c973
-< blocks: [1,2,3,6,7,8,11,14,16,17,19]
+888c957
+< blocks: [1,2,3,6,7,8,11,13,14,16]
---
-> blocks: [1,2,3,6,7,8,11,14,16,17,19,20]
-928c997,1004
+> blocks: [1,2,3,6,7,8,11,13,14,16,17]
+912c981,988
< 124 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
+> ? JUMP 17
>
-> 20:
+> 17:
> 122 LOAD_LOCAL(value ex6)
> 122 STORE_LOCAL(value x4)
> 122 SCOPE_ENTER value x4
> 122 JUMP 7
-957,960d1032
+937,940d1012
< 127 LOAD_LOCAL(value x5)
< 127 CALL_METHOD MyException.message (dynamic)
< 127 STORE_LOCAL(value message)
< 127 SCOPE_ENTER value message
-962c1034,1035
+942c1014,1015
< 127 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 127 CALL_METHOD MyException.message (dynamic)
-991c1064
-< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19) starting at: 3
+971c1044
+< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
---
-> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 14, 16, 17, 19, 20) starting at: 3
-1015c1088
+> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
+995c1068
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
---
> locals: value args, variable result, value ex6, value x4, value x5, value x, value e
-1017c1090
-< blocks: [1,2,3,4,5,8,11,15,16,17,19]
+997c1070
+< blocks: [1,2,3,4,5,8,12,13,14,16]
---
-> blocks: [1,2,3,5,8,11,15,16,17,19,20]
-1041c1114,1123
+> blocks: [1,2,3,5,8,12,13,14,16,17]
+1021c1094,1103
< 148 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
+> ? JUMP 17
>
-> 20:
+> 17:
> 145 LOAD_LOCAL(value ex6)
> 145 STORE_LOCAL(value x4)
> 145 SCOPE_ENTER value x4
> 154 LOAD_LOCAL(value x4)
> 154 IS_INSTANCE REF(class MyException)
-> 154 CZJUMP (BOOL)NE ? 5 : 11
-1062,1064d1143
+> 154 CZJUMP (BOOL)NE ? 5 : 8
+1042,1044d1123
< 145 JUMP 4
<
< 4:
-1078,1081d1156
+1054,1057d1132
< 154 LOAD_LOCAL(value x5)
< 154 CALL_METHOD MyException.message (dynamic)
< 154 STORE_LOCAL(value message)
< 154 SCOPE_ENTER value message
-1083c1158,1159
+1059c1134,1135
< 154 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 154 CALL_METHOD MyException.message (dynamic)
-1300c1376
+1276c1352
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1324c1400,1401
+1300c1376,1383
< 38 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 8
-1325a1403,1408
+>
> 8:
> 42 LOAD_MODULE object Predef
> 42 CONSTANT("IllegalArgumentException")
> 42 CALL_METHOD scala.Predef.println (dynamic)
> 42 JUMP 2
->
-1371c1454
+1347c1430
< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
---
> locals: value args, variable result, value ex6, value x4, value x5, value x
-1373c1456
-< blocks: [1,2,3,4,5,8,11,13,14,16,17,19]
+1349c1432
+< blocks: [1,2,3,4,5,8,10,11,13,14,16]
---
-> blocks: [1,2,3,5,8,11,13,14,16,17,19,20]
-1397c1480,1481
+> blocks: [1,2,3,5,8,10,11,13,14,16,17]
+1373c1456,1457
< 203 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
-1417c1501,1510
+> ? JUMP 17
+1393c1477,1486
< 209 THROW(MyException)
---
> ? STORE_LOCAL(value ex6)
-> ? JUMP 20
+> ? JUMP 17
>
-> 20:
+> 17:
> 200 LOAD_LOCAL(value ex6)
> 200 STORE_LOCAL(value x4)
> 200 SCOPE_ENTER value x4
> 212 LOAD_LOCAL(value x4)
> 212 IS_INSTANCE REF(class MyException)
-> 212 CZJUMP (BOOL)NE ? 5 : 11
-1430,1432d1522
+> 212 CZJUMP (BOOL)NE ? 5 : 8
+1406,1408d1498
< 200 JUMP 4
<
< 4:
-1446,1449d1535
+1418,1421d1507
< 212 LOAD_LOCAL(value x5)
< 212 CALL_METHOD MyException.message (dynamic)
< 212 STORE_LOCAL(value message)
< 212 SCOPE_ENTER value message
-1451c1537,1538
+1423c1509,1510
< 213 LOAD_LOCAL(value message)
---
> ? LOAD_LOCAL(value x5)
> 213 CALL_METHOD MyException.message (dynamic)
-1495c1582
+1467c1554
< blocks: [1,2,3,4,5,7]
---
> blocks: [1,2,3,4,5,7,8]
-1519c1606,1607
+1491c1578,1579
< 58 THROW(IllegalArgumentException)
---
> ? STORE_LOCAL(value e)
> ? JUMP 8
-1520a1609,1614
+1492a1581,1586
> 8:
> 62 LOAD_MODULE object Predef
> 62 CONSTANT("RuntimeException")
> 62 CALL_METHOD scala.Predef.println (dynamic)
> 62 JUMP 2
>
-1568c1662
+1540c1634
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1588c1682,1687
+1560c1654,1659
< 229 THROW(MyException)
---
> ? JUMP 5
@@ -394,19 +394,19 @@
> ? LOAD_LOCAL(variable monitor1)
> 228 MONITOR_EXIT
> 228 THROW(Throwable)
-1594c1693
+1566c1665
< ? THROW(Throwable)
---
> 228 THROW(Throwable)
-1622c1721
+1594c1693
< locals: value args, variable result, variable monitor2, variable monitorResult1
---
> locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
-1624c1723
+1596c1695
< blocks: [1,2,3,4]
---
> blocks: [1,2,3,4,5]
-1647c1746,1754
+1619c1718,1726
< 245 THROW(MyException)
---
> ? STORE_LOCAL(value exception$1)
@@ -418,7 +418,7 @@
> ? LOAD_LOCAL(variable monitor2)
> 244 MONITOR_EXIT
> 244 THROW(Throwable)
-1653c1760
+1625c1732
< ? THROW(Throwable)
---
> 244 THROW(Throwable)
diff --git a/test/files/run/parserJavaIdent.check b/test/files/run/parserJavaIdent.check
new file mode 100644
index 0000000000..597ddbee47
--- /dev/null
+++ b/test/files/run/parserJavaIdent.check
@@ -0,0 +1,26 @@
+[1.7] parsed: simple
+[1.8] parsed: with123
+[1.6] parsed: with$
+[1.10] parsed: withøßöèæ
+[1.6] parsed: with_
+[1.6] parsed: _with
+[1.1] failure: java identifier expected
+
+3start
+^
+[1.1] failure: java identifier expected
+
+-start
+^
+[1.5] failure: java identifier expected
+
+with-s
+ ^
+[1.3] failure: java identifier expected
+
+we♥scala
+ ^
+[1.6] failure: java identifier expected
+
+with space
+ ^
diff --git a/test/files/run/parserJavaIdent.scala b/test/files/run/parserJavaIdent.scala
new file mode 100644
index 0000000000..c068075e4e
--- /dev/null
+++ b/test/files/run/parserJavaIdent.scala
@@ -0,0 +1,26 @@
+object Test extends scala.util.parsing.combinator.JavaTokenParsers {
+
+ def test[A](s: String) {
+ val res = parseAll(ident, s) match {
+ case Failure(_, in) => Failure("java identifier expected", in)
+ case o => o
+ }
+ println(res)
+ }
+
+ def main(args: Array[String]) {
+ // Happy tests
+ test("simple")
+ test("with123")
+ test("with$")
+ test("withøßöèæ")
+ test("with_")
+ test("_with")
+ // Sad tests
+ test("3start")
+ test("-start")
+ test("with-s")
+ test("we♥scala")
+ test("with space")
+ }
+}
diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check
index 53c53cfbcc..2d37fff1f4 100644
--- a/test/files/run/reflection-java-annotations.check
+++ b/test/files/run/reflection-java-annotations.check
@@ -1 +1 @@
-List(JavaComplexAnnotation(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation], classOf[JavaComplexAnnotation]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false))
+List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false))
diff --git a/test/files/run/reflection-java-annotations/JavaAnnottee_1.java b/test/files/run/reflection-java-annotations/JavaAnnottee_1.java
new file mode 100644
index 0000000000..b241f5d25e
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaAnnottee_1.java
@@ -0,0 +1,47 @@
+@JavaComplexAnnotation_1(
+ v1 = (byte)1,
+ v2 = (short)2,
+ v3 = (char)3,
+ v4 = (int)4,
+ v5 = (long)5,
+ v6 = (float)6,
+ v7 = (double)7,
+ v10 = "hello",
+ v11 = JavaAnnottee_1.class,
+ v12 = JavaSimpleEnumeration_1.FOO,
+ v13 = @JavaSimpleAnnotation_1(
+ v1 = (byte)11,
+ v2 = (short)12,
+ v3 = (char)13,
+ v4 = (int)14,
+ v5 = (long)15,
+ v6 = (float)16,
+ v7 = (double)17,
+ v10 = "world1",
+ v11 = JavaSimpleAnnotation_1.class,
+ v12 = JavaSimpleEnumeration_1.FOO
+ ),
+ v101 = {(byte)101, (byte)101},
+ v102 = {(short)102, (short)102},
+ v103 = {(char)103, (char)103},
+ v104 = {(int)104, (int)104},
+ v105 = {(long)105, (long)105},
+ v106 = {(float)106, (float)106},
+ v107 = {(double)107, (double)107},
+ v108 = {false, true},
+ v110 = {"hello", "world"},
+ v111 = {JavaSimpleAnnotation_1.class, JavaComplexAnnotation_1.class},
+ v112 = {JavaSimpleEnumeration_1.FOO, JavaSimpleEnumeration_1.BAR},
+ v113 = {@JavaSimpleAnnotation_1(
+ v1 = (byte)21,
+ v2 = (short)22,
+ v3 = (char)23,
+ v4 = (int)24,
+ v5 = (long)25,
+ v6 = (float)26,
+ v7 = (double)27,
+ v10 = "world2",
+ v11 = JavaComplexAnnotation_1.class,
+ v12 = JavaSimpleEnumeration_1.BAR
+ )})
+public class JavaAnnottee_1 {} \ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java b/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java
new file mode 100644
index 0000000000..645eeb9399
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaComplexAnnotation_1.java
@@ -0,0 +1,34 @@
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
+public @interface JavaComplexAnnotation_1 {
+ byte v1();
+ short v2();
+ char v3();
+ int v4();
+ long v5();
+ float v6();
+ double v7();
+ boolean v8() default false;
+ // void v9();
+ String v10();
+ Class<?> v11();
+ JavaSimpleEnumeration_1 v12();
+ JavaSimpleAnnotation_1 v13();
+ byte[] v101();
+ short[] v102();
+ char[] v103();
+ int[] v104();
+ long[] v105();
+ float[] v106();
+ double[] v107();
+ boolean[] v108();
+ String[] v110();
+ Class<?>[] v111();
+ JavaSimpleEnumeration_1[] v112();
+ JavaSimpleAnnotation_1[] v113();
+} \ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java b/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java
new file mode 100644
index 0000000000..c0f92fad2c
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaSimpleAnnotation_1.java
@@ -0,0 +1,21 @@
+import java.lang.annotation.Target;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.FIELD, ElementType.METHOD, ElementType.TYPE})
+public @interface JavaSimpleAnnotation_1 {
+ byte v1();
+ short v2();
+ char v3();
+ int v4();
+ long v5();
+ float v6();
+ double v7();
+ boolean v8() default false;
+ // void v9();
+ String v10();
+ Class<?> v11();
+ JavaSimpleEnumeration_1 v12();
+} \ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java b/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java
new file mode 100644
index 0000000000..39246141cc
--- /dev/null
+++ b/test/files/run/reflection-java-annotations/JavaSimpleEnumeration_1.java
@@ -0,0 +1,4 @@
+enum JavaSimpleEnumeration_1 {
+ FOO,
+ BAR
+} \ No newline at end of file
diff --git a/test/files/run/reflection-java-annotations.scala b/test/files/run/reflection-java-annotations/Test_2.scala
index 2e3fed48ce..d2c3157071 100644
--- a/test/files/run/reflection-java-annotations.scala
+++ b/test/files/run/reflection-java-annotations/Test_2.scala
@@ -1,6 +1,6 @@
object Test extends App {
import scala.reflect.runtime.universe._
- val sym = typeOf[JavaAnnottee].typeSymbol
+ val sym = typeOf[JavaAnnottee_1].typeSymbol
sym.typeSignature
sym.annotations foreach (_.javaArgs)
println(sym.annotations)
diff --git a/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java b/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java
new file mode 100644
index 0000000000..39246141cc
--- /dev/null
+++ b/test/files/run/reflection-java-crtp/JavaSimpleEnumeration_1.java
@@ -0,0 +1,4 @@
+enum JavaSimpleEnumeration_1 {
+ FOO,
+ BAR
+} \ No newline at end of file
diff --git a/test/files/run/reflection-java-crtp.scala b/test/files/run/reflection-java-crtp/Main_2.scala
index 260d3540dc..fb5668f323 100644
--- a/test/files/run/reflection-java-crtp.scala
+++ b/test/files/run/reflection-java-crtp/Main_2.scala
@@ -1,6 +1,6 @@
object Test extends App {
import scala.reflect.runtime.universe._
- val enum = typeOf[JavaSimpleEnumeration].baseClasses(1).asClass
+ val enum = typeOf[JavaSimpleEnumeration_1].baseClasses(1).asClass
// make sure that the E's in Enum<E extends Enum<E>> are represented by the same symbol
val e1 = enum.typeParams(0).asType
val TypeBounds(_, TypeRef(_, _, List(TypeRef(_, e2: TypeSymbol, _)))) = e1.typeSignature
diff --git a/test/files/run/reify-staticXXX.scala b/test/files/run/reify-staticXXX.scala
index dc861f843e..e80157dd8f 100644
--- a/test/files/run/reify-staticXXX.scala
+++ b/test/files/run/reify-staticXXX.scala
@@ -4,12 +4,12 @@ import scala.tools.reflect.Eval
object B { override def toString = "object" }
class C { override def toString = "class" }
-package foo {
+package foo1 {
object B { override def toString = "package > object" }
class C { override def toString = "package > class" }
}
-object foo {
+object Foo2 {
object B { override def toString = "object > object" }
class C { override def toString = "object > class" }
}
@@ -20,14 +20,14 @@ object packageless {
println(reify(B).eval)
println(new C)
println(reify(new C).eval)
- println(foo.B)
- println(reify(foo.B).eval)
- println(new foo.C)
- println(reify(new foo.C).eval)
- println(_root_.foo.B)
- println(reify(_root_.foo.B).eval)
- println(new _root_.foo.C)
- println(reify(new _root_.foo.C).eval)
+ println(Foo2.B)
+ println(reify(Foo2.B).eval)
+ println(new Foo2.C)
+ println(reify(new Foo2.C).eval)
+ println(_root_.foo1.B)
+ println(reify(_root_.foo1.B).eval)
+ println(new _root_.foo1.C)
+ println(reify(new _root_.foo1.C).eval)
}
}
@@ -38,14 +38,14 @@ package packageful {
println(reify(B).eval)
println(new C)
println(reify(new C).eval)
- println(foo.B)
- println(reify(foo.B).eval)
- println(new foo.C)
- println(reify(new foo.C).eval)
- println(_root_.foo.B)
- println(reify(_root_.foo.B).eval)
- println(new _root_.foo.C)
- println(reify(new _root_.foo.C).eval)
+ println(Foo2.B)
+ println(reify(Foo2.B).eval)
+ println(new Foo2.C)
+ println(reify(new Foo2.C).eval)
+ println(_root_.foo1.B)
+ println(reify(_root_.foo1.B).eval)
+ println(new _root_.foo1.C)
+ println(reify(new _root_.foo1.C).eval)
}
}
}
diff --git a/test/files/run/reify_magicsymbols.check b/test/files/run/reify_magicsymbols.check
index e2aa46a364..c9d892d793 100644
--- a/test/files/run/reify_magicsymbols.check
+++ b/test/files/run/reify_magicsymbols.check
@@ -10,4 +10,4 @@ List[Null]
List[Nothing]
AnyRef{def foo(x: Int): Int}
Int* => Unit
-=> Int => Unit
+(=> Int) => Unit
diff --git a/test/files/run/t2418.check b/test/files/run/t2418.check
new file mode 100644
index 0000000000..f599e28b8a
--- /dev/null
+++ b/test/files/run/t2418.check
@@ -0,0 +1 @@
+10
diff --git a/test/files/run/t2418.scala b/test/files/run/t2418.scala
new file mode 100644
index 0000000000..f330bef60a
--- /dev/null
+++ b/test/files/run/t2418.scala
@@ -0,0 +1,10 @@
+class Foo {
+ @volatile final var x=10
+ override def toString = "" + x
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println((new Foo))
+ }
+}
diff --git a/test/files/run/t2818.check b/test/files/run/t2818.check
new file mode 100644
index 0000000000..31286c990b
--- /dev/null
+++ b/test/files/run/t2818.check
@@ -0,0 +1,4 @@
+105
+499999500000
+0
+1
diff --git a/test/files/run/t2818.scala b/test/files/run/t2818.scala
new file mode 100644
index 0000000000..19b67cbc88
--- /dev/null
+++ b/test/files/run/t2818.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ println((List.range(1L, 15L) :\ 0L) (_ + _))
+ println((List.range(1L, 1000000L) :\ 0L) (_ + _))
+ println((List.fill(5)(1) :\ 1) (_ - _))
+ println((List.fill(1000000)(1) :\ 1) (_ - _))
+}
diff --git a/test/files/run/t3353.check b/test/files/run/t3353.check
new file mode 100644
index 0000000000..8b4ae1fe69
--- /dev/null
+++ b/test/files/run/t3353.check
@@ -0,0 +1 @@
+Got: foo and None
diff --git a/test/files/run/t3353.scala b/test/files/run/t3353.scala
new file mode 100644
index 0000000000..eeb63c1b05
--- /dev/null
+++ b/test/files/run/t3353.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+
+ "foo" match {
+ case Matcher(result) => println(result)
+ }
+
+ object Matcher{
+ def unapply(s: String)(implicit secondParam: Option[String] = None) = Some("Got: " + s + " and " + secondParam)
+ }
+}
diff --git a/test/files/run/t4729.check b/test/files/run/t4729.check
new file mode 100644
index 0000000000..9a2aa56d99
--- /dev/null
+++ b/test/files/run/t4729.check
@@ -0,0 +1,4 @@
+WrappedArray(1, 2)
+WrappedArray(1, 2)
+WrappedArray(1, 2)
+WrappedArray(1, 2)
diff --git a/test/files/run/t4729/J_1.java b/test/files/run/t4729/J_1.java
new file mode 100644
index 0000000000..2ffb5a88d1
--- /dev/null
+++ b/test/files/run/t4729/J_1.java
@@ -0,0 +1,4 @@
+// Java Interface:
+public interface J_1 {
+ public void method(String... s);
+}
diff --git a/test/files/run/t4729/S_2.scala b/test/files/run/t4729/S_2.scala
new file mode 100644
index 0000000000..e34e3d34d4
--- /dev/null
+++ b/test/files/run/t4729/S_2.scala
@@ -0,0 +1,29 @@
+ // Scala class:
+class ScalaVarArgs extends J_1 {
+ // -- no problem overriding it using an ordinary class
+ def method(s: String*) { println(s) }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ //[1] Ok - no problem using inferred type
+ val varArgs = new J_1 {
+ def method(s: String*) { println(s) }
+ }
+ varArgs.method("1", "2")
+
+ //[2] Ok -- no problem when explicitly setting its type after construction
+ val b: J_1 = varArgs
+ b.method("1", "2")
+
+ //[3] Ok -- no problem on calling its method
+ (new ScalaVarArgs).method("1", "2")
+ (new ScalaVarArgs: J_1).method("1", "2")
+
+ //[4] Not Ok -- error when assigning anonymous class to an explicitly typed val
+ // Compiler error: object creation impossible, since method method in trait VarArgs of type (s: <repeated...>[java.lang.String])Unit is not defined
+ val tagged: J_1 = new J_1 {
+ def method(s: String*) { println(s) }
+ }
+ }
+}
diff --git a/test/files/run/t5064.check b/test/files/run/t5064.check
index 61ccfd16e7..077006abd9 100644
--- a/test/files/run/t5064.check
+++ b/test/files/run/t5064.check
@@ -1,6 +1,6 @@
-[53] T5064.super.<init>()
-[53] T5064.super.<init>
-[53] this
+[12] T5064.super.<init>()
+[12] T5064.super.<init>
+[12] this
[16:23] immutable.this.List.apply(scala.this.Predef.wrapIntArray(Array[Int]{1}))
[16:20] immutable.this.List.apply
<16:20> immutable.this.List
diff --git a/test/files/run/t5313.check b/test/files/run/t5313.check
new file mode 100644
index 0000000000..7a48b2b711
--- /dev/null
+++ b/test/files/run/t5313.check
@@ -0,0 +1,12 @@
+STORE_LOCAL(variable kept1)
+STORE_LOCAL(value result)
+STORE_LOCAL(variable kept1)
+STORE_LOCAL(variable kept2)
+STORE_LOCAL(value kept3)
+STORE_LOCAL(variable kept2)
+STORE_LOCAL(variable kept4)
+STORE_LOCAL(variable kept4)
+STORE_LOCAL(variable kept5)
+STORE_LOCAL(variable kept5)
+STORE_LOCAL(variable kept6)
+STORE_LOCAL(variable kept6)
diff --git a/test/files/run/t5313.scala b/test/files/run/t5313.scala
new file mode 100644
index 0000000000..7da8726a1f
--- /dev/null
+++ b/test/files/run/t5313.scala
@@ -0,0 +1,54 @@
+import scala.tools.partest.IcodeTest
+
+object Test extends IcodeTest {
+ override def printIcodeAfterPhase = "dce"
+
+ override def extraSettings: String = super.extraSettings + " -optimize"
+
+ override def code =
+ """class Foo {
+ def randomBoolean = util.Random.nextInt % 2 == 0
+ def bar = {
+ var kept1 = new Object
+ val result = new java.lang.ref.WeakReference(kept1)
+ kept1 = null // we can't eliminate this assignment because result can observe
+ // when the object has no more references. See SI-5313
+ kept1 = new Object // but we can eliminate this one because kept1 has already been clobbered
+ var erased2 = null // we can eliminate this store because it's never used
+ val erased3 = erased2 // and this
+ var erased4 = erased2 // and this
+ val erased5 = erased4 // and this
+ var kept2: Object = new Object // ultimately can't be eliminated
+ while(randomBoolean) {
+ val kept3 = kept2
+ kept2 = null // this can't, because it clobbers kept2, which is used
+ erased4 = null // safe to eliminate
+ println(kept3)
+ }
+ var kept4 = new Object // have to keep, it's used
+ try
+ println(kept4)
+ catch {
+ case _ : Throwable => kept4 = null // have to keep, it clobbers kept4 which is used
+ }
+ var kept5 = new Object
+ print(kept5)
+ kept5 = null // can't eliminate: it's a clobber and it's used
+ print(kept5)
+ kept5 = null // can eliminate because we don't care about clobbers of nulls
+ while(randomBoolean) {
+ var kept6: AnyRef = null // not used, but have to keep because it clobbers the next used store
+ // on the back edge of the loop
+ kept6 = new Object // used
+ println(kept6)
+ }
+ result
+ }
+ }""".stripMargin
+
+ override def show() {
+ val storeLocal = "STORE_LOCAL"
+ val lines1 = collectIcode("") filter (_ contains storeLocal) map (x => x.drop(x.indexOf(storeLocal)))
+ println(lines1 mkString "\n")
+ }
+}
diff --git a/test/files/run/t5374.check b/test/files/run/t5374.check
deleted file mode 100644
index 6be88d77ec..0000000000
--- a/test/files/run/t5374.check
+++ /dev/null
@@ -1,6 +0,0 @@
-ListBuffer(1, 2, 3, 1)
-ListBuffer(1, 2, 3, 1)
-ListBuffer()
-List(1, 2, 3, 4, 5)
-List(1, 2, 3)
-ok \ No newline at end of file
diff --git a/test/files/run/t5374.scala b/test/files/run/t5374.scala
deleted file mode 100644
index 9b1671e795..0000000000
--- a/test/files/run/t5374.scala
+++ /dev/null
@@ -1,76 +0,0 @@
-
-
-
-import collection.mutable.ListBuffer
-import java.io._
-
-
-
-object Test {
-
- def main(args: Array[String]) {
- ticketExample()
- emptyListBuffer()
- list()
- legacyList()
- objectWithMultipleLists()
- }
-
- def inAndOut[T <: AnyRef](obj: T): T = {
- val baos = new ByteArrayOutputStream
- val oos = new ObjectOutputStream(baos)
- oos.writeObject( obj )
- val bais = new ByteArrayInputStream( baos.toByteArray )
- val ois = new ObjectInputStream(bais)
- ois.readObject.asInstanceOf[T]
- }
-
- def ticketExample() {
- val lb = inAndOut(ListBuffer(1, 2, 3))
- val lb2 = ListBuffer[Int]() ++= lb
-
- lb2 ++= List(1)
- lb ++= List(1)
- println(lb)
- println(lb2)
- }
-
- def emptyListBuffer() {
- val lb = inAndOut(ListBuffer[Int]())
-
- println(lb)
- }
-
- def list() {
- val l = inAndOut(List(1, 2, 3, 4, 5))
-
- println(l)
- }
-
- // this byte array corresponds to what List(1, 2, 3) used to be serialized to prior to this fix
- val listBytes = Array[Byte](-84, -19, 0, 5, 115, 114, 0, 39, 115, 99, 97, 108, 97, 46, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 46, 105, 109, 109, 117, 116, 97, 98, 108, 101, 46, 36, 99, 111, 108, 111, 110, 36, 99, 111, 108, 111, 110, -118, 92, 99, 91, -10, -40, -7, 109, 3, 0, 2, 76, 0, 43, 115, 99, 97, 108, 97, 36, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 36, 105, 109, 109, 117, 116, 97, 98, 108, 101, 36, 36, 99, 111, 108, 111, 110, 36, 99, 111, 108, 111, 110, 36, 36, 104, 100, 116, 0, 18, 76, 106, 97, 118, 97, 47, 108, 97, 110, 103, 47, 79, 98, 106, 101, 99, 116, 59, 76, 0, 2, 116, 108, 116, 0, 33, 76, 115, 99, 97, 108, 97, 47, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 47, 105, 109, 109, 117, 116, 97, 98, 108, 101, 47, 76, 105, 115, 116, 59, 120, 112, 115, 114, 0, 17, 106, 97, 118, 97, 46, 108, 97, 110, 103, 46, 73, 110, 116, 101, 103, 101, 114, 18, -30, -96, -92, -9, -127, -121, 56, 2, 0, 1, 73, 0, 5, 118, 97, 108, 117, 101, 120, 114, 0, 16, 106, 97, 118, 97, 46, 108, 97, 110, 103, 46, 78, 117, 109, 98, 101, 114, -122, -84, -107, 29, 11, -108, -32, -117, 2, 0, 0, 120, 112, 0, 0, 0, 1, 115, 113, 0, 126, 0, 4, 0, 0, 0, 2, 115, 113, 0, 126, 0, 4, 0, 0, 0, 3, 115, 114, 0, 44, 115, 99, 97, 108, 97, 46, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 46, 105, 109, 109, 117, 116, 97, 98, 108, 101, 46, 76, 105, 115, 116, 83, 101, 114, 105, 97, 108, 105, 122, 101, 69, 110, 100, 36, -118, 92, 99, 91, -9, 83, 11, 109, 2, 0, 0, 120, 112, 120)
-
- def legacyList() {
- val bais = new ByteArrayInputStream(listBytes)
- val ois = new ObjectInputStream(bais)
- val l = ois.readObject()
-
- println(l)
- }
-
- class Foo extends Serializable {
- val head = List(1, 2, 3)
- val last = head.tail.tail
- def structuralSharing: Boolean = head.tail.tail eq last
-
- assert(structuralSharing)
- }
-
- def objectWithMultipleLists() {
- val foo = inAndOut(new Foo)
-
- if (foo.structuralSharing) println("ok")
- else println("no structural sharing")
- }
-
-}
diff --git a/test/files/run/t5543.check b/test/files/run/t5543.check
index 517038f4c7..2ef2d51ff4 100644
--- a/test/files/run/t5543.check
+++ b/test/files/run/t5543.check
@@ -1,3 +1,9 @@
Test, 7, 119
m, 3, 19
Test, 5, 85
+T
+C
+T
+T
+D
+T
diff --git a/test/files/run/t5543.scala b/test/files/run/t5543.scala
index 651bc7f2b2..3684bf9690 100644
--- a/test/files/run/t5543.scala
+++ b/test/files/run/t5543.scala
@@ -22,5 +22,24 @@ object Test extends Function0[Int] {
println(sut.toString)
println(sut.m())
println(A.init()())
+
+ println((new T.C()).x)
+ println((new T.D(0,0)).x)
+ }
+}
+
+object T {
+ override def toString = "T"
+
+ // `this` refers to T
+ class C(val x: Any = {println(this); this}) { // prints T
+ println(this) // prints C
+ override def toString() = "C"
+ }
+
+ class D(val x: Any) {
+ override def toString() = "D"
+ // `this` refers again to T
+ def this(a: Int, b: Int, c: Any = {println(this); this}) { this(c); println(this) } // prints T, then prints D
}
}
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
index 3b2eb55313..5127d3c1c7 100644
--- a/test/files/run/t5603.check
+++ b/test/files/run/t5603.check
@@ -12,7 +12,7 @@
[95:101]<paramaccessor> private[this] val i: [98:101]Int = _;
<119:139>def <init>([95]i: [98]Int) = <119:139>{
<119:139>val nameElse = <134:139>"Bob";
- [NoPosition][NoPosition][NoPosition]super.<init>();
+ [94][94][94]super.<init>();
[94]()
};
[168:184]val name = [179:184]"avc";
@@ -20,7 +20,7 @@
};
[215:241]object Test extends [227:241][235:238]App {
[227]def <init>() = [227]{
- [NoPosition][NoPosition][NoPosition]super.<init>();
+ [227][227][227]super.<init>();
[227]()
};
[NoPosition]<empty>
diff --git a/test/files/run/t5604.check b/test/files/run/t5604.check
new file mode 100644
index 0000000000..53a2fc8894
--- /dev/null
+++ b/test/files/run/t5604.check
@@ -0,0 +1,8 @@
+long
+double
+long
+double
+long
+double
+long
+double
diff --git a/test/files/run/t5604.scala b/test/files/run/t5604.scala
new file mode 100644
index 0000000000..a06c8aab3e
--- /dev/null
+++ b/test/files/run/t5604.scala
@@ -0,0 +1,50 @@
+// a.scala
+// Fri Jan 13 11:31:47 PST 2012
+
+package foo {
+ object regular extends Duh {
+ def buh(n: Long) = println("long")
+ def buh(n: Double) = println("double")
+ }
+ class regular {
+ import regular._
+
+ duh(33L)
+ duh(3.0d)
+ foo.regular.duh(33L)
+ foo.regular.duh(3.0d)
+ buh(66L)
+ buh(6.0d)
+ foo.regular.buh(66L)
+ foo.regular.buh(6.0d)
+ }
+
+ trait Duh {
+ def duh(n: Long) = println("long")
+ def duh(n: Double) = println("double")
+ }
+ package object bar extends Duh {
+ def buh(n: Long) = println("long")
+ def buh(n: Double) = println("double")
+ }
+ package bar {
+ object Main {
+ def main(args:Array[String]) {
+ duh(33L)
+ duh(3.0d)
+ foo.bar.duh(33L)
+ foo.bar.duh(3.0d)
+ buh(66L)
+ buh(6.0d)
+ foo.bar.buh(66L)
+ foo.bar.buh(6.0d)
+ }
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ foo.bar.Main.main(null)
+ }
+}
diff --git a/test/files/run/t5824.check b/test/files/run/t5824.check
new file mode 100644
index 0000000000..3774da60e5
--- /dev/null
+++ b/test/files/run/t5824.check
@@ -0,0 +1 @@
+a b c
diff --git a/test/files/run/t5824.scala b/test/files/run/t5824.scala
new file mode 100644
index 0000000000..2ad169e2d1
--- /dev/null
+++ b/test/files/run/t5824.scala
@@ -0,0 +1,8 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ reify {
+ println("%s %s %s".format(List("a", "b", "c"): _*))
+ }.eval
+}
diff --git a/test/files/run/t6011c.scala b/test/files/run/t6011c.scala
new file mode 100644
index 0000000000..0647e3f81a
--- /dev/null
+++ b/test/files/run/t6011c.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ // A variation of SI-6011, which eluded the fix
+ // in 2.10.0.
+ //
+ // duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.
+ // at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:50)
+ // at scala.tools.nsc.Global.abort(Global.scala:249)
+ // at scala.tools.nsc.backend.jvm.GenASM$JPlainBuilder$jcode$.emitSWITCH(GenASM.scala:1850)
+ ((1: Byte): @unchecked @annotation.switch) match {
+ case 1 => 2
+ case 1 => 3 // crash
+ }
+}
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index 34f4b22134..79deaacf3a 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -15,7 +15,7 @@ package <empty> {
}
};
def bar(barParam: Int): Object = {
- @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(<empty>);
+ @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = new runtime.VolatileObjectRef(null);
T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
};
def tryy(tryyParam: Int): Function0 = {
diff --git a/test/files/run/t6113.check b/test/files/run/t6113.check
new file mode 100644
index 0000000000..65fb3cd090
--- /dev/null
+++ b/test/files/run/t6113.check
@@ -0,0 +1 @@
+Foo[[X](Int, X)]
diff --git a/test/files/run/t6113.scala b/test/files/run/t6113.scala
new file mode 100644
index 0000000000..321cae86a3
--- /dev/null
+++ b/test/files/run/t6113.scala
@@ -0,0 +1,6 @@
+trait Foo[C[_]]
+
+object Test extends App {
+ import scala.reflect.runtime.universe._
+ println(typeOf[Foo[({type l[X] = (Int, X)})#l]])
+} \ No newline at end of file
diff --git a/test/files/run/t6135.scala b/test/files/run/t6135.scala
new file mode 100644
index 0000000000..c0f8f3fd1d
--- /dev/null
+++ b/test/files/run/t6135.scala
@@ -0,0 +1,13 @@
+object Test extends App {
+ class A { class V }
+
+ abstract class B[S] {
+ def foo(t: S, a: A)(v: a.V)
+ }
+
+ val b1 = new B[String] {
+ def foo(t: String, a: A)(v: a.V) = () // Bridge method required here!
+ }
+
+ b1.foo("", null)(null)
+}
diff --git a/test/files/run/t6146b.check b/test/files/run/t6146b.check
new file mode 100644
index 0000000000..b664d1152a
--- /dev/null
+++ b/test/files/run/t6146b.check
@@ -0,0 +1,52 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> val u = rootMirror.universe; import u._, language._
+u: $r.intp.global.type = <global>
+import u._
+import language._
+
+scala> val S1 = typeOf[c.X.S1.type forSome { val c: C[_] }].typeSymbol.tpeHK
+S1: u.Type = C.X.S1.type
+
+scala> val S2 = typeOf[O.S2].typeSymbol.tpeHK
+S2: u.Type = C.this.S2
+
+scala> val S3 = typeOf[O.S3].typeSymbol.tpeHK
+S3: u.Type = O.S3
+
+scala> val S4 = typeOf[S4].typeSymbol.tpeHK
+S4: u.Type = S4
+
+scala> val F = typeOf[c.F[_] forSome { val c: C[_] }].typeSymbol.tpeHK
+F: u.Type = C.this.F
+
+scala> val fTpe = typeOf[O.type].decl(newTermName("foo")).paramss.head.head.tpe
+fTpe: u.Type = O.F[Int]
+
+scala> def memType(sub: Type, scrut: Type): Type =
+ nestedMemberType(sub.typeSymbol, scrut.prefix, scrut.typeSymbol.owner)
+memType: (sub: u.Type, scrut: u.Type)u.Type
+
+scala>
+
+scala> memType(S1, fTpe)
+res0: u.Type = O.X.S1.type
+
+scala> memType(S2, fTpe)
+res1: u.Type = O.S2
+
+scala> memType(S3, fTpe)
+res2: u.Type = O.S3
+
+scala> memType(S4, fTpe)
+res3: u.Type = S4
+
+scala>
diff --git a/test/files/run/t6146b.scala b/test/files/run/t6146b.scala
new file mode 100644
index 0000000000..adcd40d2ee
--- /dev/null
+++ b/test/files/run/t6146b.scala
@@ -0,0 +1,39 @@
+import scala.tools.partest.ReplTest
+
+class A {
+ sealed trait F[A]
+}
+
+class C[T] extends A {
+ sealed trait F[A]
+ object X {
+ object S1 extends F[T]
+ }
+ class S2 extends F[T]
+}
+object O extends C[Int] {
+ def foo(f: F[Int]) = f match { case X.S1 => }
+
+ class S3 extends F[Int]
+}
+class S4 extends O.F[String]
+
+object Test extends ReplTest {
+ override def code = """
+:power
+val u = rootMirror.universe; import u._, language._
+val S1 = typeOf[c.X.S1.type forSome { val c: C[_] }].typeSymbol.tpeHK
+val S2 = typeOf[O.S2].typeSymbol.tpeHK
+val S3 = typeOf[O.S3].typeSymbol.tpeHK
+val S4 = typeOf[S4].typeSymbol.tpeHK
+val F = typeOf[c.F[_] forSome { val c: C[_] }].typeSymbol.tpeHK
+val fTpe = typeOf[O.type].decl(newTermName("foo")).paramss.head.head.tpe
+def memType(sub: Type, scrut: Type): Type =
+ nestedMemberType(sub.typeSymbol, scrut.prefix, scrut.typeSymbol.owner)
+
+memType(S1, fTpe)
+memType(S2, fTpe)
+memType(S3, fTpe)
+memType(S4, fTpe)
+ """.stripMargin.trim
+} \ No newline at end of file
diff --git a/test/files/run/t6154.check b/test/files/run/t6154.check
new file mode 100644
index 0000000000..9766475a41
--- /dev/null
+++ b/test/files/run/t6154.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6154.scala b/test/files/run/t6154.scala
new file mode 100644
index 0000000000..02ef62905f
--- /dev/null
+++ b/test/files/run/t6154.scala
@@ -0,0 +1,10 @@
+object Test {
+ def foo(a: Int) {
+ var bar: Int = 0
+ bar = try { 0 } catch { case ex: Throwable => 0 }
+ new { foo(bar) }
+ }
+
+ def main(args: Array[String]): Unit =
+ try foo(0) catch { case _: java.lang.StackOverflowError => println("ok") }
+}
diff --git a/test/files/run/t6187.check b/test/files/run/t6187.check
new file mode 100644
index 0000000000..c0ca02923b
--- /dev/null
+++ b/test/files/run/t6187.check
@@ -0,0 +1,32 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> import language.experimental.macros, reflect.macros.Context
+import language.experimental.macros
+import reflect.macros.Context
+
+scala> def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+}
+macroImpl: [T](c: scala.reflect.macros.Context)(t: c.Expr[T])(implicit evidence$1: c.WeakTypeTag[T])c.Expr[List[T]]
+
+scala> def demo[T](t: T): List[T] = macro macroImpl[T]
+demo: [T](t: T)List[T]
+
+scala> def m[T](t: T): List[List[T]] =
+ demo( List((t,true)) collect { case (x,true) => x } )
+m: [T](t: T)List[List[T]]
+
+scala> m(List(1))
+res0: List[List[List[Int]]] = List(List(List(1)))
+
+scala> // Showing we haven't added unreachable warnings
+
+scala> List(1) collect { case x => x }
+res1: List[Int] = List(1)
+
+scala> List("") collect { case x => x }
+res2: List[String] = List("")
+
+scala>
diff --git a/test/files/run/t6187.scala b/test/files/run/t6187.scala
new file mode 100644
index 0000000000..ae642917e7
--- /dev/null
+++ b/test/files/run/t6187.scala
@@ -0,0 +1,18 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+import language.experimental.macros, reflect.macros.Context
+def macroImpl[T: c.WeakTypeTag](c: Context)(t: c.Expr[T]): c.Expr[List[T]] = {
+ val r = c.universe.reify { List(t.splice) }
+ c.Expr[List[T]]( c.resetLocalAttrs(r.tree) )
+}
+def demo[T](t: T): List[T] = macro macroImpl[T]
+def m[T](t: T): List[List[T]] =
+ demo( List((t,true)) collect { case (x,true) => x } )
+m(List(1))
+// Showing we haven't added unreachable warnings
+List(1) collect { case x => x }
+List("") collect { case x => x }
+ """.trim
+}
diff --git a/test/files/run/t6187b.scala b/test/files/run/t6187b.scala
new file mode 100644
index 0000000000..d2d3e9797d
--- /dev/null
+++ b/test/files/run/t6187b.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val x: PartialFunction[Int, Int] = { case 1 => 1 }
+ val o: Any = ""
+ assert(x.applyOrElse(0, (_: Int) => o) == "")
+}
diff --git a/test/files/run/t6206.check b/test/files/run/t6206.check
new file mode 100644
index 0000000000..8064573667
--- /dev/null
+++ b/test/files/run/t6206.check
@@ -0,0 +1,4 @@
+outer
+outer
+inner
+inner
diff --git a/test/files/run/t6206.scala b/test/files/run/t6206.scala
new file mode 100644
index 0000000000..07ff246d02
--- /dev/null
+++ b/test/files/run/t6206.scala
@@ -0,0 +1,37 @@
+class Outer {
+ def apply( position : Inner ) {}
+ class Inner
+
+ this.apply(new Inner)
+ this (new Inner) // error,
+}
+
+
+class Outer1 {
+
+ self =>
+
+ def apply( position : Inner ) : String = "outer"
+
+ class Inner( ) {
+
+ def apply(arg: Inner): String = "inner"
+
+ def testMe = {
+ List(
+ self.apply( this ), // a) this works
+ self( this ), // b) this does not work!
+ this apply this,
+ this(this)
+ ) foreach println
+ }
+ }
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val o = new Outer1
+ val i = new o.Inner
+ i.testMe
+ }
+}
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index af6bd5d269..e6467edc95 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -1,8 +1,8 @@
[[syntax trees at end of patmat]] // newSource1
[7]package [7]<empty> {
[7]object Case3 extends [13][106]scala.AnyRef {
- [106]def <init>(): [13]Case3.type = [106]{
- [106][106][106]Case3.super.<init>();
+ [13]def <init>(): [13]Case3.type = [13]{
+ [13][13][13]Case3.super.<init>();
[13]()
};
[21]def unapply([29]z: [32]<type: [32]scala.Any>): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([58]-1);
@@ -11,10 +11,7 @@
[64]case5()[84]{
[84]<synthetic> val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1);
[84]if ([84]o7.isEmpty.unary_!)
- [84]{
- [90]val nr: [90]Int = [90]o7.get;
- [97][97]matchEnd4([97]())
- }
+ [97][97]matchEnd4([97]())
else
[84][84]case6()
};
@@ -27,8 +24,8 @@
}
};
[113]object Case4 extends [119][217]scala.AnyRef {
- [217]def <init>(): [119]Case4.type = [217]{
- [217][217][217]Case4.super.<init>();
+ [119]def <init>(): [119]Case4.type = [119]{
+ [119][119][119]Case4.super.<init>();
[119]()
};
[127]def unapplySeq([138]z: [141]<type: [141]scala.Any>): [127]Option[List[Int]] = [167]scala.None;
@@ -38,10 +35,7 @@
[195]<synthetic> val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1);
[195]if ([195]o7.isEmpty.unary_!)
[195]if ([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)))
- [195]{
- [201]val nr: [201]Int = [201][201]o7.get.apply([201]0);
- [208][208]matchEnd4([208]())
- }
+ [208][208]matchEnd4([208]())
else
[195][195]case6()
else
@@ -56,8 +50,8 @@
}
};
[224]object Case5 extends [230][312]scala.AnyRef {
- [312]def <init>(): [230]Case5.type = [312]{
- [312][312][312]Case5.super.<init>();
+ [230]def <init>(): [230]Case5.type = [230]{
+ [230][230][230]Case5.super.<init>();
[230]()
};
[238]def unapply([246]z: [249]<type: [249]scala.Any>): [238]Boolean = [265]true;
diff --git a/test/files/run/t6288b-jump-position.check b/test/files/run/t6288b-jump-position.check
index 45ec31c308..83ba810958 100644
--- a/test/files/run/t6288b-jump-position.check
+++ b/test/files/run/t6288b-jump-position.check
@@ -19,7 +19,7 @@ object Case3 extends Object {
Exception handlers:
def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
- locals: value args, value x1, value x2, value x
+ locals: value args, value x1, value x
startBlock: 1
blocks: [1,2,3,6,7]
@@ -35,10 +35,6 @@ object Case3 extends Object {
5 CZJUMP (BOOL)NE ? 3 : 6
3:
- 5 LOAD_LOCAL(value x1)
- 5 CHECK_CAST REF(class String)
- 5 STORE_LOCAL(value x2)
- 5 SCOPE_ENTER value x2
6 LOAD_MODULE object Predef
6 CONSTANT("case 0")
6 CALL_METHOD scala.Predef.println (dynamic)
@@ -69,9 +65,9 @@ object Case3 extends Object {
blocks: [1]
1:
- 12 THIS(Case3)
- 12 CALL_METHOD java.lang.Object.<init> (super())
- 12 RETURN(UNIT)
+ 1 THIS(Case3)
+ 1 CALL_METHOD java.lang.Object.<init> (super())
+ 1 RETURN(UNIT)
}
Exception handlers:
diff --git a/test/files/run/t6434.check b/test/files/run/t6434.check
new file mode 100644
index 0000000000..f898b6b781
--- /dev/null
+++ b/test/files/run/t6434.check
@@ -0,0 +1,10 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala> def f(x: => Int): Int = x
+f: (x: => Int)Int
+
+scala> f _
+res0: (=> Int) => Int = <function1>
+
+scala>
diff --git a/test/files/run/t6434.scala b/test/files/run/t6434.scala
new file mode 100644
index 0000000000..e4a4579613
--- /dev/null
+++ b/test/files/run/t6434.scala
@@ -0,0 +1,8 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code =
+"""def f(x: => Int): Int = x
+f _
+"""
+}
diff --git a/test/files/run/t6439.check b/test/files/run/t6439.check
new file mode 100644
index 0000000000..178ea739f5
--- /dev/null
+++ b/test/files/run/t6439.check
@@ -0,0 +1,66 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class A
+defined class A
+
+scala> object A // warn
+defined module A
+warning: previously defined class A is not a companion to object A.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> trait B
+defined trait B
+
+scala> object B // warn
+defined module B
+warning: previously defined trait B is not a companion to object B.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> object C
+defined module C
+
+scala> object Bippy
+defined module Bippy
+
+scala> class C // warn
+defined class C
+warning: previously defined object C is not a companion to class C.
+Companions must be defined together; you may wish to use :paste mode for this.
+
+scala> class D
+defined class D
+
+scala> def D = 0 // no warn
+D: Int
+
+scala> val D = 0 // no warn
+D: Int = 0
+
+scala> object E
+defined module E
+
+scala> var E = 0 // no warn
+E: Int = 0
+
+scala> object F
+defined module F
+
+scala> type F = Int // no warn
+defined type alias F
+
+scala> :power
+** Power User mode enabled - BEEP WHIR GYVE **
+** :phase has been set to 'typer'. **
+** scala.tools.nsc._ has been imported **
+** global._, definitions._ also imported **
+** Try :help, :vals, power.<tab> **
+
+scala> intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+res0: $r.intp.global.Symbol = type F
+
+scala>
+
+scala>
diff --git a/test/files/run/t6439.scala b/test/files/run/t6439.scala
new file mode 100644
index 0000000000..70a2dbafaf
--- /dev/null
+++ b/test/files/run/t6439.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+class A
+object A // warn
+trait B
+object B // warn
+object C
+object Bippy
+class C // warn
+class D
+def D = 0 // no warn
+val D = 0 // no warn
+object E
+var E = 0 // no warn
+object F
+type F = Int // no warn
+:power
+intp("F") // this now works as a result of changing .typeSymbol to .typeSymbolDirect in IMain#Request#definedSymbols
+ """
+}
diff --git a/test/files/run/t6443-by-name.check b/test/files/run/t6443-by-name.check
new file mode 100644
index 0000000000..6f98fa4a28
--- /dev/null
+++ b/test/files/run/t6443-by-name.check
@@ -0,0 +1,3 @@
+1
+foo
+foo
diff --git a/test/files/run/t6443-by-name.scala b/test/files/run/t6443-by-name.scala
new file mode 100644
index 0000000000..bfd9bf9791
--- /dev/null
+++ b/test/files/run/t6443-by-name.scala
@@ -0,0 +1,18 @@
+object Test {
+
+ def main(args: Array[String]) {
+ def foo = {println("foo"); 0}
+ lazyDep(X)(foo)
+ }
+
+ trait T {
+ type U
+ }
+ object X extends T { type U = Int }
+
+ def lazyDep(t: T)(u: => t.U) {
+ println("1")
+ u
+ u
+ }
+}
diff --git a/test/files/run/t6443-varargs.check b/test/files/run/t6443-varargs.check
new file mode 100644
index 0000000000..257cc5642c
--- /dev/null
+++ b/test/files/run/t6443-varargs.check
@@ -0,0 +1 @@
+foo
diff --git a/test/files/run/t6443-varargs.scala b/test/files/run/t6443-varargs.scala
new file mode 100644
index 0000000000..9cbae3e99c
--- /dev/null
+++ b/test/files/run/t6443-varargs.scala
@@ -0,0 +1,16 @@
+object Test {
+
+ def main(args: Array[String]) {
+ def foo = {println("foo"); 0}
+ lazyDep(X)(foo)
+ }
+
+ trait T {
+ type U
+ }
+ object X extends T { type U = Int }
+
+ def lazyDep(t: T)(us: t.U*) {
+ List(us: _*)
+ }
+}
diff --git a/test/files/run/t6443.scala b/test/files/run/t6443.scala
new file mode 100644
index 0000000000..67fe2cab22
--- /dev/null
+++ b/test/files/run/t6443.scala
@@ -0,0 +1,15 @@
+class Base
+class Derived extends Base
+
+trait A {
+ def foo(d: String)(d2: d.type): Base
+ val s = ""
+ def bar: Unit = foo(s)(s)
+}
+object B extends A {
+ def foo(d: String)(d2: d.type): D forSome { type D <: S; type S <: Derived } = {d2.isEmpty; null} // Bridge method required here!
+}
+
+object Test extends App {
+ B.bar
+}
diff --git a/test/files/run/t6443b.scala b/test/files/run/t6443b.scala
new file mode 100644
index 0000000000..9320b1dcfe
--- /dev/null
+++ b/test/files/run/t6443b.scala
@@ -0,0 +1,16 @@
+trait A {
+ type D >: Null <: C
+ def foo(d: D)(d2: d.type): Unit
+ trait C {
+ def bar: Unit = foo(null)(null)
+ }
+}
+object B extends A {
+ class D extends C
+
+ def foo(d: D)(d2: d.type): Unit = () // Bridge method required here!
+}
+
+object Test extends App {
+ new B.D().bar
+}
diff --git a/test/files/run/t6548.check b/test/files/run/t6548.check
index e82cae110a..5dfcb12e02 100644
--- a/test/files/run/t6548.check
+++ b/test/files/run/t6548.check
@@ -1,2 +1,2 @@
false
-List(JavaAnnotationWithNestedEnum(value = VALUE))
+List(JavaAnnotationWithNestedEnum_1(value = VALUE))
diff --git a/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java
new file mode 100644
index 0000000000..32004de537
--- /dev/null
+++ b/test/files/run/t6548/JavaAnnotationWithNestedEnum_1.java
@@ -0,0 +1,17 @@
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target({ElementType.ANNOTATION_TYPE, ElementType.METHOD, ElementType.FIELD,
+ ElementType.TYPE, ElementType.PARAMETER})
+@Retention(RetentionPolicy.RUNTIME)
+public @interface JavaAnnotationWithNestedEnum_1
+{
+ public Value value() default Value.VALUE;
+
+ public enum Value
+ {
+ VALUE;
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t6548.scala b/test/files/run/t6548/Test_2.scala
index be3eb5b932..6e4f6ba92a 100644
--- a/test/files/run/t6548.scala
+++ b/test/files/run/t6548/Test_2.scala
@@ -2,7 +2,7 @@ import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
class Bean {
- @JavaAnnotationWithNestedEnum(JavaAnnotationWithNestedEnum.Value.VALUE)
+ @JavaAnnotationWithNestedEnum_1(JavaAnnotationWithNestedEnum_1.Value.VALUE)
def value = 1
}
diff --git a/test/files/run/t6572/bar_1.scala b/test/files/run/t6572/bar_1.scala
new file mode 100644
index 0000000000..5518ced7af
--- /dev/null
+++ b/test/files/run/t6572/bar_1.scala
@@ -0,0 +1,19 @@
+package bar
+
+abstract class IntBase[V] extends Base[Int, V]
+
+class DefaultIntBase[V <: IntProvider] extends IntBase[V] {
+ override protected def hashCode(key: Int) = key
+}
+
+trait IntProvider {
+ def int: Int
+}
+
+abstract class Base[@specialized K, V] {
+
+ protected def hashCode(key: K) = key.hashCode
+
+ def get(key: K): V = throw new RuntimeException
+
+} \ No newline at end of file
diff --git a/test/files/run/t6572/foo_2.scala b/test/files/run/t6572/foo_2.scala
new file mode 100644
index 0000000000..465f0b7c3c
--- /dev/null
+++ b/test/files/run/t6572/foo_2.scala
@@ -0,0 +1,17 @@
+//package foo
+
+import bar._
+
+class FooProvider extends IntProvider {
+ def int = 3
+}
+
+class Wrapper(users: DefaultIntBase[FooProvider]) {
+ final def user(userId: Int) = users.get(userId)
+}
+
+object Test {
+ def main(args: Array[String]) {
+ new Wrapper(new DefaultIntBase)
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t6584.check b/test/files/run/t6584.check
new file mode 100644
index 0000000000..35c8688751
--- /dev/null
+++ b/test/files/run/t6584.check
@@ -0,0 +1,8 @@
+Array: 102400
+Vector: 102400
+List: 102400
+Stream: 102400
+Array: 102400
+Vector: 102400
+List: 102400
+Stream: 102400
diff --git a/test/files/run/t6584.scala b/test/files/run/t6584.scala
new file mode 100644
index 0000000000..24c236ef35
--- /dev/null
+++ b/test/files/run/t6584.scala
@@ -0,0 +1,16 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ val size = 100 * 1024
+ val doubled = (1 to size) ++ (1 to size)
+
+ println("Array: " + Array.tabulate(size)(x => x).distinct.size)
+ println("Vector: " + Vector.tabulate(size)(x => x).distinct.size)
+ println("List: " + List.tabulate(size)(x => x).distinct.size)
+ println("Stream: " + Stream.tabulate(size)(x => x).distinct.size)
+
+ println("Array: " + doubled.toArray.distinct.size)
+ println("Vector: " + doubled.toVector.distinct.size)
+ println("List: " + doubled.toList.distinct.size)
+ println("Stream: " + doubled.toStream.distinct.size)
+ }
+}
diff --git a/test/files/run/t6611.scala b/test/files/run/t6611.scala
new file mode 100644
index 0000000000..0947a48f90
--- /dev/null
+++ b/test/files/run/t6611.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ locally {
+ val a = Array("1")
+ val a2 = Array(a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array("1": Object)
+ val a2 = Array[Object](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(true)
+ val a2 = Array[Boolean](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1: Short)
+ val a2 = Array[Short](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1: Byte)
+ val a2 = Array[Byte](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1)
+ val a2 = Array[Int](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1L)
+ val a2 = Array[Long](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1f)
+ val a2 = Array[Float](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(1d)
+ val a2 = Array[Double](a: _*)
+ assert(a ne a2)
+ }
+
+ locally {
+ val a = Array(())
+ val a2 = Array[Unit](a: _*)
+ assert(a ne a2)
+ }
+}
diff --git a/test/files/run/t6637.check b/test/files/run/t6637.check
new file mode 100644
index 0000000000..9766475a41
--- /dev/null
+++ b/test/files/run/t6637.check
@@ -0,0 +1 @@
+ok
diff --git a/test/files/run/t6637.scala b/test/files/run/t6637.scala
new file mode 100644
index 0000000000..d3c380370b
--- /dev/null
+++ b/test/files/run/t6637.scala
@@ -0,0 +1,8 @@
+
+object Test extends App {
+ try {
+ class A ; class B ; List().head.isInstanceOf[A with B]
+ } catch {
+ case _ :java.util.NoSuchElementException => println("ok")
+ }
+}
diff --git a/test/files/run/t6669.scala b/test/files/run/t6669.scala
new file mode 100644
index 0000000000..b55718b12b
--- /dev/null
+++ b/test/files/run/t6669.scala
@@ -0,0 +1,26 @@
+import java.io.{ByteArrayOutputStream, PrintStream}
+
+object Test extends App {
+ val baos = new ByteArrayOutputStream()
+ val ps = new PrintStream(baos)
+
+ // first test with the default classpath
+ (scala.Console withOut ps) {
+ scala.tools.scalap.Main.main(Array("-verbose", "java.lang.Object"))
+ }
+
+ // now make sure we saw the '.' in the classpath
+ val msg1 = baos.toString()
+ assert(msg1 contains "directory classpath: .", s"Did not see '.' in the default class path. Full results were:\n$msg1")
+
+ // then test again with a user specified classpath
+ baos.reset
+
+ (scala.Console withOut ps) {
+ scala.tools.scalap.Main.main(Array("-verbose", "-cp", "whatever", "java.lang.Object"))
+ }
+
+ // now make sure we did not see the '.' in the classpath
+ val msg2 = baos.toString()
+ assert(!(msg2 contains "directory classpath: ."), s"Saw '.' in the user specified class path. Full results were:\n$msg2")
+}
diff --git a/test/files/run/t6853.scala b/test/files/run/t6853.scala
new file mode 100644
index 0000000000..352375c99c
--- /dev/null
+++ b/test/files/run/t6853.scala
@@ -0,0 +1,18 @@
+// Test cases: the only place we can cut and paste without crying
+// ourselves to sleep.
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ // First testing the basic operations
+ val m = collection.mutable.ListMap[String, Int]()
+ var i = 0
+ while(i < 2) { m += ("foo" + i) -> i; i = i+1}
+ assert(m == Map("foo1"->1,"foo0"->0))
+ m-= "foo0"
+ assert(m == Map("foo1"->1))
+ // Now checking if it scales as described in SI-6853
+ i = 0
+ while(i < 80000) { m += ("foo" + i) -> i; i = i+1}
+ assert(m.size == 80000)
+ }
+}
diff --git a/test/files/run/t6863.scala b/test/files/run/t6863.scala
new file mode 100644
index 0000000000..d77adb6af4
--- /dev/null
+++ b/test/files/run/t6863.scala
@@ -0,0 +1,114 @@
+/** Make sure that when a variable is captured its initialization expression is handled properly */
+object Test {
+ def lazyVal() = {
+ // internally lazy vals become vars which are initialized with "_", so they need to be tested just like vars do
+ lazy val x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def ident() = {
+ val y = "42"
+ var x = y
+ assert({ () => x }.apply == "42")
+ }
+ def apply() = {
+ def y(x : Int) = x.toString
+ var x = y(42)
+ assert({ () => x }.apply == "42")
+ }
+ def literal() = {
+ var x = "42"
+ assert({ () => x }.apply == "42")
+ }
+ def `new`() = {
+ var x = new String("42")
+ assert({ () => x }.apply == "42")
+ }
+ def select() = {
+ object Foo{val bar = "42"}
+ var x = Foo.bar
+ assert({ () => x }.apply == "42")
+ }
+ def `throw`() = {
+ var x = if (true) "42" else throw new Exception("42")
+ assert({ () => x }.apply == "42")
+ }
+ def assign() = {
+ var y = 1
+ var x = y = 42
+ assert({ () => x}.apply == ())
+ }
+ def valDef() = {
+ var x = {val y = 42}
+ assert({ () => x}.apply == ())
+ }
+ def `return`(): String = {
+ var x = if (true) return "42" else ()
+ assert({ () => x}.apply == ())
+ "42"
+ }
+ def tryFinally() = {
+ var x = try { "42" } finally ()
+ assert({ () => x }.apply == "42")
+ }
+ def tryCatch() = {
+ var x = try { "42" } catch { case _ => "43" }
+ assert({ () => x }.apply == "42")
+ }
+ def `if`() = {
+ var x = if (true) ()
+ assert({ () => x }.apply == ())
+ }
+ def ifElse() = {
+ var x = if(true) "42" else "43"
+ assert({ () => x }.apply == "42")
+ }
+ def matchCase() = {
+ var x = 100 match {
+ case 100 => "42"
+ case _ => "43"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def block() = {
+ var x = {
+ val y = 42
+ "42"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def labelDef() = {
+ var x = 100 match {
+ case 100 => try "42" finally ()
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def nested() = {
+ var x = {
+ val y = 42
+ if(true) try "42" catch {case _ => "43"}
+ else "44"
+ }
+ assert({ () => x }.apply == "42")
+ }
+ def main(args: Array[String]) {
+ lazyVal()
+ ident()
+ apply()
+ literal()
+ `new`()
+ select()
+ `throw`()
+ assign()
+ valDef()
+ `return`()
+ tryFinally()
+ tryCatch()
+ ifElse()
+ `if`()
+ matchCase()
+ block()
+ labelDef()
+ nested()
+ }
+}
+
diff --git a/test/files/run/t6888.check b/test/files/run/t6888.check
new file mode 100644
index 0000000000..4e8a2de2db
--- /dev/null
+++ b/test/files/run/t6888.check
@@ -0,0 +1,3 @@
+2
+3
+3
diff --git a/test/files/run/t6888.scala b/test/files/run/t6888.scala
new file mode 100644
index 0000000000..0c64cbe5b6
--- /dev/null
+++ b/test/files/run/t6888.scala
@@ -0,0 +1,19 @@
+class C {
+ val x = 1
+ object $ {
+ val y = x + x
+ class abc$ {
+ def xy = x + y
+ }
+ object abc$ {
+ def xy = x + y
+ }
+ }
+}
+
+object Test extends App {
+ val c = new C()
+ println(c.$.y)
+ println(c.$.abc$.xy)
+ println(new c.$.abc$().xy)
+}
diff --git a/test/files/run/t6968.check b/test/files/run/t6968.check
new file mode 100644
index 0000000000..7a18941537
--- /dev/null
+++ b/test/files/run/t6968.check
@@ -0,0 +1 @@
+1, 3, 5
diff --git a/test/files/run/t6968.scala b/test/files/run/t6968.scala
new file mode 100644
index 0000000000..b5cadfd9e1
--- /dev/null
+++ b/test/files/run/t6968.scala
@@ -0,0 +1,7 @@
+object Test {
+ def main(args: Array[String]) {
+ val mixedList = List(1,(1,2),4,(3,1),(5,4),6)
+ val as = for((a,b) <- mixedList) yield a
+ println(as.mkString(", "))
+ }
+}
diff --git a/test/files/run/t6969.check b/test/files/run/t6969.check
new file mode 100644
index 0000000000..78297812c9
--- /dev/null
+++ b/test/files/run/t6969.check
@@ -0,0 +1 @@
+All threads completed.
diff --git a/test/files/run/t6969.scala b/test/files/run/t6969.scala
new file mode 100644
index 0000000000..8cfc28c1e5
--- /dev/null
+++ b/test/files/run/t6969.scala
@@ -0,0 +1,28 @@
+object Test {
+ private type Clearable = { def clear(): Unit }
+ private def choke() = {
+ try new Array[Object]((Runtime.getRuntime().maxMemory min Int.MaxValue).toInt)
+ catch {
+ case _: OutOfMemoryError => // what do you mean, out of memory?
+ case t: Throwable => println(t)
+ }
+ }
+ private def f(x: Clearable) = x.clear()
+ class Choker(id: Int) extends Thread {
+ private def g(iteration: Int) = {
+ val map = scala.collection.mutable.Map[Int, Int](1 -> 2)
+ try f(map) catch { case t: NullPointerException => println(s"Failed at $id/$iteration") ; throw t }
+ choke()
+ }
+ override def run() {
+ 1 to 50 foreach g
+ }
+ }
+
+ def main(args: Array[String]): Unit = {
+ val threads = 1 to 3 map (id => new Choker(id))
+ threads foreach (_.start())
+ threads foreach (_.join())
+ println("All threads completed.")
+ }
+}
diff --git a/test/files/run/t6989.check b/test/files/run/t6989.check
new file mode 100644
index 0000000000..8943792115
--- /dev/null
+++ b/test/files/run/t6989.check
@@ -0,0 +1,216 @@
+============
+sym = class PackagePrivateJavaClass, signature = ClassInfoType(...), owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = constructor PackagePrivateJavaClass, signature = (x$1: Int, x$2: Int)foo.PackagePrivateJavaClass, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = variable privateField, signature = Int, owner = class PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = method privateMethod, signature = ()Unit, owner = class PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = variable protectedField, signature = Int, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = method protectedMethod, signature = ()Unit, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = variable publicField, signature = Int, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = method publicMethod, signature = ()Unit, owner = class PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object PackagePrivateJavaClass, signature = foo.PackagePrivateJavaClass.type, owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = variable privateStaticField, signature = Int, owner = object PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = method privateStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = variable protectedStaticField, signature = Int, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = method protectedStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = variable publicStaticField, signature = Int, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = method publicStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = class JavaClass_1, signature = ClassInfoType(...), owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = class $PrivateJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = value this$0, signature = foo.JavaClass_1, owner = class $PrivateJavaClass
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = object $PrivateJavaClass, signature = JavaClass_1.this.$PrivateJavaClass.type, owner = class JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = class $ProtectedJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+isPrivate = false
+isProtected = true
+isPublic = false
+privateWithin = package foo
+============
+sym = value this$0, signature = foo.JavaClass_1, owner = class $ProtectedJavaClass
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = object $ProtectedJavaClass, signature = JavaClass_1.this.$ProtectedJavaClass.type, owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = class $PublicJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = constructor $PublicJavaClass, signature = (x$1: foo.JavaClass_1)JavaClass_1.this.$PublicJavaClass, owner = class $PublicJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = value this$0, signature = foo.JavaClass_1, owner = class $PublicJavaClass
+isPrivate = false
+isProtected = false
+isPublic = false
+privateWithin = package foo
+============
+sym = object $PublicJavaClass, signature = JavaClass_1.this.$PublicJavaClass.type, owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = constructor JavaClass_1, signature = ()foo.JavaClass_1, owner = class JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object JavaClass_1, signature = foo.JavaClass_1.type, owner = package foo
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = object PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = object ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
+============
+sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = constructor PublicStaticJavaClass, signature = ()foo.JavaClass_1.PublicStaticJavaClass, owner = class PublicStaticJavaClass
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = object PublicStaticJavaClass, signature = foo.JavaClass_1.PublicStaticJavaClass.type, owner = object JavaClass_1
+isPrivate = false
+isProtected = false
+isPublic = true
+privateWithin = <none>
+============
+sym = variable staticField, signature = Int, owner = object JavaClass_1
+isPrivate = true
+isProtected = false
+isPublic = false
+privateWithin = <none>
diff --git a/test/files/run/t6989/JavaClass_1.java b/test/files/run/t6989/JavaClass_1.java
new file mode 100644
index 0000000000..eb26a08700
--- /dev/null
+++ b/test/files/run/t6989/JavaClass_1.java
@@ -0,0 +1,41 @@
+package foo;
+
+// Originally composed to accommodate pull request feedback, this test has
+// uncovered a handful of bugs in FromJavaClassCompleter, namely:
+// * SI-7071 non-public ctors get lost
+// * SI-7072 inner classes are read incorrectly
+
+// I'm leaving the incorrect results of FromJavaClassCompleters in the check
+// file, so that we get notified when something changes there.
+
+class PackagePrivateJavaClass {
+ private int privateField = 0;
+ protected int protectedField = 1;
+ public int publicField = 2;
+
+ private static int privateStaticField = 3;
+ protected static int protectedStaticField = 4;
+ public static int publicStaticField = 5;
+
+ private void privateMethod() {}
+ protected void protectedMethod() {}
+ public void publicMethod() {}
+
+ private static void privateStaticMethod() {}
+ protected static void protectedStaticMethod() {}
+ public static void publicStaticMethod() {}
+
+ private PackagePrivateJavaClass() {}
+ protected PackagePrivateJavaClass(int x) {}
+ public PackagePrivateJavaClass(int x, int y) {}
+}
+
+public class JavaClass_1 {
+ private class PrivateJavaClass {}
+ private static class PrivateStaticJavaClass {}
+ protected class ProtectedJavaClass {}
+ private static class ProtectedStaticJavaClass {}
+ public class PublicJavaClass {}
+ public static class PublicStaticJavaClass {}
+ private static int staticField = 0;
+} \ No newline at end of file
diff --git a/test/files/run/t6989/Test_2.scala b/test/files/run/t6989/Test_2.scala
new file mode 100644
index 0000000000..e48e82422d
--- /dev/null
+++ b/test/files/run/t6989/Test_2.scala
@@ -0,0 +1,42 @@
+import scala.reflect.runtime.universe._
+
+// Originally composed to accommodate pull request feedback, this test has
+// uncovered a handful of bugs in FromJavaClassCompleter, namely:
+// * SI-7071 non-public ctors get lost
+// * SI-7072 inner classes are read incorrectly
+
+// I'm leaving the incorrect results of FromJavaClassCompleters in the check
+// file, so that we get notified when something changes there.
+
+package object foo {
+ def testAll(): Unit = {
+ test(typeOf[foo.PackagePrivateJavaClass].typeSymbol)
+ test(typeOf[foo.PackagePrivateJavaClass].typeSymbol.companionSymbol)
+ test(typeOf[foo.JavaClass_1].typeSymbol)
+ test(typeOf[foo.JavaClass_1].typeSymbol.companionSymbol)
+ }
+
+ def test(sym: Symbol): Unit = {
+ printSymbolDetails(sym)
+ if (sym.isClass || sym.isModule) {
+ sym.typeSignature.declarations.toList.sortBy(_.name.toString) foreach test
+ }
+ }
+
+ def printSymbolDetails(sym: Symbol): Unit = {
+ def stableSignature(sym: Symbol) = sym.typeSignature match {
+ case ClassInfoType(_, _, _) => "ClassInfoType(...)"
+ case tpe => tpe.toString
+ }
+ println("============")
+ println(s"sym = $sym, signature = ${stableSignature(sym)}, owner = ${sym.owner}")
+ println(s"isPrivate = ${sym.isPrivate}")
+ println(s"isProtected = ${sym.isProtected}")
+ println(s"isPublic = ${sym.isPublic}")
+ println(s"privateWithin = ${sym.privateWithin}")
+ }
+}
+
+object Test extends App {
+ foo.testAll()
+} \ No newline at end of file
diff --git a/test/files/run/t7008-scala-defined.check b/test/files/run/t7008-scala-defined.check
new file mode 100644
index 0000000000..84ed62619e
--- /dev/null
+++ b/test/files/run/t7008-scala-defined.check
@@ -0,0 +1,7 @@
+<init>: List(throws[NullPointerException](""))
+bar: List(throws[E1](""))
+baz: List(throws[IllegalStateException](""))
+=============
+<init>: List(throws[NullPointerException](""))
+bar: List(throws[E1](""))
+baz: List(throws[IllegalStateException](""))
diff --git a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
new file mode 100644
index 0000000000..94fd99018e
--- /dev/null
+++ b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
@@ -0,0 +1,12 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
+ val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
+ c.universe.reify(println(c.literal(s).splice))
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala b/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala
new file mode 100644
index 0000000000..7783c873ec
--- /dev/null
+++ b/test/files/run/t7008-scala-defined/ScalaClassWithCheckedExceptions_1.scala
@@ -0,0 +1,6 @@
+class ScalaClassWithCheckedExceptions_1[E1 <: Exception] @throws[NullPointerException]("") () {
+ @throws[E1]("") def bar() {}
+ @throws[IllegalStateException]("") def baz(x: Int) {}
+ // FIXME: SI-7066
+ // @throws[E2]("") def foo[E2 <: Exception] {}
+} \ No newline at end of file
diff --git a/test/files/run/t7008-scala-defined/Test_3.scala b/test/files/run/t7008-scala-defined/Test_3.scala
new file mode 100644
index 0000000000..03bb79d311
--- /dev/null
+++ b/test/files/run/t7008-scala-defined/Test_3.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ Macros.foo
+ println("=============")
+
+ val decls = typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
+ decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}"))
+} \ No newline at end of file
diff --git a/test/files/run/t7008.check b/test/files/run/t7008.check
new file mode 100644
index 0000000000..ee077f90ff
--- /dev/null
+++ b/test/files/run/t7008.check
@@ -0,0 +1,9 @@
+<init>: List(throws[NullPointerException](classOf[java.lang.NullPointerException]))
+bar: List(throws[Exception](classOf[java.lang.Exception]))
+baz: List(throws[IllegalStateException](classOf[java.lang.IllegalStateException]))
+foo: List(throws[Exception](classOf[java.lang.Exception]))
+=============
+<init>: List(throws[java.lang.NullPointerException](classOf[java.lang.NullPointerException]))
+bar: List(throws[java.lang.Exception](classOf[java.lang.Exception]))
+baz: List(throws[java.lang.IllegalStateException](classOf[java.lang.IllegalStateException]))
+foo: List(throws[java.lang.Exception](classOf[java.lang.Exception]))
diff --git a/test/files/run/t7008/Impls_Macros_2.scala b/test/files/run/t7008/Impls_Macros_2.scala
new file mode 100644
index 0000000000..7a17314085
--- /dev/null
+++ b/test/files/run/t7008/Impls_Macros_2.scala
@@ -0,0 +1,12 @@
+import language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def impl(c: Context) = {
+ val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
+ val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
+ c.universe.reify(println(c.literal(s).splice))
+ }
+
+ def foo = macro impl
+} \ No newline at end of file
diff --git a/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java b/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java
new file mode 100644
index 0000000000..dda2128302
--- /dev/null
+++ b/test/files/run/t7008/JavaClassWithCheckedExceptions_1.java
@@ -0,0 +1,7 @@
+class JavaClassWithCheckedExceptions_1<E1 extends Exception> {
+ public JavaClassWithCheckedExceptions_1() throws NullPointerException {}
+
+ public void bar() throws E1 {}
+ public void baz(int x) throws IllegalStateException {}
+ public <E2 extends Exception> void foo() throws E2 {}
+} \ No newline at end of file
diff --git a/test/files/run/t7008/Test_3.scala b/test/files/run/t7008/Test_3.scala
new file mode 100644
index 0000000000..b2961a829e
--- /dev/null
+++ b/test/files/run/t7008/Test_3.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ Macros.foo
+ println("=============")
+
+ val decls = typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
+ decls sortBy (_.name.toString) foreach (decl => println(s"${decl.name}: ${decl.annotations}"))
+} \ No newline at end of file
diff --git a/test/files/run/t7039.check b/test/files/run/t7039.check
new file mode 100644
index 0000000000..954906040f
--- /dev/null
+++ b/test/files/run/t7039.check
@@ -0,0 +1 @@
+Matched!
diff --git a/test/files/run/t7039.scala b/test/files/run/t7039.scala
new file mode 100644
index 0000000000..475c4ae267
--- /dev/null
+++ b/test/files/run/t7039.scala
@@ -0,0 +1,11 @@
+object UnapplySeqTest {
+ def unapplySeq(any: Any): Option[(Int, Seq[Int])] = Some((5, List(1)))
+}
+
+object Test extends App {
+ null match {
+ case UnapplySeqTest(5) => println("uh-oh")
+ case UnapplySeqTest(5, 1) => println("Matched!") // compiles
+ case UnapplySeqTest(5, xs @ _*) => println("toooo long: "+ (xs: Seq[Int]))
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t7046.check b/test/files/run/t7046.check
new file mode 100644
index 0000000000..427f1ce610
--- /dev/null
+++ b/test/files/run/t7046.check
@@ -0,0 +1,2 @@
+Set(class D, class E)
+Set(class D, class E)
diff --git a/test/files/run/t7046.scala b/test/files/run/t7046.scala
new file mode 100644
index 0000000000..647a15cd18
--- /dev/null
+++ b/test/files/run/t7046.scala
@@ -0,0 +1,13 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+sealed class C
+class D extends C
+class E extends C
+
+object Test extends App {
+ val c = cm.staticClass("C")
+ println(c.knownDirectSubclasses)
+ c.typeSignature
+ println(c.knownDirectSubclasses)
+} \ No newline at end of file
diff --git a/test/files/run/t7064-old-style-supercalls.check b/test/files/run/t7064-old-style-supercalls.check
new file mode 100644
index 0000000000..0cfbf08886
--- /dev/null
+++ b/test/files/run/t7064-old-style-supercalls.check
@@ -0,0 +1 @@
+2
diff --git a/test/files/run/t7064-old-style-supercalls.scala b/test/files/run/t7064-old-style-supercalls.scala
new file mode 100644
index 0000000000..cffa7b1888
--- /dev/null
+++ b/test/files/run/t7064-old-style-supercalls.scala
@@ -0,0 +1,48 @@
+import scala.reflect.runtime.universe._
+import Flag._
+import definitions._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.Eval
+
+object Test extends App {
+ val PARAMACCESSOR = (1L << 29).asInstanceOf[FlagSet]
+
+ // these trees can be acquired by running the following incantation:
+ // echo 'class C(val x: Int); class D extends C(2)' > foo.scala
+ // ./scalac -Xprint:parser -Yshow-trees-stringified -Yshow-trees-compact foo.scala
+
+ val c = ClassDef(
+ Modifiers(), newTypeName("C"), List(),
+ Template(
+ List(Select(Ident(ScalaPackage), newTypeName("AnyRef"))),
+ emptyValDef,
+ List(
+ ValDef(Modifiers(PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree),
+ DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ List(),
+ List(List(ValDef(Modifiers(PARAM | PARAMACCESSOR), newTermName("x"), Ident(newTypeName("Int")), EmptyTree))),
+ TypeTree(),
+ Block(
+ List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())),
+ Literal(Constant(())))))))
+ val d = ClassDef(
+ Modifiers(), newTypeName("D"), List(),
+ Template(
+ List(Ident(newTypeName("C"))),
+ emptyValDef,
+ List(
+ DefDef(
+ Modifiers(),
+ nme.CONSTRUCTOR,
+ List(),
+ List(List()),
+ TypeTree(),
+ Block(
+ List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List(Literal(Constant(2))))),
+ Literal(Constant(())))))))
+ val result = Select(Apply(Select(New(Ident(newTypeName("D"))), nme.CONSTRUCTOR), List()), newTermName("x"))
+ println(cm.mkToolBox().eval(Block(List(c, d), result)))
+} \ No newline at end of file
diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
index 05237bace8..b6af8f41bd 100644
--- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala
@@ -20,7 +20,7 @@ abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterable
property("gets iterated keys") = forAll(collectionPairs) {
case (t, coll) =>
val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v))
- val containsSelf = for ((k, v) <- coll) yield (coll.get(k) == Some(v))
+ val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) }
("Par contains elements of seq map" |: containsT.forall(_ == true)) &&
("Par contains elements of itself" |: containsSelf.forall(_ == true))
}
diff --git a/test/partest b/test/partest
index 8352f8a946..dd57137b21 100755
--- a/test/partest
+++ b/test/partest
@@ -74,10 +74,10 @@ if $cygwin; then
EXT_CLASSPATH=`cygpath --path --$format "$EXT_CLASSPATH"`
fi
-# last arg wins, so if JAVA_OPTS already contains -Xmx or -Xms the
-# supplied argument will be used.
-JAVA_OPTS="-Xmx1024M -Xms64M $JAVA_OPTS"
-[ -n "$SCALAC_OPTS" ] || SCALAC_OPTS="-deprecation"
+# last arg wins, so if JAVA_OPTS already contains one of these options
+# the supplied argument will be used.
+# At this writing, test/partest --all is reported to require 108m of permgen.
+JAVA_OPTS="-Xmx1024M -Xms64M -XX:MaxPermSize=128M $JAVA_OPTS"
partestDebugStr=""
if [ ! -z "${PARTEST_DEBUG}" ] ; then
diff --git a/test/pending/neg/t5378.scala b/test/pending/neg/t5378.scala
deleted file mode 100644
index cada29b0a0..0000000000
--- a/test/pending/neg/t5378.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-import language.reflectiveCalls
-
-class Coll[+T] {
- def contains = new { def apply[T1 <: T](value: T1) = ??? }
-}
-
-object Test {
- def main(args: Array[String]): Unit = {
- val xs = new Coll[List[String]]
- val ys: Coll[Traversable[String]] = xs
-
- println(ys contains Nil)
- // java.lang.NoSuchMethodException: Coll$$anon$1.apply(scala.collection.Traversable)
- // at java.lang.Class.getMethod(Class.java:1605)
- // at Test$.reflMethod$Method1(a.scala:14)
- // at Test$.main(a.scala:14)
- // at Test.main(a.scala)
- }
-}
diff --git a/test/files/neg/t5589neg.check b/test/pending/neg/t5589neg.check
index f1dad94df3..f1dad94df3 100644
--- a/test/files/neg/t5589neg.check
+++ b/test/pending/neg/t5589neg.check
diff --git a/test/files/neg/t5589neg.scala b/test/pending/neg/t5589neg.scala
index 31ff2c3693..31ff2c3693 100644
--- a/test/files/neg/t5589neg.scala
+++ b/test/pending/neg/t5589neg.scala
diff --git a/test/files/neg/t5589neg2.scala b/test/pending/neg/t5589neg2.scala
index b7c7ab7218..b7c7ab7218 100644
--- a/test/files/neg/t5589neg2.scala
+++ b/test/pending/neg/t5589neg2.scala
diff --git a/test/files/pos/t1336.scala b/test/pending/pos/t1336.scala
index 63967985c7..63967985c7 100644
--- a/test/files/pos/t1336.scala
+++ b/test/pending/pos/t1336.scala
diff --git a/test/files/pos/t5589.scala b/test/pending/pos/t5589.scala
index 69cbb20391..69cbb20391 100644
--- a/test/files/pos/t5589.scala
+++ b/test/pending/pos/t5589.scala
diff --git a/test/pending/run/idempotency-partial-functions.scala b/test/pending/run/idempotency-partial-functions.scala
new file mode 100644
index 0000000000..bc0ca706dd
--- /dev/null
+++ b/test/pending/run/idempotency-partial-functions.scala
@@ -0,0 +1,28 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.{ToolBox, ToolBoxError}
+import scala.tools.reflect.Eval
+
+// Related to SI-6187
+//
+// Moved to pending as we are currently blocked by the inability
+// to reify the parent types of the anonymous function class,
+// which are not part of the tree, but rather only part of the
+// ClassInfoType.
+object Test extends App {
+ val partials = reify {
+ List((false,true)) collect { case (x,true) => x }
+ }
+ println(Seq(show(partials), showRaw(partials)).mkString("\n\n"))
+ try {
+ println(partials.eval)
+ } catch {
+ case e: ToolBoxError => println(e)
+ }
+ val tb = cm.mkToolBox()
+ val tpartials = tb.typeCheck(partials.tree)
+ println(tpartials)
+ val rtpartials = tb.resetAllAttrs(tpartials)
+ println(tb.eval(rtpartials))
+}
+Test.main(null) \ No newline at end of file
diff --git a/test/files/run/t4574.scala b/test/pending/run/t4574.scala
index 1dde496aca..1dde496aca 100644
--- a/test/files/run/t4574.scala
+++ b/test/pending/run/t4574.scala
diff --git a/test/scaladoc/run/SI-6017.check b/test/scaladoc/run/SI-6017.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6017.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6017.scala b/test/scaladoc/run/SI-6017.scala
new file mode 100644
index 0000000000..9951534c6d
--- /dev/null
+++ b/test/scaladoc/run/SI-6017.scala
@@ -0,0 +1,28 @@
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def scaladocSettings = ""
+ override def code = """
+ class STAR
+ class Star
+ """
+
+ def testModel(rootPackage: Package) {
+ model match {
+ case Some(universe) => {
+ val index = IndexModelFactory.makeIndex(universe)
+ // Because "STAR" and "Star" are different
+ assert(index.firstLetterIndex('s').keys.toSeq.length == 2)
+
+ val indexPage = new Index(universe, index)
+ val letters = indexPage.letters
+ assert(letters.length > 1)
+ assert(letters(0).toString == "<span>#</span>")
+ }
+ case _ => assert(false)
+ }
+ }
+}
diff --git a/test/scaladoc/run/SI-6812.check b/test/scaladoc/run/SI-6812.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6812.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6812.scala b/test/scaladoc/run/SI-6812.scala
new file mode 100644
index 0000000000..fbd9588ede
--- /dev/null
+++ b/test/scaladoc/run/SI-6812.scala
@@ -0,0 +1,24 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+import language._
+
+object Test extends ScaladocModelTest {
+
+ override def code = """
+ import scala.reflect.macros.Context
+ import language.experimental.macros
+
+ object Macros {
+ def impl(c: Context) = c.literalUnit
+ def foo = macro impl
+ }
+
+ class C {
+ def bar = Macros.foo
+ }
+ """
+
+ def scaladocSettings = ""
+ override def extraSettings = super.extraSettings + " -Ymacro-no-expand"
+ def testModel(root: Package) = ()
+}
diff --git a/test/scaladoc/run/package-object.check b/test/scaladoc/run/package-object.check
index 01dbcc682f..7da897a4f2 100644
--- a/test/scaladoc/run/package-object.check
+++ b/test/scaladoc/run/package-object.check
@@ -1,3 +1,4 @@
List(test.B, test.A, scala.AnyRef, scala.Any)
List(B, A, AnyRef, Any)
+Some((newSource,10))
Done.
diff --git a/test/scaladoc/run/package-object.scala b/test/scaladoc/run/package-object.scala
index 5fb5a4ddf1..f5c79b1332 100644
--- a/test/scaladoc/run/package-object.scala
+++ b/test/scaladoc/run/package-object.scala
@@ -11,6 +11,7 @@ object Test extends ScaladocModelTest {
val p = root._package("test")
println(p.linearizationTemplates)
println(p.linearizationTypes)
+ println(p.inSource)
}
}