summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CONTRIBUTING.md2
-rw-r--r--README.rst13
-rw-r--r--bincompat-backward.whitelist.conf32
-rw-r--r--bincompat-forward.whitelist.conf76
-rw-r--r--build.number2
-rw-r--r--build.xml3985
-rw-r--r--src/build/pack.xml22
-rw-r--r--src/compiler/scala/reflect/reify/Errors.scala5
-rw-r--r--src/compiler/scala/reflect/reify/phases/Reshape.scala1
-rw-r--r--src/compiler/scala/reflect/reify/utils/Extractors.scala65
-rw-r--r--src/compiler/scala/tools/ant/templates/tool-windows.tmpl5
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala3
-rwxr-xr-xsrc/compiler/scala/tools/nsc/ast/DocComments.scala5
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala57
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala62
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala36
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala11
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala1
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala19
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala22
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala18
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala1
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala33
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/base/comment/Body.scala18
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala6
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Index.scala19
-rw-r--r--src/compiler/scala/tools/nsc/doc/html/page/Template.scala29
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala25
-rw-r--r--src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala43
-rwxr-xr-xsrc/compiler/scala/tools/nsc/doc/model/TreeFactory.scala13
-rw-r--r--src/compiler/scala/tools/nsc/interactive/CompilerControl.scala31
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala86
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Picklers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala12
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaParsers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/matching/Patterns.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala11
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala310
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala9
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala1
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala3
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala64
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala71
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala14
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala35
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala644
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala709
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala258
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala615
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala674
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala614
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala256
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala242
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala35
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala11
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala1
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala23
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala18
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala4
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala25
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala3876
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala62
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala17
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala42
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala201
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Unapplies.scala5
-rw-r--r--src/compiler/scala/tools/nsc/util/SimpleTracer.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/TreeSet.scala20
-rw-r--r--src/compiler/scala/tools/reflect/MacroImplementations.scala77
-rw-r--r--src/library/scala/collection/immutable/ListMap.scala8
-rw-r--r--src/library/scala/collection/mutable/ArrayBuffer.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayOps.scala23
-rw-r--r--src/library/scala/collection/parallel/package.scala7
-rw-r--r--src/library/scala/concurrent/Future.scala12
-rw-r--r--src/library/scala/math/BigInt.scala2
-rw-r--r--src/library/scala/math/Ordering.scala2
-rw-r--r--src/library/scala/runtime/BoxedUnit.java2
-rwxr-xr-xsrc/library/scala/xml/Utility.scala2
-rw-r--r--src/partest/scala/tools/partest/CompilerTest.scala2
-rw-r--r--src/partest/scala/tools/partest/DirectTest.scala28
-rw-r--r--src/partest/scala/tools/partest/nest/Diff.java873
-rw-r--r--src/partest/scala/tools/partest/nest/DiffPrint.java606
-rw-r--r--src/partest/scala/tools/partest/nest/FileManager.scala41
-rw-r--r--src/partest/scala/tools/partest/nest/PathSettings.scala3
-rw-r--r--src/partest/scala/tools/partest/nest/ReflectiveRunner.scala5
-rw-r--r--src/partest/scala/tools/partest/nest/RunnerManager.scala3
-rw-r--r--src/reflect/scala/reflect/api/Symbols.scala2
-rw-r--r--src/reflect/scala/reflect/api/TypeTags.scala38
-rw-r--r--src/reflect/scala/reflect/api/Types.scala2
-rw-r--r--src/reflect/scala/reflect/internal/ClassfileConstants.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala2
-rw-r--r--src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala77
-rw-r--r--src/reflect/scala/reflect/internal/Flags.scala14
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala17
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala2
-rw-r--r--src/reflect/scala/reflect/internal/TreeInfo.scala19
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala5
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala38
-rw-r--r--src/reflect/scala/reflect/internal/transform/UnCurry.scala11
-rw-r--r--src/reflect/scala/reflect/internal/util/Statistics.scala4
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala2
-rw-r--r--starr.number1
-rw-r--r--test/debug/OBSOLETE0
-rw-r--r--test/disabled/jvm/scala-concurrent-tck-akka.scala391
-rw-r--r--test/disabled/presentation/akka/src/akka/dispatch/Future.scala832
-rw-r--r--test/disabled/presentation/akka/src/pi.scala108
-rw-r--r--test/files/jvm/t7253.check1
-rw-r--r--test/files/jvm/t7253/Base_1.scala5
-rw-r--r--test/files/jvm/t7253/JavaClient_1.java9
-rw-r--r--test/files/jvm/t7253/ScalaClient_1.scala9
-rw-r--r--test/files/jvm/t7253/test.scala28
-rw-r--r--test/files/neg/delayed-init-ref.check10
-rw-r--r--test/files/neg/delayed-init-ref.flags1
-rw-r--r--test/files/neg/delayed-init-ref.scala42
-rw-r--r--test/files/neg/macro-without-xmacros-a.check6
-rw-r--r--test/files/neg/macro-without-xmacros-b.check6
-rw-r--r--test/files/neg/stringinterpolation_macro-neg.check2
-rw-r--r--test/files/neg/t414.check2
-rw-r--r--test/files/neg/t4879.check4
-rw-r--r--test/files/neg/t5510.check6
-rw-r--r--test/files/neg/t5663-badwarneq.check34
-rw-r--r--test/files/neg/t5663-badwarneq.scala18
-rw-r--r--test/files/neg/t5856.check2
-rw-r--r--test/files/neg/t6040.check2
-rw-r--r--test/files/neg/t6323a.check6
-rw-r--r--test/files/neg/t6771b.check6
-rw-r--r--test/files/neg/t6771b.scala16
-rw-r--r--test/files/neg/t6952.check4
-rw-r--r--test/files/neg/t7171.check7
-rw-r--r--test/files/neg/t7171.flags1
-rw-r--r--test/files/neg/t7171.scala11
-rw-r--r--test/files/neg/t7171b.check10
-rw-r--r--test/files/neg/t7171b.flags1
-rw-r--r--test/files/neg/t7171b.scala15
-rw-r--r--test/files/neg/t7185.check7
-rw-r--r--test/files/neg/t7185.scala3
-rw-r--r--test/files/neg/t7235.check4
-rw-r--r--test/files/neg/t7235.scala14
-rw-r--r--test/files/neg/t7238.check6
-rw-r--r--test/files/neg/t7238.scala7
-rw-r--r--test/files/neg/t7251.check4
-rw-r--r--test/files/neg/t7251/A_1.scala10
-rw-r--r--test/files/neg/t7251/B_2.scala7
-rw-r--r--test/files/neg/t7259.check7
-rw-r--r--test/files/neg/t7259.scala9
-rw-r--r--test/files/neg/t7285.check13
-rw-r--r--test/files/neg/t7285.flags1
-rw-r--r--test/files/neg/t7285.scala55
-rw-r--r--test/files/neg/t7289.check4
-rw-r--r--test/files/neg/t7289.scala39
-rw-r--r--test/files/neg/t7289_status_quo.check22
-rw-r--r--test/files/neg/t7289_status_quo.scala23
-rw-r--r--test/files/neg/t7290.check10
-rw-r--r--test/files/neg/t7290.flags1
-rw-r--r--test/files/neg/t7290.scala10
-rw-r--r--test/files/neg/t7299.check7
-rw-r--r--test/files/neg/t7299.scala6
-rw-r--r--test/files/neg/t7325.check19
-rw-r--r--test/files/neg/t7325.scala25
-rw-r--r--test/files/neg/t7330.check5
-rw-r--r--test/files/neg/t7330.scala5
-rw-r--r--test/files/neg/t7369.check13
-rw-r--r--test/files/neg/t7369.flags1
-rw-r--r--test/files/neg/t7369.scala43
-rw-r--r--test/files/neg/t7385.check10
-rw-r--r--test/files/neg/t7385.scala7
-rw-r--r--test/files/neg/t7388.check4
-rw-r--r--test/files/neg/t7388.scala1
-rw-r--r--test/files/neg/t7441.check6
-rw-r--r--test/files/neg/t7441.scala7
-rwxr-xr-xtest/files/pos/spec-t6286.scala10
-rw-r--r--test/files/pos/t3120/J1.java4
-rw-r--r--test/files/pos/t3120/J2.java4
-rw-r--r--test/files/pos/t3120/Q.java3
-rw-r--r--test/files/pos/t3120/Test.scala3
-rw-r--r--test/files/pos/t5744/Macros_1.scala22
-rw-r--r--test/files/pos/t5744/Test_2.scala6
-rw-r--r--test/files/pos/t6091.flags1
-rw-r--r--test/files/pos/t6091.scala10
-rw-r--r--test/files/pos/t6210.flags1
-rw-r--r--test/files/pos/t6210.scala21
-rw-r--r--test/files/pos/t6225.scala20
-rw-r--r--test/files/pos/t6386.scala5
-rw-r--r--test/files/pos/t6514.scala11
-rw-r--r--test/files/pos/t6675.flags1
-rw-r--r--test/files/pos/t6675.scala20
-rw-r--r--test/files/pos/t6771.flags1
-rw-r--r--test/files/pos/t6771.scala9
-rw-r--r--test/files/pos/t6921.scala11
-rw-r--r--test/files/pos/t7091.scala7
-rw-r--r--test/files/pos/t7190.scala26
-rw-r--r--test/files/pos/t7200b.scala50
-rw-r--r--test/files/pos/t7226.scala26
-rw-r--r--test/files/pos/t7232.flags1
-rw-r--r--test/files/pos/t7232/Foo.java9
-rw-r--r--test/files/pos/t7232/List.java4
-rw-r--r--test/files/pos/t7232/Test.scala5
-rw-r--r--test/files/pos/t7232b.flags1
-rw-r--r--test/files/pos/t7232b/Foo.java8
-rw-r--r--test/files/pos/t7232b/List.java5
-rw-r--r--test/files/pos/t7232b/Test.scala5
-rw-r--r--test/files/pos/t7232c.flags1
-rw-r--r--test/files/pos/t7232c/Foo.java10
-rw-r--r--test/files/pos/t7232c/Test.scala4
-rw-r--r--test/files/pos/t7232d.flags1
-rw-r--r--test/files/pos/t7232d/Entry.java4
-rw-r--r--test/files/pos/t7232d/Foo.java8
-rw-r--r--test/files/pos/t7232d/Test.scala4
-rw-r--r--test/files/pos/t7233.scala14
-rw-r--r--test/files/pos/t7233b.scala8
-rw-r--r--test/files/pos/t7234.scala15
-rw-r--r--test/files/pos/t7234b.scala20
-rw-r--r--test/files/pos/t7239.scala38
-rw-r--r--test/files/pos/t7285a.flags1
-rw-r--r--test/files/pos/t7285a.scala83
-rw-r--r--test/files/pos/t7329.scala1
-rw-r--r--test/files/pos/t7369.flags1
-rw-r--r--test/files/pos/t7369.scala37
-rw-r--r--test/files/pos/t7377/Client_2.scala11
-rw-r--r--test/files/pos/t7377/Macro_1.scala7
-rw-r--r--test/files/pos/t7377b.flags1
-rw-r--r--test/files/pos/t7377b.scala13
-rw-r--r--test/files/pos/t7426.scala3
-rw-r--r--test/files/pos/xlint1.flags1
-rw-r--r--test/files/pos/xlint1.scala13
-rw-r--r--test/files/presentation/doc.check50
-rwxr-xr-xtest/files/presentation/doc/doc.scala132
-rwxr-xr-xtest/files/presentation/doc/src/Class.scala1
-rwxr-xr-xtest/files/presentation/doc/src/Test.scala1
-rwxr-xr-xtest/files/presentation/doc/src/p/Base.scala11
-rwxr-xr-xtest/files/presentation/doc/src/p/Derived.scala9
-rw-r--r--test/files/presentation/hyperlinks.check137
-rw-r--r--test/files/presentation/hyperlinks/src/SuperTypes.scala32
-rw-r--r--test/files/presentation/ide-t1000567.check1
-rw-r--r--test/files/presentation/ide-t1000567/Runner.scala15
-rw-r--r--test/files/presentation/ide-t1000567/src/a/a.scala5
-rw-r--r--test/files/presentation/ide-t1000567/src/b/b.scala5
-rw-r--r--test/files/presentation/memory-leaks/MemoryLeaksTest.scala2
-rw-r--r--test/files/run/classfile-format-51.scala126
-rw-r--r--test/files/run/inline-ex-handlers.check915
-rw-r--r--test/files/run/interpolation.scala2
-rw-r--r--test/files/run/interpolationMultiline1.scala2
-rw-r--r--test/files/run/macro-expand-nullary-generic.check10
-rw-r--r--test/files/run/macro-expand-tparams-explicit.check2
-rw-r--r--test/files/run/macro-expand-tparams-implicit.check2
-rw-r--r--test/files/run/macro-expand-tparams-prefix-a.check6
-rw-r--r--test/files/run/macro-expand-tparams-prefix-b.check4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c1.check4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-c2.check4
-rw-r--r--test/files/run/macro-expand-tparams-prefix-d1.check2
-rw-r--r--test/files/run/macro-undetparams-consfromsls.check6
-rw-r--r--test/files/run/macro-undetparams-macroitself.check2
-rw-r--r--test/files/run/outertest.scala47
-rw-r--r--test/files/run/repl-colon-type.check21
-rw-r--r--test/files/run/repl-colon-type.scala4
-rw-r--r--test/files/run/resetattrs-this.check1
-rw-r--r--test/files/run/resetattrs-this.scala11
-rw-r--r--test/files/run/t3994.scala20
-rw-r--r--test/files/run/t5527.check2
-rw-r--r--test/files/run/t5603.check2
-rwxr-xr-xtest/files/run/t5699.check11
-rwxr-xr-xtest/files/run/t5699.scala24
-rw-r--r--test/files/run/t5710-1.check1
-rw-r--r--test/files/run/t5710-1.scala15
-rw-r--r--test/files/run/t5710-2.check1
-rw-r--r--test/files/run/t5710-2.scala15
-rw-r--r--test/files/run/t6028.check2
-rw-r--r--test/files/run/t6146b.check11
-rw-r--r--test/files/run/t6146b.scala3
-rw-r--r--test/files/run/t6223.check2
-rw-r--r--test/files/run/t6223.scala4
-rw-r--r--test/files/run/t6288.check2
-rw-r--r--test/files/run/t6370.scala12
-rw-r--r--test/files/run/t6440.check2
-rw-r--r--test/files/run/t6555.check2
-rw-r--r--test/files/run/t6715.scala15
-rw-r--r--test/files/run/t6725-1.check2
-rw-r--r--test/files/run/t6725-1.scala5
-rw-r--r--test/files/run/t6725-2.check8
-rw-r--r--test/files/run/t6725-2.scala6
-rw-r--r--test/files/run/t6793.scala9
-rw-r--r--test/files/run/t6793b.scala11
-rw-r--r--test/files/run/t6793c.scala11
-rw-r--r--test/files/run/t6900.scala36
-rw-r--r--test/files/run/t6935.scala14
-rw-r--r--test/files/run/t6937.check26
-rw-r--r--test/files/run/t6937.scala12
-rw-r--r--test/files/run/t7074.check9
-rw-r--r--test/files/run/t7074.scala15
-rw-r--r--test/files/run/t7171.scala22
-rw-r--r--test/files/run/t7185.check34
-rw-r--r--test/files/run/t7185.scala12
-rw-r--r--test/files/run/t7200.scala34
-rw-r--r--test/files/run/t7214.scala57
-rw-r--r--test/files/run/t7215.scala6
-rw-r--r--test/files/run/t7235.check4
-rw-r--r--test/files/run/t7235.scala14
-rw-r--r--test/files/run/t7240.check0
-rw-r--r--test/files/run/t7240/Macros_1.scala48
-rw-r--r--test/files/run/t7240/Test_2.scala3
-rw-r--r--test/files/run/t7242.scala71
-rwxr-xr-xtest/files/run/t7246.check1
-rwxr-xr-xtest/files/run/t7246/Outer.java4
-rwxr-xr-xtest/files/run/t7246/Test.scala16
-rwxr-xr-xtest/files/run/t7246b.check2
-rwxr-xr-xtest/files/run/t7246b/Base.scala7
-rwxr-xr-xtest/files/run/t7246b/Outer.java4
-rwxr-xr-xtest/files/run/t7246b/Test.scala14
-rw-r--r--test/files/run/t7249.check1
-rw-r--r--test/files/run/t7249.scala7
-rw-r--r--test/files/run/t7271.check12
-rw-r--r--test/files/run/t7271.scala34
-rw-r--r--test/files/run/t7290.scala9
-rw-r--r--test/files/run/t7319.check38
-rw-r--r--test/files/run/t7319.scala13
-rw-r--r--test/files/run/t7325.check19
-rw-r--r--test/files/run/t7325.scala25
-rw-r--r--test/files/run/t7341.check0
-rwxr-xr-xtest/files/run/t7341.flags1
-rwxr-xr-xtest/files/run/t7341.scala15
-rw-r--r--test/files/run/t7398.scala31
-rw-r--r--test/files/run/test-cpp.check146
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_lookup.check2
-rw-r--r--test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check2
-rwxr-xr-xtest/partest4
-rw-r--r--test/pending/run/t6387.check1
-rw-r--r--test/pending/run/t6387.scala16
-rw-r--r--test/scaladoc/run/SI-6580.check11
-rw-r--r--test/scaladoc/run/SI-6580.scala32
-rw-r--r--test/scaladoc/run/SI-6715.check1
-rw-r--r--test/scaladoc/run/SI-6715.scala15
-rwxr-xr-xtest/scaladoc/run/SI-7367.check1
-rwxr-xr-xtest/scaladoc/run/SI-7367.scala25
-rw-r--r--test/scaladoc/scalacheck/IndexTest.scala7
-rwxr-xr-xtools/stability-test.sh29
338 files changed, 10112 insertions, 10843 deletions
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 551100ae85..2451a52682 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -6,7 +6,7 @@ These guidelines are meant to be a living document that should be changed and ad
This is the process for committing code to the Scala project. There are of course exceptions to these rules, for example minor changes to comments and documentation, fixing a broken build etc.
-1. Make sure you have signed the [Scala CLA](http://www.scala-lang.org/sites/default/files/contributor_agreement.pdf), if not, sign it.
+1. Make sure you have signed the [Scala CLA](http://typesafe.com/contribute/cla/scala), if not, sign it.
2. Before starting to work on a feature or a fix, it's good practice to ensure that:
1. There is a ticket for your work in the project's issue tracker. If not, create it first (perhaps given a thumbs up from the scala-internals mailing list first).
2. The ticket has been discussed and prioritized by the team.
diff --git a/README.rst b/README.rst
index 72c4b6028b..4ed283dd29 100644
--- a/README.rst
+++ b/README.rst
@@ -190,15 +190,10 @@ In detail:
- Scala live git source tree:
http://github.com/scala/scala
-- Contact form:
- http://www.scala-lang.org/node/188
-
-
-If you are interested in contributing code, we ask you to complete and submit
-to us the Scala Contributor License Agreement, which allows us to ensure that
-all code submitted to the project is unencumbered by copyrights or patents.
-The form is available at:
-http://www.scala-lang.org/sites/default/files/contributor_agreement.pdf
+If you are interested in contributing code, we ask you to sign the
+[Scala Contributor License Agreement](http://typesafe.com/contribute/cla/scala),
+which allows us to ensure that all code submitted to the project is
+unencumbered by copyrights or patents.
Before submitting a pull-request, please make sure you have followed the guidelines
outlined in our `Pull Request Policy <https://github.com/scala/scala/wiki/Pull-Request-Policy>`_.
diff --git a/bincompat-backward.whitelist.conf b/bincompat-backward.whitelist.conf
index 4794666721..3426dc72e1 100644
--- a/bincompat-backward.whitelist.conf
+++ b/bincompat-backward.whitelist.conf
@@ -158,6 +158,38 @@ filter {
{
matchName="scala.reflect.internal.Names#NameOps.name"
problemName=MissingFieldProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.existentialTransform$default$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.existentialTransform"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.packSymbols"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.packSymbols$default$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.isRawParameter"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.Trees.substituteInfoParamsIntoDefDef"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ClassfileConstants.xxxunusedxxxx"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.Types#TypeVar.setInst"
+ problemName=IncompatibleResultTypeProblem
}
]
}
diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf
index 529fab1e14..88eabd4f8c 100644
--- a/bincompat-forward.whitelist.conf
+++ b/bincompat-forward.whitelist.conf
@@ -354,6 +354,82 @@ filter {
{
matchName="scala.reflect.internal.StdNames#TermNames.SelectFromTypeTree"
problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.existentialTransform$default$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.existentialTransform"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.packSymbols"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.packSymbols$default$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ExistentialsAndSkolems.isRawParameter"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.Trees.substituteInfoParamsIntoDefDef"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.SymbolTable.existentialTransform$default$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.SymbolTable.existentialTransform"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.SymbolTable.substituteInfoParamsIntoDefDef"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.SymbolTable.packSymbols"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.SymbolTable.packSymbols$default$3"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.SymbolTable.isRawParameter"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ClassfileConstants.CONSTANT_INVOKEDYNAMIC"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ClassfileConstants.invokedynamic"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ClassfileConstants.CONSTANT_METHODHANDLE"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ClassfileConstants.CONSTANT_METHODTYPE"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.Types#TypeVar.setInst"
+ problemName=IncompatibleResultTypeProblem
+ },
+ {
+ matchName="scala.reflect.internal.TreeInfo.effectivePatternArity"
+ problemName=MissingMethodProblem
+ },
+ {
+ matchName="scala.reflect.internal.ModifierFlags.DEFAULTMETHOD"
+ problemName=MissingMethodProblem
}
]
}
diff --git a/build.number b/build.number
index f28886751b..0b06d13468 100644
--- a/build.number
+++ b/build.number
@@ -1,7 +1,7 @@
#Tue Sep 11 19:21:09 CEST 2007
version.major=2
version.minor=10
-version.patch=1
+version.patch=2
# This is the -N part of a version. if it's 0, it's dropped from maven versions.
version.bnum=0
diff --git a/build.xml b/build.xml
index 01d867d8ca..f12b4e8dbe 100644
--- a/build.xml
+++ b/build.xml
@@ -5,359 +5,295 @@
SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
</description>
-<!-- ===========================================================================
-END-USER TARGETS
-============================================================================ -->
-
- <target name="build" depends="pack.done"
- description="Builds the Scala compiler and library. Executables are in 'build/pack/bin'."/>
-
- <target name="build-opt"
- description="Builds the optimised Scala compiler and library. Executables are in 'build/pack/bin'.">
- <antcall target="build">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
-
- <target name="clean" depends="quick.clean"
- description="Removes binaries of compiler and library. Distributions are untouched."/>
+<!-- HINTS
- <target name="test" depends="test.done"
- description="Runs test suite and bootstrapping test on Scala compiler and library."/>
+ - for faster builds, have a build.properties in the same directory as build.xml that says:
+ locker.skip=1
+ starr.use.released=1
- <target name="test-opt"
- description="Runs test suite and bootstrapping test, everything is optimised (compiler, library, tests).">
- <antcall target="test">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+-->
- <target name="docs" depends="docs.done"
- description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
+<!-- USAGE FROM JENKINS SCRIPTS IS (CURRENTLY) AS FOLLOWS:
+ant $antArgs $scalacArgs $targets
- <target name="docscomp" depends="docs.comp"
- description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
+antArgs tend to be:
+ -Darchives.skipxz=true
+ -Dscalac.args.optimise=-optimise
- <target name="docsclean" depends="docs.clean"
- description="Removes generated documentation. Distributions are untouched."/>
+scalacArgs examples:
+ "-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
- <target name="dist"
- description="Makes a new distribution and tests it. Will remove existing binaries and documentation.">
- <antcall target="locker.clean"/>
- <antcall target="docs.clean"/>
- <antcall target="all.done"/>
- </target>
-
- <target name="dist-opt"
- description="Makes a new optimised distribution and tests it. Will remove existing binaries and documentation.">
- <antcall target="dist">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
-
-
- <target name="partialdist" depends="dist.partial"
- description="Makes a new distribution without documentation, so just for testing."/>
-
- <target name="partialdist-opt"
- description="Makes a new optimised distribution without testing it or removing partially build elements.">
- <antcall target="partialdist">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+targets exercised:
+ locker.done build-opt nightly test.suite test.continuations.suite test.scaladoc
+-->
+<!-- To use Zinc with the ant build:
+ - install zinc and symlink the installed zinc script to ${basedir}/tools/zinc (${basedir} is where build.xml and the rest of your checkout resides)
+ - make sure to set ZINC_OPTS to match ANT_OPTS!
+-->
- <target name="fastdist" depends="dist.done"
- description="Makes a new distribution without testing it or removing partially build elements."/>
+<!--
+TODO:
+ - detect zinc anywhere on PATH
+ - automatically set ZINC_OPTS
+ - skip locker (and test.stability) by default to speed up PR validation, still do full build & testing during nightly
+ - (rework the build to pack locker and build using that when using zinc)
+-->
- <target name="fastdist-opt"
- description="Makes a new optimised distribution without testing it or removing partially build elements.">
- <antcall target="fastdist">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
-
- <target name="distclean" depends="dist.clean"
- description="Removes all distributions. Binaries and documentation are untouched."/>
-
- <target name="replacestarr"
- description="Replaces the Starr compiler and library by fresh ones built from current sources and tests them.">
- <fail message="This target is not available on Windows. Use 'ant replacestarrwin' instead.">
- <condition>
- <os family="windows"/>
- </condition>
- </fail>
- <antcall target="locker.clean"/>
- <antcall target="pack.done"/>
- <antcall target="starr.done"/>
- <antcall target="locker.clean"/>
- <antcall target="test.done"/>
- </target>
- <target name="replacestarr-opt"
- description="Replaces the Starr compiler and library by fresh, optimised ones built from current sources and tests them.">
- <antcall target="replacestarr">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
-
- <!-- Ant on Windows is not able to delete jar files that are referenced in any <path>.
- See ticket 1290 on trac. -->
- <target name="replacestarrwin"
- description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
- <fail message="This target is only available on Windows. Use 'ant replacestarr' instead.">
- <condition>
- <not><os family="windows"/></not>
- </condition>
- </fail>
- <echo message="CAUTION: Make sure to execute 'ant locker.clean build' prior to calling 'replacestarrwin'."/>
- <antcall target="starr.done"/>
- <antcall target="locker.clean"/>
- <antcall target="test.done"/>
- </target>
-
- <target name="replacestarrwin-opt"
- description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
- <antcall target="replacestarrwin">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
- <target name="replacelocker"
- description="Replaces the Locker compiler and library by fresh ones built from current sources.">
- <antcall target="palo.clean"/>
- <antcall target="unlocklocker"/>
- </target>
-
- <target name="replacelocker-opt"
- description="Replaces the Locker compiler and library by fresh, optimised ones built from current sources.">
- <antcall target="replacelocker">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
-
- <target name="unlocklocker"
- description="Unlocks Locker, allowing its compiler and library to be rebuilt">
- <antcall target="locker.unlock"/>
- <antcall target="palo.done"/>
- </target>
+<!-- ===========================================================================
+ END-USER TARGETS
+============================================================================ -->
+ <target name="build" depends="pack.done" description="Builds the Scala compiler and library. Executables are in 'build/pack/bin'."/>
+ <target name="test" depends="test.done" description="Runs test suite and bootstrapping test on Scala compiler and library."/>
+ <target name="docs" depends="docs.done" description="Builds documentation for the Scala library. Scaladoc is in 'build/scaladoc/library'."/>
+ <target name="docscomp" depends="docs.comp" description="Builds documentation for the Scala compiler and library. Scaladoc is in 'build/scaladoc'."/>
+ <target name="dist" depends="all.clean, all.done" description="Cleans all and builds and tests a new distribution."/>
+ <target name="partialdist" depends="dist.partial" description="Makes a new distribution without documentation, so just for testing."/>
+ <target name="fastdist" depends="dist.done" description="Makes a new distribution without testing it or removing partially build elements."/>
+
+ <target name="build-opt" description="Optimized version of build."> <optimized name="build"/></target>
+ <target name="test-opt" description="Optimized version of test."> <optimized name="test"/></target>
+ <target name="dist-opt" description="Optimized version of dist."> <optimized name="dist"/></target>
+ <target name="partialdist-opt" description="Optimized version of partialdist."> <optimized name="partialdist"/></target>
+ <target name="fastdist-opt" description="Optimized version of fastdist."> <optimized name="fastdist"/></target>
+
+ <!-- packaging -->
+ <target name="distpack" depends="dist.done, docs.done">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
+
+ <target name="distpack-maven" depends="dist.done, docs.done">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/></target>
+
+ <target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
+ <target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
- <target name="fastlocker.lib"
- description="Buildlocker without extra fuss">
- <property name="fastlocker" value="true"/>
- <antcall target="locker.unlock"/>
- <antcall target="locker.lib"/>
- </target>
+ <target name="all.done" depends="dist.done, test.done"/>
- <target name="fastlocker.reflect"
- description="Buildlocker without extra fuss">
- <property name="fastlocker" value="true"/>
- <antcall target="locker.unlock"/>
- <antcall target="locker.reflect"/>
- </target>
+ <!-- must use depends for all.done, not antcall: need the properties defined in there (dist.dir) -->
+ <target name="nightly-nopt" depends="all.done, docs.done">
+ <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
+ <target name="nightly"><optimized name="nightly-nopt"/></target>
- <target name="fastlocker.comp"
- description="Buildlocker without extra fuss">
- <property name="fastlocker" value="true"/>
- <antcall target="locker.unlock"/>
- <antcall target="locker.comp"/>
- </target>
+ <target name="nightly.checkall">
+ <antcall target="nightly-nopt"> <param name="partest.scalac_opts" value="-Ycheck:all"/></antcall></target>
- <target name="fastlocker"
- description="Buildlocker without extra fuss">
- <antcall target="fastlocker.comp"/>
- </target>
+ <target name="clean" depends="quick.clean" description="Removes binaries of compiler and library. Locker and distributions are untouched."/>
+ <target name="docsclean" depends="docs.clean" description="Removes generated documentation. Distributions are untouched."/>
+ <target name="distclean" depends="dist.clean" description="Removes all distributions. Binaries and documentation are untouched."/>
- <target name="buildlocker"
- description="Does the same for locker as build does for quick">
- <antcall target="locker.unlock"/>
- <antcall target="palo.bin"/>
- </target>
+ <macrodef name="optimized" >
+ <attribute name="name"/>
+ <sequential>
+ <antcall target="@{name}"><param name="scalac.args.optimise" value="-optimise"/></antcall>
+ </sequential>
+ </macrodef>
<!-- ===========================================================================
-PROPERTIES
+ PROPERTIES
============================================================================ -->
<property environment="env"/>
<!-- Prevents system classpath from being used -->
- <property name="build.sysclasspath" value="ignore"/>
+ <property name="build.sysclasspath" value="ignore"/>
<!-- Defines the repository layout -->
- <property name="docs.dir" value="${basedir}/docs"/>
- <property name="lib.dir" value="${basedir}/lib"/>
- <property name="lib-ant.dir" value="${lib.dir}/ant"/>
- <property name="src.dir" value="${basedir}/src"/>
- <property name="partest.dir" value="${basedir}/test"/>
+ <property name="docs.dir" value="${basedir}/docs"/>
+ <property name="lib.dir" value="${basedir}/lib"/>
+ <property name="src.dir" value="${basedir}/src"/>
+ <property name="partest.dir" value="${basedir}/test"/>
+ <property name="lib-ant.dir" value="${lib.dir}/ant"/>
<!-- For developers: any jars placed in this dir will be added to the classpath
of all targets and copied into quick/pack/etc builds. -->
- <property name="lib-extra.dir" value="${lib.dir}/extra"/>
+ <property name="lib-extra.dir" value="${lib.dir}/extra"/>
<!-- Loads custom properties definitions -->
<property file="${basedir}/build.properties"/>
+
<!-- Generating version number -->
<property file="${basedir}/build.number"/>
- <!-- Additional command line arguments for scalac. They are added to all build targets -->
- <property name="scalac.args" value=""/>
- <property name="javac.args" value=""/>
+ <!-- read starr.version -->
+ <property file="${basedir}/starr.number"/>
<!-- Sets location of pre-compiled libraries -->
- <property name="lib.starr.jar" value="${lib.dir}/scala-library.jar"/>
- <property name="msil.starr.jar" value="${lib.dir}/msil.jar"/>
- <property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
- <property name="comp.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
- <property name="jline.jar" value="${lib.dir}/jline.jar"/>
- <property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
- <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
+ <property name="library.starr.jar" value="${lib.dir}/scala-library.jar"/>
+ <property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
+ <property name="compiler.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
+ <property name="msil.starr.jar" value="${lib.dir}/msil.jar"/>
+ <property name="jline.jar" value="${lib.dir}/jline.jar"/>
+ <property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
+ <property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
<!-- Sets location of build folders -->
- <property name="build.dir" value="${basedir}/build"/>
- <property name="build-asm.dir" value="${build.dir}/asm"/>
- <property name="build-locker.dir" value="${build.dir}/locker"/>
- <property name="build-palo.dir" value="${build.dir}/palo"/>
- <property name="build-quick.dir" value="${build.dir}/quick"/>
- <property name="build-pack.dir" value="${build.dir}/pack"/>
- <property name="build-osgi.dir" value="${build.dir}/osgi"/>
- <property name="build-strap.dir" value="${build.dir}/strap"/>
- <property name="build-docs.dir" value="${build.dir}/scaladoc"/>
- <property name="build-libs.dir" value="${build.dir}/libs"/>
- <property name="build-sbt.dir" value="${build.dir}/sbt-interface"/>
-
-
- <property name="dists.dir" value="${basedir}/dists"/>
-
- <property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
- <property name="partest.version.number" value="0.9.2"/>
+ <property name="build.dir" value="${basedir}/build"/>
+ <property name="build-libs.dir" value="${build.dir}/libs"/>
+ <property name="build-asm.dir" value="${build.dir}/asm"/>
+ <property name="build-fjbg.dir" value="${build-libs.dir}"/>
+ <property name="build-forkjoin.dir" value="${build-libs.dir}"/>
+ <property name="build-locker.dir" value="${build.dir}/locker"/>
+ <property name="build-palo.dir" value="${build.dir}/palo"/>
+ <property name="build-quick.dir" value="${build.dir}/quick"/>
+ <property name="build-pack.dir" value="${build.dir}/pack"/>
+ <property name="build-osgi.dir" value="${build.dir}/osgi"/>
+ <property name="build-strap.dir" value="${build.dir}/strap"/>
+ <property name="build-docs.dir" value="${build.dir}/scaladoc"/>
+ <property name="build-sbt.dir" value="${build.dir}/sbt-interface"/>
+
+ <property name="test.osgi.src" value="${partest.dir}/osgi/src"/>
+ <property name="test.osgi.classes" value="${build-osgi.dir}/classes"/>
+
+ <property name="dists.dir" value="${basedir}/dists"/>
+
+ <property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
+ <property name="partest.version.number" value="0.9.2"/>
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
- <property name="java.flags" value="-Xmx256M -Xms32M"/>
- <property name="jvm.opts" value=""/>
+ <property name="java.flags" value="-Xmx256M -Xms32M"/>
+ <property name="jvm.opts" value=""/>
<!-- if ANT_OPTS is already set by the environment, it will be unaltered,
but if it is unset it will take this default value. -->
- <property name="env.ANT_OPTS" value="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=192M -XX:+UseParallelGC" />
+ <property name="env.ANT_OPTS" value="-Xms1536M -Xmx1536M -Xss1M -XX:MaxPermSize=192M -XX:+UseParallelGC" />
- <property
- name="scalacfork.jvmargs"
- value="${env.ANT_OPTS} ${jvm.opts}"/>
+ <property name="scalacfork.jvmargs" value="${env.ANT_OPTS} ${jvm.opts}"/>
<!-- ===========================================================================
-INITIALISATION
+ INITIALIZATION
============================================================================ -->
+ <target name="desired.jars.uptodate">
+ <patternset id="desired.jars">
+ <include name="lib/**/*.desired.sha1"/>
+ <include name="test/files/**/*.desired.sha1"/>
+ <include name="tools/**/*.desired.sha1"/>
+ </patternset>
- <target name="init.jars.check">
<uptodate property="lib.jars.uptodate">
- <srcfiles dir="${basedir}">
- <include name="lib/**/*.desired.sha1"/>
- <include name="test/files/**/*.desired.sha1"/>
- <include name="tools/**/*.desired.sha1"/>
- </srcfiles>
+ <srcfiles dir="${basedir}"><patternset refid="desired.jars"/></srcfiles>
<mapper type="glob" from="*.desired.sha1" to="*"/>
</uptodate>
</target>
- <target name="init.jars" depends="init.jars.check" unless="lib.jars.uptodate">
+ <target name="boot" depends="desired.jars.uptodate" unless="lib.jars.uptodate">
<echo level="warn" message="Updating bootstrap libs. (To do this by hand, run ./pull-binary-libs.sh)"/>
<exec osfamily="unix" vmlauncher="false" executable="./pull-binary-libs.sh" failifexecutionfails="true" />
<exec osfamily="windows" vmlauncher="false" executable="pull-binary-libs.sh" failifexecutionfails="true" />
<!-- uptodate task needs to know these are what's in the sha. -->
<touch>
- <fileset dir="${basedir}">
- <include name="lib/**/*.desired.sha1"/>
- <include name="test/files/**/*.desired.sha1"/>
- <include name="tools/**/*.desired.sha1"/>
- </fileset>
+ <fileset dir="${basedir}"><patternset refid="desired.jars"/></fileset>
<mapper type="glob" from="*.desired.sha1" to="*"/>
</touch>
</target>
- <!-- Add our maven ant tasks -->
- <target name="init.maven.tasks" depends="init.jars.check" unless="init.maven.tasks.finished">
- <path id="maven-ant-tasks.classpath" path="${lib.dir}/ant/maven-ant-tasks-2.1.1.jar" />
- <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
-
- <property name="init.maven.tasks.finished" value="true" />
- </target>
-
- <target name="init.extra.tasks" depends="init.maven.tasks" unless="init.extra.tasks.finished">
- <artifact:dependencies pathId="extra.tasks.classpath" filesetId="extra.tasks.fileset">
- <dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
- </artifact:dependencies>
- <!-- Pax runner -->
- <property name="pax.exam.version" value="2.5.0"/>
- <artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}"/>
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-junit4" version="${pax.exam.version}"/>
- <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-link-assembly" version="${pax.exam.version}"/>
- <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="1.4.0"/>
- <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-framework" version="1.5.1"/>
- <dependency groupId="ch.qos.logback" artifactId="logback-core" version="0.9.20"/>
- <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="0.9.20"/>
- <dependency groupId="junit" artifactId="junit" version="4.10"/>
- <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="3.2.2"/>
- </artifact:dependencies>
- <!-- BND support -->
- <typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
- <property name="init.maven.tasks.finished" value="true" />
- </target>
-
- <!-- Resolve maven dependencies -->
- <target name="init.maven.jars" depends="init.maven.tasks">
- <!-- This target has an issue where if the user directory does not exist, we BOMB. ugh. -->
- <mkdir dir="${user.home}/.m2/repository"/>
- <artifact:dependencies pathId="dependency.classpath" filesetId="dependency.fileset">
- <!--<dependency groupId="com.typesafe" artifactId="config" version="0.4.0"/>-->
- </artifact:dependencies>
- </target>
-
- <!-- Determines OSGi string + maven extension. -->
- <target name="init.hasbuildnum">
- <condition property="version.hasbuildnum">
- <not><equals arg1="${version.bnum}" arg2="0"/></not>
- </condition>
- </target>
- <target name="init.build.snapshot" unless="build.release">
- <property name="maven.version.suffix" value="-SNAPSHOT"/>
- </target>
- <target name="init.build.release" if="build.release" depends="init.hasbuildnum, init.build.snapshot">
- <property name="maven.version.suffix" value=""/>
- </target>
- <target name="init.build.nopatch.release" unless="version.hasbuildnum" depends="init.hasbuildnum">
- <property name="version.suffix" value=""/>
- </target>
- <!-- funny thing, ant is. Can only specify *one* property in if check. Guaranteed that both are true here,
- since properties are immutable. -->
- <target name="init.build.patch.release" if="version.hasbuildnum" depends="init.build.nopatch.release">
- <property name="version.suffix" value="-${version.bnum}"/>
- </target>
-
- <target name="init.hasmavensuffix" if="build.release" depends="init.build.patch.release, init.build.release">
- <condition property="version.hasmavensuffix">
- <not><equals arg1="${maven.version.suffix}" arg2=""/></not>
- </condition>
- </target>
-
- <target name="init.osgi.suffix" if="version.hasmavensuffix" depends="init.hasmavensuffix">
- <property name="osgi.version.suffix" value="${maven.version.suffix}"/>
- </target>
-
- <target name="init.osgi.suffix.final" if="build.release" unless="version.hasmavensuffix" depends="init.hasmavensuffix">
- <property name="osgi.version.suffix" value="-VFINAL"/>
- </target>
+ <target name="init" depends="boot">
+ <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
+ <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib-ant.dir}/ant-contrib.jar"/>
+ <!-- Add our maven ant tasks -->
+ <path id="maven-ant-tasks.classpath" path="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" />
+ <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
- <target name="init.osgi.suffix.snapshot" unless="build.release" depends="init.hasmavensuffix">
- <property name="osgi.version.suffix" value=""/>
- </target>
+ <!-- Resolve maven dependencies -->
+
+ <!-- work around http://jira.codehaus.org/browse/MANTTASKS-203:
+ java.lang.ClassCastException: org.codehaus.plexus.DefaultPlexusContainer cannot be cast to org.codehaus.plexus.PlexusContainer
+ on repeated use of artifact:dependencies
+ -->
+ <if><not><isset property="maven-deps-done"></isset></not><then>
+ <mkdir dir="${user.home}/.m2/repository"/>
+ <!-- This task fails if the user directory does not exist, so we create it above. UGH. -->
+ <artifact:dependencies pathId="extra.tasks.classpath" filesetId="extra.tasks.fileset">
+ <dependency groupId="biz.aQute" artifactId="bnd" version="1.50.0"/>
+ </artifact:dependencies>
+
+ <!-- Pax runner -->
+ <property name="pax.exam.version" value="2.5.0"/>
+ <artifact:dependencies pathId="pax.exam.classpath" filesetId="pax.exam.fileset">
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-container-native" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-junit4" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.exam" artifactId="pax-exam-link-assembly" version="${pax.exam.version}"/>
+ <dependency groupId="org.ops4j.pax.url" artifactId="pax-url-aether" version="1.4.0"/>
+ <dependency groupId="org.ops4j.pax.swissbox" artifactId="pax-swissbox-framework" version="1.5.1"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-core" version="0.9.20"/>
+ <dependency groupId="ch.qos.logback" artifactId="logback-classic" version="0.9.20"/>
+ <dependency groupId="junit" artifactId="junit" version="4.10"/>
+ <dependency groupId="org.apache.felix" artifactId="org.apache.felix.framework" version="3.2.2"/>
+ </artifact:dependencies>
+
+ <artifact:dependencies pathId="partest.extras.classpath" filesetId="partest.extras.fileset" versionsId="partest.extras.versions">
+ <dependency groupId="com.googlecode.java-diff-utils" artifactId="diffutils" version="1.3.0"/>
+ </artifact:dependencies>
+
+ <!-- BND support -->
+ <typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
+
+ <!-- Download STARR via maven if `starr.use.released` is set,
+ and `starr.version` is specified (see the starr.number properties file).
+ We want to slow down STARR changes by using only released versions. -->
+ <if><isset property="starr.use.released"/><then>
+ <echo message="Using Scala ${starr.version} for STARR."/>
+ <artifact:dependencies pathId="starr.core.path">
+ <dependency groupId="org.scala-lang" artifactId="scala-library" version="${starr.version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${starr.version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-compiler" version="${starr.version}"/>
+ </artifact:dependencies></then>
+ <else>
+ <path id="starr.core.path">
+ <pathelement location="${library.starr.jar}"/>
+ <pathelement location="${reflect.starr.jar}"/>
+ <pathelement location="${compiler.starr.jar}"/>
+ <pathelement location="${msil.starr.jar}"/>
+ </path></else>
+ </if>
+
+ <property name="maven-deps-done" value="yep!"/>
+ </then></if>
+
+ <!-- NOTE: ant properties are write-once: second writes are silently discarded; the logic below relies on this -->
+
+ <!-- Compute defaults (i.e., if not specified on command-line) for OSGi/maven version suffixes.
+ Try to establish the invariant (verified below):
+ `version.suffix == maven.version.suffix == osgi.version.suffix`,
+ except for:
+ - snapshot builds, where:
+ - `maven.suffix == "-SNAPSHOT"`
+ - `version.suffix == osgi.version.suffix == ""`
+ - final builds, where:
+ - `osgi.suffix == "-VFINAL"`
+ - `version.suffix == maven.version.suffix == ""`
+ -->
+ <if><not><equals arg1="${version.bnum}" arg2="0"/></not><then>
+ <property name="version.suffix" value="-${version.bnum}"/>
+ </then></if>
+
+ <if><or><not><isset property="version.suffix"/></not><equals arg1="${version.suffix}" arg2=""/></or><then>
+ <if><isset property="build.release"/><then>
+ <property name="maven.version.suffix" value=""/>
+ <property name="version.suffix" value="${maven.version.suffix}"/>
+ <if><equals arg1="${maven.version.suffix}" arg2=""/><then>
+ <property name="osgi.version.suffix" value="-VFINAL"/></then>
+ <else>
+ <property name="osgi.version.suffix" value="${maven.version.suffix}"/></else></if></then></if></then>
+ <else> <!-- version.suffix set and not empty -->
+ <property name="maven.version.suffix" value="${version.suffix}"/>
+ <property name="osgi.version.suffix" value="${version.suffix}"/></else></if>
+
+ <!-- if a maven version suffix was set (or inferred), assume we're building a release -->
+ <if><isset property="maven.version.suffix"/><then>
+ <property name="build.release" value="1"/></then></if>
+
+ <!-- not building a release and no version.suffix specified -->
+ <property name="maven.version.suffix" value="-SNAPSHOT"/>
+
+ <if><equals arg1="${maven.version.suffix}" arg2="-SNAPSHOT"/><then>
+ <property name="osgi.version.suffix" value=""/>
+ <property name="version.suffix" value=""/></then>
+ <else>
+ <property name="osgi.version.suffix" value="${maven.version.suffix}"/>
+ <property name="version.suffix" value="${maven.version.suffix}"/></else></if>
- <target name="init.build.suffix.done" depends="init.build.release, init.build.patch.release, init.osgi.suffix, init.osgi.suffix.final, init.osgi.suffix.snapshot"/>
-
- <target name="init.version.git" depends="init.build.suffix.done">
- <!-- Find out whether we are running on Windows -->
- <condition property="os.win">
- <os family="windows"/>
- </condition>
<exec osfamily="unix" executable="tools/get-scala-commit-sha" outputproperty="git.commit.sha" failifexecutionfails="false" />
<exec osfamily="windows" executable="cmd.exe" outputproperty="git.commit.sha" failifexecutionfails="false">
@@ -371,71 +307,68 @@ INITIALISATION
<arg value="tools\get-scala-commit-date.bat"/>
<arg value="-p"/>
</exec>
+
<!-- some default in case something went wrong getting the revision -->
- <property name="git.commit.sha" value="unknown"/>
- <property name="git.commit.date" value="unknown"/>
+ <property name="git.commit.sha" value="unknown"/>
+ <property name="git.commit.date" value="unknown"/>
<!-- We use the git describe to determine the OSGi modifier for our build. -->
- <property
- name="maven.version.number"
- value="${version.major}.${version.minor}.${version.patch}${version.suffix}${maven.version.suffix}"/>
- <property
- name="osgi.version.number"
- value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${version.suffix}${osgi.version.suffix}-${git.commit.sha}"/>
- </target>
+ <property name="maven.version.number"
+ value="${version.major}.${version.minor}.${version.patch}${maven.version.suffix}"/>
+ <property name="osgi.version.number"
+ value="${version.major}.${version.minor}.${version.patch}.v${git.commit.date}${osgi.version.suffix}-${git.commit.sha}"/>
+
+ <if><isset property="build.release"/><then>
+ <property name="version.number" value="${maven.version.number}"/>
+ </then><else>
+ <property name="version.number" value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
+ </else></if>
+
+ <condition property="has.java6">
+ <equals arg1="${ant.java.version}" arg2="1.6"/>
+ </condition>
+ <condition property="has.java7">
+ <equals arg1="${ant.java.version}" arg2="1.7"/>
+ </condition>
+ <condition property="has.unsupported.jdk">
+ <not><or>
+ <isset property="has.java7" />
+ <isset property="has.java6" />
+ </or></not>
+ </condition>
- <target name="init.version.snapshot" unless="build.release" depends="init.version.git">
- <property
- name="version.number"
- value="${version.major}.${version.minor}.${version.patch}${version.suffix}-${git.commit.date}-${git.commit.sha}"/>
- </target>
+ <fail if="has.unsupported.jdk" message="JDK ${ant.java.version} is not supported by this build!"/>
- <target name="init.version.release" if="build.release" depends="init.version.git">
- <property
- name="version.number"
- value="${maven.version.number}"/>
- </target>
+ <if><isset property="has.java7"/><then>
+ <echo level="warning"> You are using JDK7 for this build.
+ While this will be able to build most of Scala, it will not build the Swing project.
+ You will be unable to create a distribution.
+ </echo>
+ </then></if>
- <target name="init.version.done" depends="init.version.release, init.version.snapshot"/>
+ <!-- Allow this to be overridden simply -->
+ <property name="sbt.latest.version" value="0.12.2"/>
- <target name="init.testjava6">
- <condition property="has.java6">
- <equals arg1="${ant.java.version}" arg2="1.6"/>
- </condition>
- <condition property="has.java7">
- <equals arg1="${ant.java.version}" arg2="1.7"/>
- </condition>
- <condition property="has.unsupported.jdk">
- <not><or>
- <isset property="has.java7" />
- <isset property="has.java6" />
- </or></not>
- </condition>
- </target>
+ <property name="sbt.src.dir" value="${build-sbt.dir}/${sbt.latest.version}/src"/>
+ <property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
+
+ <property name="sbt.interface.jar" value="${sbt.lib.dir}/interface.jar"/>
+ <property name="sbt.interface.url" value="http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/interface/${sbt.latest.version}/jars/interface.jar"/>
+ <property name="sbt.interface.src.jar" value="${sbt.src.dir}/compiler-interface-src.jar"/>
+ <property name="sbt.interface.src.url" value="http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/compiler-interface/${sbt.latest.version}/jars/compiler-interface-src.jar"/>
- <target name="init.fail.bad.jdk" depends="init.testjava6">
- <fail if="has.unsupported.jdk"
- message="JDK ${ant.java.version} is not supported by this build!"/>
- </target>
- <target name="init.warn.jdk7" depends="init.testjava6" if="has.java7">
- <echo level="warning"> You are using JDK7 for this build. While this will be able to build most of Scala, it will
- not build the Swing project. You will be unable to create a distribution.
- </echo>
- </target>
- <target name="init" depends="init.jars, init.maven.jars, init.version.done, init.fail.bad.jdk, init.warn.jdk7, init.extra.tasks">
- <property name="scalac.args.always" value="-Yreify-copypaste" />
- <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
- <property name="scalac.args.optimise" value=""/>
- <!-- scalac.args.quickonly are added to quick.* targets but not others (particularly, locker.)
- This is to facilitate testing new command line options which do not yet exist in starr.
- Note: These are also used by strap since they may affect JVM target or other and we'd like
- to be able to test binary stability of features. -->
- <property name="scalac.args.quickonly" value=""/>
- <property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
- <property name="scalac.args.quick" value="${scalac.args.all} ${scalac.args.quickonly}"/>
- <!-- Setting-up Ant contrib tasks -->
- <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="${lib.dir}/ant/ant-contrib.jar"/>
+ <!-- Additional command line arguments for scalac. They are added to all build targets -->
+ <property name="scalac.args" value=""/>
+ <property name="javac.args" value=""/>
+
+ <property name="scalac.args.always" value="" />
+ <property name="scalac.args.optimise" value=""/> <!-- scalac.args.optimise is selectively overridden in certain antcall tasks. -->
+ <property name="scalac.args.all" value="${scalac.args.always} ${scalac.args} ${scalac.args.optimise}"/>
+ <property name="scalac.args.locker" value="${scalac.args.all}"/>
+ <property name="scalac.args.quick" value="${scalac.args.all}"/>
+ <property name="scalac.args.strap" value="${scalac.args.quick}"/>
+
<!-- This is the start time for the distribution -->
<tstamp prefix="time">
<format property="human" pattern="d MMMM yyyy, HH:mm:ss" locale="en,US"/>
@@ -443,14 +376,15 @@ INITIALISATION
</tstamp>
<!-- some default in case something went wrong getting the revision -->
- <property name="version.number" value="-unknown-"/>
- <property name="init.avail" value="yes"/>
-
+ <property name="version.number" value="-unknown-"/>
+ <property name="init.avail" value="yes"/>
<!-- Local libs (developer use.) -->
<mkdir dir="${lib-extra.dir}"/>
- <path id="lib.extra">
+ <!-- Auxiliary libs placed on every classpath. -->
+ <path id="aux.libs">
+ <pathelement location="${ant.jar}"/>
<!-- needs ant 1.7.1 -->
<!-- <fileset dir="${lib-extra.dir}" erroronmissingdir="false"> -->
<fileset dir="${lib-extra.dir}">
@@ -458,21 +392,6 @@ INITIALISATION
</fileset>
</path>
- <!-- Libraries only used for STARR -->
- <path id="starr.dep.libs">
- <fileset dir="${lib.dir}">
- <include name="fjbg.jar"/>
- <include name="forkjoin.jar"/>
- <include name="msil.jar"/>
- </fileset>
- </path>
- <!-- Auxiliary libs placed on every classpath. -->
- <path id="aux.libs">
- <pathelement location="${ant.jar}"/>
- <path refid="lib.extra"/>
- <path refid="dependency.classpath"/>
- </path>
-
<!-- And print-out what we are building -->
<echo message=" build time: ${time.human}" />
<echo message=" java version: ${java.vm.name} ${java.version} (${ant.java.version})" />
@@ -486,1195 +405,900 @@ INITIALISATION
<echo message=" OSGi version: ${osgi.version.number}" />
<echo message="canonical version: ${version.number}" />
- <!-- Define tasks that can be run with Starr -->
- <path id="starr.classpath">
- <pathelement location="${lib.starr.jar}"/>
- <pathelement location="${reflect.starr.jar}"/>
- <pathelement location="${comp.starr.jar}"/>
+ <!-- validate version suffixes -->
+ <if><equals arg1="${maven.version.suffix}" arg2="-SNAPSHOT"/><then>
+ <condition property="version.suffixes.consistent"><and>
+ <equals arg1="${osgi.version.suffix}" arg2=""/>
+ <equals arg1="${version.suffix}" arg2=""/>
+ </and></condition></then>
+ <else>
+ <if><equals arg1="${osgi.version.suffix}" arg2="-VFINAL"/><then>
+ <condition property="version.suffixes.consistent"><and>
+ <equals arg1="${maven.version.suffix}" arg2=""/>
+ <equals arg1="${version.suffix}" arg2=""/>
+ </and></condition></then>
+ <else>
+ <condition property="version.suffixes.consistent"><and>
+ <equals arg1="${osgi.version.suffix}" arg2="${maven.version.suffix}"/>
+ <equals arg1="${version.suffix}" arg2="${maven.version.suffix}"/>
+ </and></condition></else></if></else></if>
+
+ <!-- <echo message=" maven suffix: ${maven.version.suffix}"/>
+ <echo message=" OSGi suffix: ${osgi.version.suffix}" />
+ <echo message="canonical suffix: ${version.suffix}" /> -->
+ <fail unless="version.suffixes.consistent" message="Version suffixes inconsistent!"/>
+
+ <path id="forkjoin.classpath" path="${build-libs.dir}/classes/forkjoin"/>
+ <path id="fjbg.classpath" path="${build-libs.dir}/classes/fjbg"/>
+ <path id="asm.classpath" path="${build-asm.dir}/classes"/>
+ <property name="forkjoin-classes" refid="forkjoin.classpath"/>
+ <property name="fjbg-classes" refid="fjbg.classpath"/>
+ <property name="asm-classes" refid="asm.classpath"/>
+
+ <!-- Compilers to use for the various stages.
+
+ There must be a variable of the shape @{stage}.compiler.path for all @{stage} in starr, locker, quick, strap.
+ -->
+ <path id="starr.compiler.path">
+ <path refid="starr.core.path"/>
+ <pathelement location="${lib.dir}/forkjoin.jar"/>
<path refid="aux.libs"/>
- <path refid="starr.dep.libs"/>
</path>
- <!-- What to have on the compilation path when compiling during certain phases -->
- <path id="quick.compilation.path">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-libs.dir}/classes/forkjoin"/>
- <path refid="lib.extra"/>
- </path>
- <path id="strap.compilation.path">
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-libs.dir}/classes/forkjoin"/>
- <path refid="lib.extra"/>
- </path>
- <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.classpath"/>
- </target>
-<!-- ===========================================================================
-LOCAL DEPENDENCY (Adapted ASM)
-============================================================================ -->
+ <!-- To skip locker, use -Dlocker.skip=1 -->
+ <if><isset property="locker.skip"/><then>
+ <echo message="Using STARR to build the quick stage (skipping locker)."/>
+ <path id="locker.compiler.path" refid="starr.compiler.path"/>
+ <property name="locker.locked" value="locker skipped"/></then>
+ <else>
+ <path id="locker.compiler.path"><path refid="locker.compiler.build.path"/></path></else></if>
- <target name="asm.start" depends="init">
- <condition property="asm.available">
- <available file="${build-asm.dir}/asm.complete"/>
- </condition>
- </target>
+ <!-- compilerpathref for compiling with quick -->
+ <path id="quick.compiler.path"> <path refid="quick.compiler.build.path"/></path>
- <target name="asm.lib" depends="asm.start" unless="asm.available">
- <stopwatch name="asm.lib.timer"/>
- <mkdir dir="${build-asm.dir}/classes/"/>
- <javac
- srcdir="${src.dir}/asm"
- destdir="${build-asm.dir}/classes"
- classpath="${build-asm.dir}/classes"
- includes="**/*.java"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- </javac>
- <touch file="${build-asm.dir}/asm.complete" verbose="no"/>
- <stopwatch name="asm.lib.timer" action="total"/>
- </target>
+ <!-- What to have on the compilation path when compiling during certain phases.
+
+ There must be a variable of the shape @{stage}.@{project}.build.path
+ for all @{stage} in locker, quick, strap
+ and all @{project} in library, reflect, compiler
+ when stage is quick, @{project} also includes: actors, swing, plugins, scalacheck, partest, scalap
+ -->
- <target name="asm.done" depends="asm.lib">
- <path id="asm.classpath">
- <pathelement location="${build-asm.dir}/classes/"/>
+ <!-- LOCKER -->
+ <path id="locker.library.build.path">
+ <pathelement location="${build-locker.dir}/classes/library"/>
+ <path refid="forkjoin.classpath"/>
+ <path refid="aux.libs"/>
</path>
- </target>
- <target name="asm.clean" depends="init">
- <delete dir="${build-asm.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
+ <path id="locker.actors.build.path">
+ <path refid="locker.library.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/actors"/>
+ </path>
-<!-- ===========================================================================
-LOCAL DEPENDENCY (FORKJOIN)
-============================================================================ -->
- <target name="forkjoin.start" depends="init">
- <uptodate property="forkjoin.available" targetfile="${build-libs.dir}/forkjoin.complete">
- <srcfiles dir="${src.dir}/forkjoin">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
+ <path id="locker.msil.build.path">
+ <path refid="locker.compiler.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/msil"/>
+ </path>
- <target name="forkjoin.lib" depends="forkjoin.start" unless="forkjoin.available">
- <stopwatch name="forkjoin.lib.timer"/>
- <mkdir dir="${build-libs.dir}/classes/forkjoin"/>
- <javac
- fork="yes"
- compiler="javac1.6"
- srcdir="${src.dir}/forkjoin"
- destdir="${build-libs.dir}/classes/forkjoin"
- classpath="${build-libs.dir}/classes/forkjoin"
- includes="**/*.java"
- debug="true"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- </javac>
- <touch file="${build-libs.dir}/forkjoin.complete" verbose="no"/>
- <stopwatch name="forkjoin.lib.timer" action="total"/>
- </target>
+ <path id="locker.reflect.build.path">
+ <path refid="locker.library.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/reflect"/>
+ </path>
- <target name="forkjoin.pack" depends="forkjoin.lib">
- <jar destfile="${build-libs.dir}/forkjoin.jar">
- <fileset dir="${build-libs.dir}/classes/forkjoin"/>
- </jar>
- </target>
+ <path id="locker.compiler.build.path">
+ <path refid="locker.reflect.build.path"/>
+ <pathelement location="${build-locker.dir}/classes/compiler"/>
+ <pathelement location="${build-locker.dir}/classes/msil"/>
+ <path refid="asm.classpath"/>
+ <path refid="fjbg.classpath"/>
+ <pathelement location="${jline.jar}"/>
+ </path>
+
+ <path id="locker.msil.build.path" refid="locker.compiler.build.path"/>
- <target name="forkjoin.done" depends="forkjoin.pack">
- <!-- TODO - jar or classfiles? -->
- <path id="forkjoin.classpath">
- <pathelement location="${build-libs.dir}/classes/forkjoin"/>
+ <!-- QUICK -->
+ <path id="quick.library.build.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <path refid="forkjoin.classpath"/>
+ <path refid="aux.libs"/>
</path>
- </target>
- <target name="forkjoin.clean" depends="init">
- <delete dir="${build-libs.dir}/classes/forkjoin" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- <delete file="${build-libs.dir}/forkjoin.complete" quiet="yes" failonerror="no"/>
- </target>
+ <path id="quick.actors.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ </path>
-<!-- ===========================================================================
-LOCAL DEPENDENCY (FJBG)
-============================================================================ -->
+ <path id="quick.reflect.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ </path>
- <target name="fjbg.init" depends="init">
- <uptodate property="fjbg.available" targetfile="${build-libs.dir}/fjbg.complete">
- <srcfiles dir="${src.dir}/fjbg">
- <include name="**/*.java"/>
- <include name="**/*.scala"/>
- </srcfiles>
- </uptodate>
- </target>
+ <path id="quick.msil.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/msil"/>
+ </path>
- <target name="fjbg.lib" depends="fjbg.init" unless="fjbg.available">
- <stopwatch name="fjbg.lib.timer" />
- <mkdir dir="${build-libs.dir}/classes/fjbg"/>
- <javac
- srcdir="${src.dir}/fjbg"
- destdir="${build-libs.dir}/classes/fjbg"
- classpath="${build-libs.dir}/classes/fjbg"
- includes="**/*.java"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- </javac>
- <touch file="${build-libs.dir}/fjbg.complete" verbose="no"/>
- <stopwatch name="fjbg.lib.timer" action="total"/>
- </target>
+ <path id="quick.compiler.build.path">
+ <path refid="quick.reflect.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/msil"/>
+ <path refid="asm.classpath"/>
+ <path refid="fjbg.classpath"/>
+ <pathelement location="${jline.jar}"/>
+ </path>
- <target name="fjbg.pack" depends="fjbg.lib">
- <jar destfile="${build-libs.dir}/fjbg.jar">
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
- </jar>
- </target>
+ <path id="quick.swing.build.path">
+ <path refid="quick.library.build.path"/>
+ <path refid="quick.actors.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/swing"/>
+ </path>
- <target name="fjbg.done" depends="fjbg.pack">
- <!-- TODO - jar or classfiles? -->
- <path id="fjbg.classpath">
- <pathelement location="${build-libs.dir}/classes/fjbg"/>
+ <path id="quick.plugins.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
</path>
- </target>
- <target name="fjbg.clean" depends="init">
- <delete dir="${build-libs.dir}/classes/fjbg" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- <delete file="${build-libs.dir}/fjbg.complete" quiet="yes" failonerror="no"/>
- </target>
+ <path id="quick.scalacheck.build.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ <pathelement location="${build-quick.dir}/classes/scalacheck"/>
+ </path>
-<!-- ===========================================================================
-LOCAL REFERENCE BUILD (LOCKER)
-============================================================================ -->
+ <path id="quick.scalap.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
+ </path>
- <target name="locker.start" depends="asm.done, forkjoin.done, fjbg.done">
- <condition property="locker.available">
- <available file="${build-locker.dir}/all.complete"/>
- </condition>
- </target>
+ <path id="quick.partest.build.path">
+ <path refid="quick.scalap.build.path"/>
+ <path refid="partest.extras.classpath"/>
+ <pathelement location="${scalacheck.jar}"/>
+ </path>
- <target name="locker.pre-lib" depends="locker.start" unless="locker.available">
- <condition property="locker.lib.needed">
- <not><available file="${build-locker.dir}/library.complete"/></not>
- </condition>
- </target>
+ <path id="quick.bin.tool.path">
+ <pathelement location="${build-quick.dir}/classes/library"/>
+ <pathelement location="${build-quick.dir}/classes/actors"/>
+ <pathelement location="${build-quick.dir}/classes/reflect"/>
+ <pathelement location="${build-quick.dir}/classes/compiler"/>
+ <pathelement location="${build-quick.dir}/classes/msil"/>
+ <pathelement location="${build-quick.dir}/classes/scalap"/>
+ <pathelement location="${build-quick.dir}/classes/continuations-library"/>
+ <pathelement location="${jline.jar}"/>
+ <path refid="asm.classpath"/>
+ <path refid="forkjoin.classpath"/>
+ <path refid="aux.libs"/>
+ </path>
- <target name="locker.lib" depends="locker.pre-lib" if="locker.lib.needed">
- <stopwatch name="locker.lib.timer"/>
- <mkdir dir="${build-locker.dir}/classes/library"/>
- <javac
- srcdir="${src.dir}/library"
- destdir="${build-locker.dir}/classes/library"
- includes="**/*.java"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- <classpath>
- <path refid="forkjoin.classpath"/>
- <pathelement location="${build-locker.dir}/classes/library"/>
- </classpath>
- </javac>
- <!-- NOTE: Potential problem with maximal command line length on Windows
- (32768 characters for XP, since executed with Java's "exec"). See
- src/build/msil.xml in msil branch for more details. -->
- <scalacfork
- destdir="${build-locker.dir}/classes/library"
- compilerpathref="starr.classpath"
- srcpath="${src.dir}/library"
- params="${scalac.args.all}"
- srcdir="${src.dir}/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-locker.dir}/classes/library/library.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-locker.dir}/classes/library">
- <fileset dir="${src.dir}/library">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </fileset>
- </copy>
- <touch file="${build-locker.dir}/library.complete" verbose="no"/>
- <stopwatch name="locker.lib.timer" action="total"/>
- </target>
+ <!-- PACK -->
+ <!-- also used for docs.* targets TODO: use separate paths for those -->
+ <path id="pack.compiler.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
+ <pathelement location="${ant.jar}"/>
+ <pathelement location="${jline.jar}"/>
+ <path refid="partest.extras.classpath"/>
+ <path refid="aux.libs"/>
+ </path>
+
+ <path id="pack.bin.tool.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/jline.jar"/>
+ <path refid="aux.libs"/>
+ </path>
+ <path id="pack.library.files">
+ <fileset dir="${build-quick.dir}/classes/library"/>
+ <fileset dir="${build-quick.dir}/classes/continuations-library"/>
+ <fileset dir="${forkjoin-classes}"/>
+ </path>
- <target name="locker.pre-reflect" depends="locker.lib" unless="locker.available">
- <condition property="locker.reflect.needed">
- <not><available file="${build-locker.dir}/reflect.complete"/></not>
- </condition>
- </target>
+ <path id="pack.actors.files">
+ <fileset dir="${build-quick.dir}/classes/actors"/>
+ </path>
- <target name="locker.reflect" depends="locker.pre-reflect" if="locker.reflect.needed">
- <stopwatch name="locker.reflect.timer"/>
- <mkdir dir="${build-locker.dir}/classes/reflect"/>
- <scalacfork
- destdir="${build-locker.dir}/classes/reflect"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/reflect"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <!-- TODO - needed? -->
- <propertyfile file="${build-locker.dir}/classes/reflect/reflect.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-locker.dir}/classes/reflect">
- <fileset dir="${src.dir}/reflect">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- </fileset>
- </copy>
- <touch file="${build-locker.dir}/reflect.complete" verbose="no"/>
- <stopwatch name="locker.reflect.timer" action="total"/>
- </target>
+ <path id="pack.compiler.files">
+ <fileset dir="${build-quick.dir}/classes/compiler"/>
+ <fileset dir="${build-quick.dir}/classes/msil"/>
+ <fileset dir="${asm-classes}"/>
+ <fileset dir="${fjbg-classes}"/>
+ </path>
- <target name="locker.pre-comp" depends="locker.reflect" unless="locker.available">
- <condition property="locker.comp.needed">
- <not><available file="${build-locker.dir}/compiler.complete"/></not>
- </condition>
- </target>
+ <path id="pack.swing.files"> <fileset dir="${build-quick.dir}/classes/swing"/> </path>
+ <path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
+ <path id="pack.plugins.files"> <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
+ <path id="pack.scalacheck.files"> <fileset dir="${build-quick.dir}/classes/scalacheck"/> </path>
+ <path id="pack.scalap.files"> <fileset dir="${build-quick.dir}/classes/scalap"/>
+ <fileset file="${src.dir}/scalap/decoder.properties"/> </path>
+ <path id="pack.partest.files">
+ <fileset dir="${build-quick.dir}/classes/partest">
+ <exclude name="scala/tools/partest/javaagent/**"/>
+ </fileset>
+ </path>
- <target name="locker.comp" depends="locker.pre-comp" if="locker.comp.needed">
- <stopwatch name="locker.comp.timer"/>
- <mkdir dir="${build-locker.dir}/classes/compiler"/>
- <if>
- <equals arg1="${fastlocker}" arg2="true" />
- <then>
- <!-- Fastlocker build: don't compile MSIL, use its starr version.... -->
- <property name="locker.comp.msil" value="${msil.starr.jar}"/>
- </then>
- <else>
- <!-- Regular build: Compile MSIL inside of locker.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-locker.dir}/classes/compiler"
- classpath="${build-locker.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-locker.dir}/classes/compiler"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <property name="locker.comp.msil" value="${build-locker.dir}/classes/compiler"/>
- </else>
- </if>
- <scalacfork
- destdir="${build-locker.dir}/classes/compiler"
- compilerpathref="starr.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/compiler"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <path refid="asm.classpath"/>
- <pathelement location="${locker.comp.msil}" />
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-locker.dir}/classes/compiler/compiler.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-locker.dir}/classes/compiler">
- <fileset dir="${src.dir}/compiler">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
+ <path id="pack.partest-javaagent.files">
+ <fileset dir="${build-quick.dir}/classes/partest">
+ <include name="scala/tools/partest/javaagent/**"/>
</fileset>
- </copy>
- <touch file="${build-locker.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="locker.comp.timer" action="total"/>
- </target>
+ </path>
- <target name="locker.done" depends="locker.comp">
- <touch file="${build-locker.dir}/all.complete" verbose="no"/>
- <path id="locker.classpath">
- <pathelement location="${build-locker.dir}/classes/library"/>
- <pathelement location="${build-locker.dir}/classes/reflect"/>
- <pathelement location="${build-locker.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
+ <!-- STRAP -->
+ <path id="strap.library.build.path">
+ <pathelement location="${build-strap.dir}/classes/library"/>
<path refid="forkjoin.classpath"/>
- <path refid="asm.classpath"/>
<path refid="aux.libs"/>
</path>
- <!-- TODO - Why is this *here* ? -->
- <path id="quick.classpath">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="forkjoin.classpath"/>
- <path refid="asm.classpath"/>
- <path refid="aux.libs"/>
+
+ <path id="strap.msil.build.path">
+ <path refid="strap.compiler.build.path"/>
+ <pathelement location="${build-strap.dir}/classes/msil"/>
</path>
- </target>
- <target name="locker.clean" depends="palo.clean">
- <delete dir="${build-locker.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
+ <path id="strap.reflect.build.path">
+ <path refid="strap.library.build.path"/>
+ <pathelement location="${build-strap.dir}/classes/reflect"/>
+ </path>
- <target name="locker.unlock.pre-lib">
- <uptodate property="locker.lib.available" targetfile="${build-locker.dir}/library.complete">
- <srcfiles dir="${src.dir}">
- <include name="library/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <path id="strap.compiler.build.path">
+ <path refid="strap.reflect.build.path"/>
+ <pathelement location="${build-strap.dir}/classes/compiler"/>
+ <pathelement location="${build-strap.dir}/classes/msil"/>
+ <path refid="asm.classpath"/>
+ <path refid="fjbg.classpath"/>
+ <pathelement location="${jline.jar}"/>
+ </path>
- <target name="locker.unlock.lib" depends="locker.unlock.pre-lib" unless="locker.lib.available">
- <delete file="${build-locker.dir}/library.complete"/>
- </target>
+ <!-- MISC -->
+ <path id="sbt.compile.build.path">
+ <path refid="quick.compiler.build.path"/>
+ <pathelement location="${sbt.interface.jar}"/>
+ </path>
- <target name="locker.unlock.pre-reflect" depends="locker.unlock.lib">
- <uptodate property="locker.reflect.available" targetfile="${build-locker.dir}/reflect.complete">
- <srcfiles dir="${src.dir}">
- <include name="reflect/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <path id="manual.classpath">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build.dir}/manmaker/classes"/>
+ </path>
- <target name="locker.unlock.reflect" depends="locker.unlock.pre-reflect" unless="locker.reflect.available">
- <delete file="${build-locker.dir}/reflect.complete"/>
- </target>
+ <path id="partest.classpath">
+ <path refid="pack.compiler.path"/>
+ <path refid="partest.extras.classpath"/>
+ </path>
- <target name="locker.unlock.pre-comp" depends="locker.unlock.reflect">
- <uptodate property="locker.comp.available" targetfile="${build-locker.dir}/compiler.complete">
- <srcfiles dir="${src.dir}">
- <include name="compiler/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <path id="partest.build.path">
+ <path refid="pack.compiler.path"/>
+ <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
+ <pathelement location="${pack.dir}/lib/scala-swing.jar"/> <!-- TODO - segregate swing tests (there can't be many) -->
+ </path>
- <target name="locker.unlock.comp" depends="locker.unlock.pre-comp" unless="locker.comp.available">
- <delete file="${build-locker.dir}/compiler.complete"/>
- </target>
+ <path id="test.osgi.compiler.build.path">
+ <pathelement location="${test.osgi.classes}"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-library.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
+ <path refid="pax.exam.classpath"/>
+ <path refid="forkjoin.classpath"/>
+ </path>
+
+ <path id="palo.bin.tool.path">
+ <pathelement location="${build-palo.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-palo.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-palo.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-palo.dir}/lib/jline.jar"/>
+ </path>
- <target name="locker.unlock" depends="locker.unlock.comp">
- <delete file="${build-locker.dir}/all.complete" />
+ <path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
+
+ <!-- TODO: consolidate *.includes -->
+ <patternset id="partest.includes">
+ <include name="**/*.xml"/>
+ </patternset>
+
+ <patternset id="lib.includes">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ </patternset>
+
+ <patternset id="lib.rootdoc.includes">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ <include name="rootdoc.txt"/>
+ </patternset>
+
+ <patternset id="comp.includes">
+ <include name="**/*.tmpl"/>
+ <include name="**/*.xml"/>
+ <include name="**/*.js"/>
+ <include name="**/*.css"/>
+ <include name="**/*.html"/>
+ <include name="**/*.properties"/>
+ <include name="**/*.swf"/>
+ <include name="**/*.png"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.txt"/>
+ </patternset>
+
+ <taskdef resource="scala/tools/ant/sabbus/antlib.xml" classpathref="starr.compiler.path"/>
</target>
<!-- ===========================================================================
-PACKED LOCKER BUILD (PALO)
-============================================================================ -->
-
- <target name="palo.start" depends="locker.done"/>
+ CLEANLINESS
+=============================================================================-->
+ <target name="libs.clean"> <clean build="libs"/> <clean build="asm"/> </target>
+ <target name="quick.clean" depends="libs.clean"> <clean build="quick"/> <clean build="pack"/> <clean build="strap"/> </target>
+ <target name="palo.clean" depends="quick.clean"> <clean build="palo"/> </target>
+ <target name="locker.clean" depends="palo.clean"> <clean build="locker"/> </target>
- <target name="palo.pre-lib" depends="palo.start">
- <uptodate
- property="palo.lib.available"
- targetfile="${build-palo.dir}/lib/scala-library.jar"
- srcfile="${build-locker.dir}/library.complete"/>
- </target>
+ <target name="docs.clean"> <clean build="docs"/> <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
+ <target name="dist.clean"> <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/> </target>
- <target name="palo.lib" depends="palo.pre-lib" unless="palo.lib.available">
- <mkdir dir="${build-palo.dir}/lib"/>
- <jar destfile="${build-palo.dir}/lib/scala-library.jar">
- <fileset dir="${build-locker.dir}/classes/library"/>
- <fileset dir="${build-libs.dir}/classes/forkjoin"/>
- </jar>
- </target>
+ <target name="all.clean" depends="locker.clean, docs.clean"> <clean build="sbt"/> <clean build="osgi"/> </target>
- <target name="palo.pre-reflect" depends="palo.lib">
- <uptodate
- property="palo.reflect.available"
- targetfile="${build-palo.dir}/lib/scala-reflect.jar"
- srcfile="${build-locker.dir}/reflect.complete"/>
- </target>
+ <!-- Used by the scala-installer script -->
+ <target name="allallclean" depends="all.clean, dist.clean"/>
- <target name="palo.reflect" depends="palo.pre-reflect" unless="palo.reflect.available">
- <mkdir dir="${build-palo.dir}/lib"/>
- <jar destfile="${build-palo.dir}/lib/scala-reflect.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
- <fileset dir="${build-locker.dir}/classes/reflect"/>
- </jar>
- </target>
+ <macrodef name="clean">
+ <attribute name="build"/>
+ <sequential>
+ <delete dir="${build-@{build}.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ </sequential>
+ </macrodef>
- <target name="palo.pre-comp" depends="palo.reflect">
- <uptodate
- property="palo.comp.available"
- targetfile="${build-palo.dir}/lib/scala-compiler.jar"
- srcfile="${build-locker.dir}/compiler.complete"/>
- </target>
+<!-- ===========================================================================
+ LOCAL DEPENDENCIES
+============================================================================ -->
+ <macrodef name="simple-javac" >
+ <attribute name="project"/> <!-- project: fjbg/asm/forkjoin -->
+ <attribute name="args" default=""/>
+ <attribute name="jar" default="yes"/>
+ <sequential>
+ <uptodate property="@{project}.available" targetfile="${build-libs.dir}/@{project}.complete">
+ <srcfiles dir="${src.dir}/@{project}"/></uptodate>
+ <if><not><isset property="@{project}.available"/></not><then>
+ <stopwatch name="@{project}.timer"/>
+ <mkdir dir="${@{project}-classes}"/>
+ <javac
+ debug="true"
+ srcdir="${src.dir}/@{project}"
+ destdir="${@{project}-classes}"
+ classpath="${@{project}-classes}"
+ includes="**/*.java"
+ target="1.6" source="1.5"
+ compiler="javac1.6">
+ <compilerarg line="${javac.args} @{args}"/>
+ </javac>
+ <if><equals arg1="@{jar}" arg2="yes"/><then>
+ <jar whenmanifestonly="fail" destfile="${build-libs.dir}/@{project}.jar" basedir="${@{project}-classes}"/></then></if>
+ <stopwatch name="@{project}.timer" action="total"/>
+ <mkdir dir="${build-libs.dir}"/>
+ <touch file="${build-libs.dir}/@{project}.complete" verbose="no"/>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+ <target name="asm.done" depends="init"> <simple-javac project="asm" jar="no"/> </target>
+ <target name="fjbg.done" depends="init"> <simple-javac project="fjbg"/> </target>
+ <target name="forkjoin.done" depends="init"> <simple-javac project="forkjoin" args="-XDignore.symbol.file"/></target>
- <target name="palo.comp" depends="palo.pre-comp" unless="palo.comp.available">
- <mkdir dir="${build-palo.dir}/lib"/>
- <jar destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
- <fileset dir="${build-locker.dir}/classes/compiler"/>
- <fileset dir="${build-asm.dir}/classes/"/>
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
- </jar>
- <copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
- </target>
+<!-- ===========================================================================
+ STAGED COMPILATION MACROS
+============================================================================ -->
+ <macrodef name="staged-javac" >
+ <attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
+ <attribute name="project"/> <!-- project: library/reflect/compiler/actors -->
+ <attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory -->
+ <attribute name="args" default=""/>
+ <attribute name="excludes" default=""/>
+
+ <sequential>
+ <javac
+ debug="true"
+ srcdir="${src.dir}/@{project}"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ includes="**/*.java"
+ excludes="@{excludes}"
+ target="1.6" source="1.5">
+ <compilerarg line="${javac.args} @{args}"/>
+ <classpath refid="@{stage}.@{destproject}.build.path"/>
+ </javac>
+ </sequential>
+ </macrodef>
+
+ <!-- Zinc assumes a one-to-one correspondence of output folder to set of source files.
+ When compiling different sets of source files in multiple compilations to the same output directory,
+ Zinc assumes that source files which appeared in an earlier compilation but are absent from the current one
+ were deleted, and it therefore deletes the corresponding output files.
+
+ Note that zinc also requires each arg to scalac to be prefixed by -S.
+ -->
+ <macrodef name="zinc">
+ <attribute name="compilerpathref" />
+ <attribute name="destdir" />
+ <attribute name="srcdir" />
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library, "NOT SET" is just a convention to denote an optional attribute -->
+ <attribute name="buildpathref" />
+ <attribute name="params" default="" />
+ <attribute name="java-excludes" default=""/>
+
+ <sequential>
+ <local name="sources"/>
+ <pathconvert pathsep=" " property="sources">
+ <fileset dir="@{srcdir}">
+ <include name="**/*.java"/>
+ <include name="**/*.scala"/>
+ <exclude name="@{java-excludes}"/>
+ </fileset>
+ </pathconvert>
+ <local name="args"/>
+ <local name="sargs"/>
+ <if><not><equals arg1="@{srcpath}" arg2="NOT SET"/></not><then>
+ <property name="args" value="@{params} -sourcepath @{srcpath}"/>
+ </then></if>
+ <property name="args" value="@{params}"/> <!-- default -->
+
+ <!-- HACK: prefix each scalac arg by -S (including the first; empty arg list yields an empty sargs) -->
+ <script language="javascript">
+ var a = project.getProperty("args").trim(); project.setProperty("sargs", a.length() == 0 ? "" : "-S" + a.replaceAll("\\s+", " -S"));
+ </script>
+
+ <exec osfamily="unix" executable="tools/zinc" failifexecutionfails="true" failonerror="true">
+ <arg line="-nailed -compile-order JavaThenScala -scala-path ${ant.refid:@{compilerpathref}} -d @{destdir} -classpath ${toString:@{buildpathref}} ${sargs} ${sources}"/>
+ </exec>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-scalac" >
+ <attribute name="with"/> <!-- will use path `@{with}.compiler.path` to locate scalac -->
+ <attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
+ <attribute name="project"/> <!-- project: library/reflect/compiler/actors -->
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library -->
+ <attribute name="args" default=""/> <!-- additional args -->
+ <attribute name="destproject" default="@{project}"/> <!-- overrides the output directory; used when building multiple projects into the same directory -->
+ <attribute name="srcdir" default="@{project}"/>
+ <attribute name="java-excludes" default=""/>
+
+ <sequential>
+ <!-- TODO: detect zinc anywhere on PATH
+ use zinc for the quick stage if it's available;
+ would use it for locker but something is iffy in sbt: get a class cast error on global phase -->
+ <if><and> <available file="tools/zinc"/>
+ <equals arg1="@{stage}" arg2="quick"/>
+ <not><equals arg1="@{project}" arg2="plugins"/></not> <!-- doesn't work in zinc because it requires the quick compiler, which isn't jarred up-->
+ </and><then>
+ <zinc taskname="Z.@{stage}.@{project}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ srcpath="@{srcpath}"
+ buildpathref="@{stage}.@{project}.build.path"
+ params="${scalac.args.@{stage}} @{args}"
+ java-excludes="@{java-excludes}"/></then>
+ <else>
+ <if><equals arg1="@{srcpath}" arg2="NOT SET"/><then>
+ <scalacfork taskname="@{stage}.@{project}"
+ jvmargs="${scalacfork.jvmargs}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ params="${scalac.args.@{stage}} @{args}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></then>
+ <else>
+ <scalacfork taskname="@{stage}.@{project}"
+ jvmargs="${scalacfork.jvmargs}"
+ compilerpathref="@{with}.compiler.path"
+ destdir="${build-@{stage}.dir}/classes/@{destproject}"
+ srcdir="${src.dir}/@{srcdir}"
+ srcpath="@{srcpath}"
+ params="${scalac.args.@{stage}} @{args}">
+ <include name="**/*.scala"/>
+ <compilationpath refid="@{stage}.@{project}.build.path"/></scalacfork></else>
+ </if>
+ </else></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-uptodate" >
+ <attribute name="stage"/>
+ <attribute name="project"/>
+ <element name="check"/>
+ <element name="do"/>
+
+ <sequential>
+ <uptodate property="@{stage}.@{project}.available" targetfile="${build-@{stage}.dir}/@{project}.complete">
+ <check/>
+ </uptodate>
+ <if><not><isset property="@{stage}.@{project}.available"/></not><then>
+ <do/>
+ <touch file="${build-@{stage}.dir}/@{project}.complete" verbose="no"/>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-build" >
+ <attribute name="with"/> <!-- will use path `@{with}.compiler.path` to locate scalac -->
+ <attribute name="stage"/> <!-- current stage (locker, quick, strap) -->
+ <attribute name="project"/> <!-- project: library/reflect/compiler/actors -->
+ <attribute name="srcpath" default="NOT SET"/> <!-- needed to compile the library -->
+ <attribute name="args" default=""/> <!-- additional args -->
+ <attribute name="includes" default="comp.includes"/>
+ <attribute name="java-excludes" default=""/>
+ <attribute name="version" default=""/> <!-- non-empty for partest and scaladoc: use @{version}.version.number in the property file -->
+
+ <sequential>
+ <staged-uptodate stage="@{stage}" project="@{project}">
+ <check><srcfiles dir="${src.dir}/@{project}"/></check>
+ <do>
+ <stopwatch name="@{stage}.@{project}.timer"/>
+ <mkdir dir="${build-@{stage}.dir}/classes/@{project}"/>
+ <staged-javac stage="@{stage}" project="@{project}" excludes="@{java-excludes}"/> <!-- always compile with javac for simplicity and regularity; it's cheap -->
+ <staged-scalac with="@{with}" stage="@{stage}" project="@{project}" srcpath="@{srcpath}" args="@{args}" java-excludes="@{java-excludes}"/>
+ <if><equals arg1="@{version}" arg2=""/><then>
+ <propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
+ <entry key = "version.number" value="${version.number}"/>
+ <entry key = "maven.version.number" value="${maven.version.number}"/>
+ <entry key = "osgi.version.number" value="${osgi.version.number}"/>
+ <entry key = "copyright.string" value="${copyright.string}"/>
+ </propertyfile>
+ </then><else>
+ <propertyfile file = "${build-@{stage}.dir}/classes/@{project}/@{project}.properties">
+ <entry key = "version.number" value="${@{version}.version.number}"/>
+ <entry key = "copyright.string" value="${copyright.string}"/>
+ </propertyfile>
+ </else></if>
+ <copy todir="${build-@{stage}.dir}/classes/@{project}">
+ <fileset dir="${src.dir}/@{project}">
+ <patternset refid="@{includes}"/>
+ </fileset>
+ </copy>
+ <stopwatch name="@{stage}.@{project}.timer" action="total"/>
+ </do>
+ </staged-uptodate>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-bin">
+ <attribute name="stage"/>
+ <attribute name="classpathref" default="NOT SET"/>
+ <sequential>
+ <staged-uptodate stage="@{stage}" project="bin">
+ <check>
+ <srcfiles dir="${src.dir}">
+ <include name="compiler/scala/tools/ant/templates/**"/>
+ </srcfiles>
+ </check>
+ <do>
+ <taskdef name="mk-bin" classname="scala.tools.ant.ScalaTool" classpathref="@{stage}.bin.tool.path"/>
+ <mkdir dir="${build-@{stage}.dir}/bin"/>
+ <if><equals arg1="@{classpathref}" arg2="NOT SET"/><then>
+ <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}"/>
+ </then><else>
+ <mk-bin file="${build-@{stage}.dir}/bin/scala" class="scala.tools.nsc.MainGenericRunner" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalac" class="scala.tools.nsc.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scaladoc" class="scala.tools.nsc.ScalaDoc" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/fsc" class="scala.tools.nsc.CompileClient" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ <mk-bin file="${build-@{stage}.dir}/bin/scalap" class="scala.tools.scalap.Main" javaFlags="${java.flags}" classpathref="@{classpathref}"/>
+ </else></if>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scala"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalac"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scaladoc"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/fsc"/>
+ <chmod perm="ugo+rx" file="${build-@{stage}.dir}/bin/scalap"/>
+ </do>
+ </staged-uptodate>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-pack">
+ <attribute name="project"/>
+ <attribute name="targetdir" default="lib"/>
+ <attribute name="targetjar" default="scala-@{project}.jar"/>
+ <attribute name="destfile" default="${build-pack.dir}/@{targetdir}/@{targetjar}"/>
+ <attribute name="manifest" default=""/>
+ <element name="pre" optional="true"/>
+ <element name="jar-opts" optional="true"/>
+
+ <sequential>
+ <uptodate property="pack.@{project}.available" targetfile="@{destfile}">
+ <srcresources>
+ <resources refid="pack.@{project}.files"/>
+ <!-- <path><pathelement location="${build-quick.dir}/@{project}.complete"/></path> -->
+ </srcresources>
+ </uptodate>
+ <if><not><isset property="pack.@{project}.available"/></not><then>
+ <mkdir dir="${build-pack.dir}/@{targetdir}"/>
+ <pre/>
+
+ <if><not><equals arg1="@{manifest}" arg2=""/></not><then>
+ <jar whenmanifestonly="fail" destfile="@{destfile}" manifest="@{manifest}"> <!-- update="true" made no measurable difference in testing, so the jar is rebuilt from scratch -->
+ <jar-opts/>
+ <path refid="pack.@{project}.files"/>
+ </jar></then>
+ <else>
+ <jar whenmanifestonly="fail" destfile="@{destfile}">
+ <jar-opts/>
+ <path refid="pack.@{project}.files"/>
+ </jar>
+ </else></if>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="staged-docs">
+ <attribute name="project"/>
+ <attribute name="dir" default="@{project}"/>
+ <attribute name="title"/>
+ <attribute name="docroot" default="NOT SET"/>
+ <element name="includes" implicit="true"/>
+
+ <sequential>
+ <staged-uptodate stage="docs" project="@{project}">
+ <check><srcfiles dir="${src.dir}/@{dir}"/></check>
+ <do>
+ <stopwatch name="docs.@{project}.timer"/>
+ <mkdir dir="${build-docs.dir}/@{project}"/>
+ <if><equals arg1="@{docroot}" arg2="NOT SET"/><then>
+ <!-- TODO: introduce docs.@{project}.build.path for classpathref -->
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="@{title}"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.compiler.path"
+ srcdir="${src.dir}/@{dir}"
+ addparams="${scalac.args.all}"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}">
+ <includes/>
+ </scaladoc>
+ </then><else>
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="@{title}"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="pack.compiler.path"
+ srcdir="${src.dir}/@{dir}"
+ docRootContent="${src.dir}/@{project}/@{docroot}"
+ addparams="${scalac.args.all}"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}">
+ <includes/>
+ </scaladoc>
+ </else></if>
+ <stopwatch name="docs.@{project}.timer" action="total"/>
+ </do>
+ </staged-uptodate>
+ </sequential>
+ </macrodef>
- <target name="palo.done" depends="palo.comp">
- </target>
+<!-- ===========================================================================
+ LOCAL REFERENCE BUILD (LOCKER)
+============================================================================ -->
+ <target name="locker.start" depends="asm.done, forkjoin.done, fjbg.done">
+ <condition property="locker.locked"><available file="${build-locker.dir}/locker.locked"/></condition></target>
- <target name="palo.clean" depends="quick.clean">
- <delete dir="${build-palo.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
+ <target name="locker.lib" depends="locker.start" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes"/></target>
- <target name="palo.pre-bin" depends="palo.comp">
- <uptodate property="palo.bin.available" targetfile="${build-locker.dir}/bin.complete">
- <srcfiles dir="${src.dir}">
- <include name="compiler/scala/tools/ant/templates/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <target name="locker.actors" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="actors"/> </target>
- <target name="palo.bin" depends="palo.pre-bin" unless="palo.bin.available">
- <taskdef name="palo-bin" classname="scala.tools.ant.ScalaTool">
- <classpath>
- <pathelement location="${build-palo.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-palo.dir}/lib/scala-reflect.jar"/>
- <pathelement location="${build-palo.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-palo.dir}/lib/jline.jar"/>
- </classpath>
- </taskdef>
- <mkdir dir="${build-palo.dir}/bin"/>
- <palo-bin
- file="${build-palo.dir}/bin/scala"
- class="scala.tools.nsc.MainGenericRunner"
- javaFlags="${java.flags}"/>
- <palo-bin
- file="${build-palo.dir}/bin/scalac"
- class="scala.tools.nsc.Main"
- javaFlags="${java.flags}"/>
- <palo-bin
- file="${build-palo.dir}/bin/scaladoc"
- class="scala.tools.nsc.ScalaDoc"
- javaFlags="${java.flags}"/>
- <palo-bin
- file="${build-palo.dir}/bin/fsc"
- class="scala.tools.nsc.CompileClient"
- javaFlags="${java.flags}"/>
- <palo-bin
- file="${build-palo.dir}/bin/scalap"
- class="scala.tools.scalap.Main"
- javaFlags="${java.flags}"/>
- <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${build-palo.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${build-palo.dir}/bin/scalap"/>
- <touch file="${build-locker.dir}/bin.complete" verbose="no"/>
- </target>
+ <target name="locker.msil" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="msil" java-excludes="**/tests/**"/> </target>
-<!-- ===========================================================================
-QUICK BUILD (QUICK)
-============================================================================ -->
+ <target name="locker.reflect" depends="locker.lib" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="reflect"/></target>
- <target name="quick.start" depends="locker.done"/>
+ <target name="locker.comp" depends="locker.reflect, locker.msil" unless="locker.locked">
+ <staged-build with="starr" stage="locker" project="compiler"/></target>
- <target name="quick.pre-lib" depends="quick.start">
- <uptodate property="quick.lib.available" targetfile="${build-quick.dir}/library.complete">
- <srcfiles dir="${src.dir}">
- <include name="library/**"/>
- <include name="continuations/**"/>
- <include name="swing/**"/>
- <include name="actors/**"/>
- </srcfiles>
- </uptodate>
+ <target name="locker.done" depends="locker.comp">
+ <mkdir dir="${build-locker.dir}"/>
+ <touch file="${build-locker.dir}/locker.locked" verbose="no"/>
</target>
+ <target name="locker.unlock"> <delete file="${build-locker.dir}/locker.locked"/>
+ <delete file="${build-locker.dir}/*.complete"/></target>
- <target name="quick.lib" depends="quick.pre-lib" unless="quick.lib.available">
- <stopwatch name="quick.lib.timer"/>
- <mkdir dir="${build-quick.dir}/classes/library"/>
- <javac
- srcdir="${src.dir}/library"
- destdir="${build-quick.dir}/classes/library"
- includes="**/*.java"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- <classpath>
- <path refid="forkjoin.classpath"/>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </classpath>
- </javac>
- <javac
- srcdir="${src.dir}/actors"
- destdir="${build-quick.dir}/classes/library"
- includes="**/*.java"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args}"/>
- <classpath>
- <path refid="forkjoin.classpath"/>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </classpath>
- </javac>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- srcpath="${src.dir}/library"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/actors"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- <propertyfile file="${build-quick.dir}/classes/library/library.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/library">
- <fileset dir="${src.dir}/library">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </fileset>
- </copy>
- </target>
+<!-- ===========================================================================
+ QUICK BUILD (QUICK)
+============================================================================ -->
+ <target name="quick.start" depends="locker.done"/>
- <target name="quick.swing" depends="quick.lib" if="has.java6" unless="quick.lib.available">
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- </scalacfork>
- </target>
+ <target name="quick.lib" depends="quick.start">
+ <staged-build with="locker" stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/></target>
- <target name="quick.lib.done" depends="quick.swing, quick.lib">
- <stopwatch name="quick.lib.timer" action="total"/>
- <touch file="${build-quick.dir}/library.complete" verbose="no"/>
- </target>
+ <target name="quick.actors" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="actors"/> </target>
- <target name="quick.pre-reflect" depends="quick.lib.done">
- <uptodate property="quick.reflect.available" targetfile="${build-quick.dir}/reflect.complete">
- <srcfiles dir="${src.dir}">
- <include name="reflect/**"/>
- </srcfiles>
- </uptodate>
- </target>
+ <target name="quick.msil" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="msil" java-excludes="**/tests/**"/> </target>
- <target name="quick.reflect" depends="quick.pre-reflect" unless="quick.reflect.available">
- <stopwatch name="quick.reflect.timer"/>
- <mkdir dir="${build-quick.dir}/classes/reflect"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/reflect"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/reflect"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <!-- TODO - needed? -->
- <propertyfile file="${build-quick.dir}/classes/reflect/reflect.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/reflect">
- <fileset dir="${src.dir}/reflect">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- </fileset>
- </copy>
- <touch file="${build-quick.dir}/reflect.complete" verbose="no"/>
- <stopwatch name="quick.reflect.timer" action="total"/>
- </target>
+ <target name="quick.reflect" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="reflect"/> </target>
- <target name="quick.pre-comp" depends="quick.reflect">
- <uptodate property="quick.comp.available" targetfile="${build-quick.dir}/compiler.complete">
- <srcfiles dir="${src.dir}/compiler"/>
- </uptodate>
- </target>
+ <target name="quick.comp" depends="quick.reflect, quick.msil">
+ <staged-build with="locker" stage="quick" project="compiler"/> </target>
- <target name="quick.comp" depends="quick.pre-comp" unless="quick.comp.available">
- <stopwatch name="quick.comp.timer"/>
- <mkdir dir="${build-quick.dir}/classes/compiler"/>
- <!-- Compile MSIL inside of quick.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-quick.dir}/classes/compiler"
- classpath="${build-quick.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-quick.dir}/classes/compiler"
- compilerpathref="locker.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <scalacfork
- destdir="${build-quick.dir}/classes/compiler"
- compilerpathref="locker.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/compiler"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="asm.classpath"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-quick.dir}/classes/compiler/compiler.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/compiler">
- <fileset dir="${src.dir}/compiler">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="**/*.html"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- <include name="**/*.txt"/>
- </fileset>
- </copy>
- <touch file="${build-quick.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="quick.comp.timer" action="total"/>
- </target>
+ <target name="quick.scalacheck" depends="quick.actors, quick.lib">
+ <staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/> </target>
- <target name="quick.pre-plugins" depends="quick.comp">
- <uptodate property="quick.plugins.available" targetfile="${build-quick.dir}/plugins.complete">
- <srcfiles dir="${src.dir}/continuations"/>
- </uptodate>
- </target>
+ <target name="quick.scalap" depends="quick.comp">
+ <staged-build with="locker" stage="quick" project="scalap"/> </target>
- <target name="quick.plugins" depends="quick.pre-plugins" unless="quick.plugins.available">
- <stopwatch name="quick.plugins.timer"/>
- <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/continuations-plugin"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/continuations/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/continuations-plugin"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/continuations/plugin/scalac-plugin.xml"
- todir="${build-quick.dir}/classes/continuations-plugin"/>
- <!-- not very nice to create jar here but needed to load plugin -->
- <mkdir dir="${build-quick.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-quick.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
- </jar>
- <!-- might split off library part into its own ant target -->
- <scalacfork
- destdir="${build-quick.dir}/classes/library"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick} -Xplugin-require:continuations -P:continuations:enable"
- srcdir="${src.dir}/continuations/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="quick.compilation.path"/>
- <compilerarg value="-Xpluginsdir"/>
- <compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
- </scalacfork>
- <touch file="${build-quick.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="quick.plugins.timer" action="total"/>
- </target>
+ <target name="quick.partest" depends="quick.scalap, quick.comp, asm.done">
+ <staged-build with="locker" stage="quick" project="partest" version="partest"/> </target>
- <target name="quick.pre-scalacheck" depends="quick.plugins">
- <uptodate property="quick.scalacheck.available" targetfile="${build-quick.dir}/scalacheck.complete">
- <srcfiles dir="${src.dir}/scalacheck"/>
- </uptodate>
- </target>
+ <target name="quick.swing" depends="quick.actors, quick.lib" if="has.java6">
+ <staged-build with="locker" stage="quick" project="swing"/> </target>
- <target name="quick.scalacheck" depends="quick.pre-scalacheck" unless="quick.scalacheck.available">
- <stopwatch name="quick.scalacheck.timer"/>
- <mkdir dir="${build-quick.dir}/classes/scalacheck"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/scalacheck"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick} -nowarn"
- srcdir="${src.dir}/scalacheck"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/scalacheck"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-quick.dir}/scalacheck.complete" verbose="no"/>
- <stopwatch name="quick.scalacheck.timer" action="total"/>
- </target>
+ <target name="quick.plugins" depends="quick.comp">
+ <staged-uptodate stage="quick" project="plugins">
+ <check><srcfiles dir="${src.dir}/continuations"/></check>
+ <do>
+ <stopwatch name="quick.plugins.timer"/>
- <target name="quick.pre-scalap" depends="quick.scalacheck">
- <uptodate property="quick.scalap.available" targetfile="${build-quick.dir}/scalap.complete">
- <srcfiles dir="${src.dir}/scalap"/>
- </uptodate>
- </target>
+ <mkdir dir="${build-quick.dir}/classes/continuations-plugin"/>
+ <staged-scalac with="locker" stage="quick" project="plugins" srcdir="continuations/plugin" destproject="continuations-plugin"/>
+ <copy
+ file="${src.dir}/continuations/plugin/scalac-plugin.xml"
+ todir="${build-quick.dir}/classes/continuations-plugin"/>
- <target name="quick.scalap" depends="quick.pre-scalap" unless="quick.scalap.available">
- <stopwatch name="quick.scalap.timer"/>
- <mkdir dir="${build-quick.dir}/classes/scalap"/>
- <scalacfork
- destdir="${build-quick.dir}/classes/scalap"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/scalap"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-quick.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="quick.scalap.timer" action="total"/>
- </target>
+ <!-- not very nice to create jar here but needed to load plugin -->
+ <mkdir dir="${build-quick.dir}/misc/scala-devel/plugins"/>
+ <jar whenmanifestonly="fail" destfile="${build-quick.dir}/misc/scala-devel/plugins/continuations.jar" basedir="${build-quick.dir}/classes/continuations-plugin"/>
- <target name="quick.pre-partest" depends="quick.scalap, asm.done">
- <uptodate property="quick.partest.available" targetfile="${build-quick.dir}/partest.complete">
- <srcfiles dir="${src.dir}/partest"/>
- </uptodate>
- </target>
+ <!-- might split off library part into its own ant target -->
+ <mkdir dir="${build-quick.dir}/classes/continuations-library"/>
+ <!-- TODO: must build with quick to avoid
+ [quick.plugins] error: java.lang.NoClassDefFoundError: scala/tools/nsc/transform/patmat/PatternMatching
+ [quick.plugins] at scala.tools.selectivecps.SelectiveCPSTransform.newTransformer(SelectiveCPSTransform.scala:29)
- <target name="quick.partest" depends="quick.pre-partest" unless="quick.partest.available">
- <stopwatch name="quick.partest.timer"/>
- <mkdir dir="${build-quick.dir}/classes/partest"/>
- <javac
- srcdir="${src.dir}/partest"
- destdir="${build-quick.dir}/classes/partest"
- target="1.6" source="1.5">
- <classpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
- <path refid="asm.classpath"/>
- </classpath>
- <include name="**/*.java"/>
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-quick.dir}/classes/partest"
- compilerpathref="quick.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/partest"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="asm.classpath"/>
- <pathelement location="${scalacheck.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-quick.dir}/classes/partest/partest.properties">
- <entry key="version.number" value="${partest.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-quick.dir}/classes/partest">
- <fileset dir="${src.dir}/partest">
- <include name="**/*.xml"/>
- </fileset>
- </copy>
- <touch file="${build-quick.dir}/partest.complete" verbose="no"/>
- <stopwatch name="quick.partest.timer" action="total"/>
- </target>
+ WHY OH WHY!? scala/tools/nsc/transform/patmat/PatternMatching should be on locker.compiler.path
+ -->
+ <staged-scalac with="quick" stage="quick" project="plugins"
+ srcdir="continuations/library" destproject="continuations-library"
+ args="-Xplugin-require:continuations -P:continuations:enable -Xpluginsdir ${build-quick.dir}/misc/scala-devel/plugins"/>
- <target name="quick.pre-bin" depends="quick.partest">
- <condition property="quick.bin.available">
- <isset property="quick.comp.available"/>
- </condition>
+ <stopwatch name="quick.plugins.timer" action="total"/>
+ </do>
+ </staged-uptodate>
</target>
- <target name="quick.bin" depends="quick.pre-bin" unless="quick.bin.available">
- <path id="quick.bin.classpath">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${build-quick.dir}/classes/scalap"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <path refid="asm.classpath"/>
- <pathelement location="${jline.jar}"/>
- </path>
- <taskdef name="quick-bin" classname="scala.tools.ant.ScalaTool" classpathref="quick.bin.classpath"/>
- <mkdir dir="${build-quick.dir}/bin"/>
- <quick-bin
- file="${build-quick.dir}/bin/scala"
- class="scala.tools.nsc.MainGenericRunner"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/scalac"
- class="scala.tools.nsc.Main"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/scaladoc"
- class="scala.tools.nsc.ScalaDoc"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/fsc"
- class="scala.tools.nsc.CompileClient"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <quick-bin
- file="${build-quick.dir}/bin/scalap"
- class="scala.tools.scalap.Main"
- javaFlags="${java.flags}"
- classpathref="quick.bin.classpath"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${build-quick.dir}/bin/scalap"/>
- <touch file="${build-quick.dir}/bin.complete" verbose="no"/>
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.scalacheck, quick.scalap, quick.swing, quick.plugins, quick.partest">
+ <staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
- <target name="quick.done" depends="quick.bin">
- <path id="quick.classpath">
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <path refid="asm.classpath"/>
- <path refid="aux.libs"/>
- </path>
- </target>
+ <target name="quick.done" depends="quick.bin"/>
+ <target name="quick-opt" description="Optimized version of quick.done."> <optimized name="quick.done"/></target>
- <target name="quick.clean" depends="libs.clean">
- <delete dir="${build-quick.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
<!-- ===========================================================================
-PACKED QUICK BUILD (PACK)
+ PACKED QUICK BUILD (PACK)
============================================================================ -->
-
- <target name="pack.start" depends="quick.done"/>
-
- <target name="pack.pre-lib" depends="pack.start">
- <uptodate
- property="pack.lib.available"
- targetfile="${build-pack.dir}/lib/scala-library.jar"
- srcfile="${build-quick.dir}/library.complete"/>
+ <target name="pack.lib" depends="quick.lib, quick.plugins, forkjoin.done">
+ <staged-pack project="library"/></target>
+
+ <target name="pack.actors" depends="quick.lib"> <staged-pack project="actors"/> </target>
+ <target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
+ <target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
+
+ <target name="pack.comp" depends="quick.comp, asm.done">
+ <staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
+ <pre> <!-- TODO the files copied here do not influence actuality of this target (nor does the manifest) -->
+ <copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
+ <copy todir="${build-pack.dir}/lib">
+ <fileset dir="${lib-extra.dir}">
+ <include name="**/*.jar"/>
+ </fileset>
+ </copy>
+ <mkdir dir="${build-pack.dir}/META-INF"/>
+ <copy file="${basedir}/META-INF/MANIFEST.MF" toDir="${build-pack.dir}/META-INF"/>
+ <manifest file="${build-pack.dir}/META-INF/MANIFEST.MF" mode="update">
+ <attribute name="Bundle-Version" value="${version.number}"/>
+ <attribute name="Class-Path" value="scala-reflect.jar scala-library.jar"/>
+ </manifest>
+ </pre>
+ <jar-opts>
+ <service type="javax.script.ScriptEngineFactory" provider="scala.tools.nsc.interpreter.IMain$Factory"/>
+ </jar-opts>
+ </staged-pack>
</target>
- <target name="pack.lib" depends="pack.pre-lib" unless="pack.lib.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <!-- First copy maven dependencies -->
- <copy todir="${build-pack.dir}/lib">
- <fileset refid="dependency.fileset" />
- <mapper type="flatten" />
- </copy>
- <jar destfile="${build-pack.dir}/lib/scala-library.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <exclude name="scala/swing/**"/>
- <exclude name="scala/actors/**"/>
- </fileset>
- <fileset dir="${build-libs.dir}/classes/forkjoin"/>
- </jar>
- <jar destfile="${build-pack.dir}/lib/scala-actors.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/actors/**"/>
- </fileset>
- </jar>
- </target>
+ <target name="pack.plugins" depends="quick.plugins"> <staged-pack project="plugins" targetdir="misc/scala-devel/plugins" targetjar="continuations.jar"/> </target>
+ <target name="pack.scalacheck" depends="quick.scalacheck"> <staged-pack project="scalacheck" targetjar="scalacheck.jar"/> </target>
- <target name="pack.swing" depends="pack.lib" if="has.java6">
- <jar destfile="${build-pack.dir}/lib/scala-swing.jar">
- <fileset dir="${build-quick.dir}/classes/library">
- <include name="scala/swing/**"/>
- </fileset>
- </jar>
+ <target name="pack.partest" depends="quick.partest">
+ <staged-pack project="partest"/>
+ <!-- TODO the manifest should influence actuality of this target -->
+ <staged-pack project="partest-javaagent" manifest="${src.dir}/partest/scala/tools/partest/javaagent/MANIFEST.MF"/>
</target>
- <target name="pack.pre-reflect" depends="pack.lib, pack.swing">
- <uptodate
- property="pack.reflect.available"
- targetfile="${build-pack.dir}/lib/scala-reflect.jar"
- srcfile="${build-quick.dir}/reflect.complete"/>
- </target>
-
- <target name="pack.reflect" depends="pack.pre-reflect" unless="pack.reflect.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-reflect.jar">
- <fileset dir="${build-quick.dir}/classes/reflect" />
- </jar>
- </target>
+ <target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
- <target name="pack.pre-comp" depends="pack.reflect">
- <uptodate
- property="pack.comp.available"
- targetfile="${build-pack.dir}/lib/scala-compiler.jar"
- srcfile="${build-quick.dir}/compiler.complete"/>
+ <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.swing">
+ <staged-bin stage="pack"/>
</target>
- <target name="pack.comp" depends="pack.pre-comp" unless="pack.comp.available">
- <mkdir dir="${build-pack.dir}/META-INF"/>
- <copy file="META-INF/MANIFEST.MF" toDir="${build-pack.dir}/META-INF"/>
- <manifest file="${build-pack.dir}/META-INF/MANIFEST.MF" mode="update">
- <attribute name="Bundle-Version" value="${version.number}"/>
- </manifest>
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-compiler.jar" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
- <fileset dir="${build-quick.dir}/classes/compiler"/>
- <fileset dir="${build-asm.dir}/classes"/>
- <fileset dir="${build-libs.dir}/classes/fjbg"/>
- </jar>
- <copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
+ <!-- depend on quick.done so quick.bin is run when pack.done is -->
+ <target name="pack.done" depends="quick.done, pack.bin">
+ <!-- copy dependencies to build/pack/lib, it only takes a second so don't bother with uptodate checks -->
<copy todir="${build-pack.dir}/lib">
- <fileset dir="${lib-extra.dir}">
- <include name="**/*.jar"/>
- </fileset>
+ <resources refid="partest.extras.fileset"/>
+ <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
+ from="${partest.extras.versions}" to="flatten"/>
</copy>
- </target>
- <target name="pack.pre-plugins" depends="pack.comp">
- <uptodate
- property="pack.plugins.available"
- targetfile="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar"
- srcfile="${build-quick.dir}/plugins.complete"/>
+ <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.compiler.path"/>
+ <taskdef resource="scala/tools/partest/antlib.xml" classpathref="partest.classpath"/>
</target>
- <target name="pack.plugins" depends="pack.pre-plugins" unless="pack.plugins.available">
- <mkdir dir="${build-pack.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-quick.dir}/classes/continuations-plugin"/>
- </jar>
- </target>
-
- <target name="pack.scalacheck" depends="pack.plugins">
- <jar destfile="${build-pack.dir}/lib/scalacheck.jar">
- <fileset dir="${build-quick.dir}/classes/scalacheck"/>
- </jar>
- </target>
- <target name="pack.pre-partest" depends="pack.scalacheck">
- <uptodate
- property="pack.partest.available"
- targetfile="${build-pack.dir}/lib/scala-partest.jar"
- srcfile="${build-quick.dir}/partest.complete"/>
+<!-- ===========================================================================
+ BOOTSTRAPPING BUILD (STRAP)
+============================================================================ -->
+ <target name="strap.done" depends="pack.done">
+ <staged-build with="pack" stage="strap" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/>
+ <staged-build with="pack" stage="strap" project="msil" java-excludes="**/tests/**"/>
+ <staged-build with="pack" stage="strap" project="reflect"/>
+ <staged-build with="pack" stage="strap" project="compiler"/>
</target>
- <target name="pack.partest" depends="pack.pre-partest" unless="pack.partest.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scala-partest.jar">
- <fileset dir="${build-quick.dir}/classes/partest">
- <exclude name="scala/tools/partest/javaagent/**"/>
- </fileset>
- </jar>
- <jar destfile="${build-pack.dir}/lib/scala-partest-javaagent.jar" manifest="${src.dir}/partest/scala/tools/partest/javaagent/MANIFEST.MF">
- <fileset dir="${build-quick.dir}/classes/partest">
- <include name="scala/tools/partest/javaagent/**"/>
- </fileset>
- </jar>
- </target>
+ <target name="strap-opt" description="Optimized version of strap.done."> <optimized name="strap.done"/></target>
- <target name="pack.pre-scalap" depends="pack.partest">
- <uptodate
- property="pack.scalap.available"
- targetfile="${build-pack.dir}/lib/scalap.jar"
- srcfile="${build-quick.dir}/scalap.complete"/>
- </target>
- <target name="pack.scalap" depends="pack.pre-scalap" unless="pack.scalap.available">
- <mkdir dir="${build-pack.dir}/lib"/>
- <jar destfile="${build-pack.dir}/lib/scalap.jar">
- <fileset dir="${build-quick.dir}/classes/scalap"/>
- <fileset file="${src.dir}/scalap/decoder.properties"/>
+<!-- ===========================================================================
+ PACKED LOCKER BUILD (PALO)
+============================================================================ -->
+ <target name="palo.done" depends="locker.done">
+ <mkdir dir="${build-palo.dir}/lib"/>
+ <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-library.jar">
+ <fileset dir="${build-locker.dir}/classes/library"/>
+ <fileset dir="${forkjoin-classes}"/>
+ </jar>
+ <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-reflect.jar" manifest="${basedir}/META-INF/MANIFEST.MF"
+ basedir="${build-locker.dir}/classes/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${build-palo.dir}/lib/scala-compiler.jar" manifest="${basedir}/META-INF/MANIFEST.MF">
+ <fileset dir="${build-locker.dir}/classes/compiler"/>
+ <fileset dir="${asm-classes}"/>
+ <fileset dir="${fjbg-classes}"/>
</jar>
+ <copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
</target>
- <target name="pack.pre-bin" depends="pack.scalap">
- <uptodate
- property="pack.bin.available"
- srcfile="${build-pack.dir}/lib/scala-compiler.jar"
- targetfile="${build-pack.dir}/bin.complete"/>
- </target>
+ <target name="palo.bin" depends="palo.done"> <staged-bin stage="palo"/></target>
- <target name="pack.bin" depends="pack.pre-bin" unless="pack.bin.available">
- <taskdef name="pack-bin" classname="scala.tools.ant.ScalaTool">
- <classpath>
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/jline.jar"/>
- </classpath>
- </taskdef>
- <mkdir dir="${build-pack.dir}/bin"/>
- <pack-bin
- file="${build-pack.dir}/bin/scala"
- class="scala.tools.nsc.MainGenericRunner"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/scalac"
- class="scala.tools.nsc.Main"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/scaladoc"
- class="scala.tools.nsc.ScalaDoc"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/fsc"
- class="scala.tools.nsc.CompileClient"
- javaFlags="${java.flags}"/>
- <pack-bin
- file="${build-pack.dir}/bin/scalap"
- class="scala.tools.scalap.Main"
- javaFlags="${java.flags}"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scala"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scalac"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scaladoc"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/fsc"/>
- <chmod perm="ugo+rx" file="${build-pack.dir}/bin/scalap"/>
- <touch file="${build-pack.dir}/bin.complete" verbose="no"/>
- </target>
-
- <target name="pack.done" depends="pack.bin">
- <path id="pack.classpath">
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
- <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
- <pathelement location="${ant.jar}"/>
- <pathelement location="${jline.jar}"/>
- <path refid="lib.extra"/>
- </path>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.classpath"/>
- <taskdef resource="scala/tools/partest/antlib.xml" classpathref="pack.classpath"/>
- </target>
-
- <target name="pack.clean" depends="strap.clean">
- <delete dir="${build-pack.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
<!-- ===========================================================================
-OSGi Artifacts
+ OSGi Artifacts
============================================================================ -->
-
- <target name="osgi.init" depends="pack.done">
+ <target name="osgi.done" depends="pack.done">
<mkdir dir="${build-osgi.dir}"/>
- <property name="osgi.test.src" value="${partest.dir}/osgi/src"/>
- <property name="osgi.test.classes" value="${build-osgi.dir}/classes"/>
-
<!-- simplify fixing pom versions -->
<macrodef name="make-bundle">
<attribute name="name" />
@@ -1710,888 +1334,137 @@ OSGi Artifacts
output="${build-osgi.dir}"/>
</sequential>
</macrodef>
+
<uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.complete">
<srcfiles dir="${basedir}">
<include name="build.xml"/>
<include name="src/build/bnd/*.bnd"/>
</srcfiles>
</uptodate>
- </target>
- <target name="osgi.bundles" depends="osgi.init" unless="osgi.bundles.available">
- <stopwatch name="osgi.bundle.timer"/>
- <make-bundle name="scala-library" version="${osgi.version.number}" />
- <make-bundle name="scala-actors" version="${osgi.version.number}" />
- <make-bundle name="scala-reflect" version="${osgi.version.number}" />
- <make-bundle name="scala-compiler" version="${osgi.version.number}" />
- <make-plugin-bundle name="continuations" version="${osgi.version.number}" />
- <touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
- <stopwatch name="osgi.bundle.timer" action="total"/>
+ <if><not><isset property="osgi.bundles.available"/></not><then>
+ <stopwatch name="osgi.bundle.timer"/>
+ <make-bundle name="scala-library" version="${osgi.version.number}" />
+ <make-bundle name="scala-actors" version="${osgi.version.number}" />
+ <make-bundle name="scala-reflect" version="${osgi.version.number}" />
+ <make-bundle name="scala-compiler" version="${osgi.version.number}" />
+ <make-plugin-bundle name="continuations" version="${osgi.version.number}" />
+ <touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
+
+ <if><isset property="has.java6"/><then>
+ <make-bundle name="scala-swing" version="${osgi.version.number}"/></then>
+ </if>
+ <stopwatch name="osgi.bundle.timer" action="total"/></then>
+ </if>
</target>
- <target name="osgi.bundles.swing" depends="osgi.init" if="has.java6" unless="osgi.bundles.available">
- <!-- TODO - only if JDK6 -->
- <make-bundle name="scala-swing" version="${osgi.version.number}"/>
- </target>
- <target name="osgi.done" depends="osgi.bundles, osgi.bundles.swing"/>
+<!-- ===========================================================================
+ TEST SUITE
+============================================================================ -->
+ <!-- bootstrapping stability: compare {quick,strap}/(lib|reflect|comp) -->
+ <target name="test.stability" depends="strap.done">
+ <exec osfamily="unix" vmlauncher="false" executable="${basedir}/tools/stability-test.sh" failonerror="true" />
+ <!-- I think doing it this way means it will auto-pass on windows... that's the idea. If not, something like this. -->
+ <!-- <exec osfamily="windows" executable="foo" failonerror="false" failifexecutionfails="false" /> -->
+ </target>
- <target name="osgi.test.init" depends="osgi.done">
- <path id="osgi.bundle.classpath">
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-library.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
- <pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
- </path>
+ <target name="test.stability-opt" description="Optimized version of test.stability."> <optimized name="test.stability"/></target>
- <uptodate property="osgi.test.available" targetfile="${build-osgi.dir}/test-compile.complete">
- <srcfiles dir="${osgi.test.src}">
+ <target name="test.osgi.init" depends="osgi.done">
+ <uptodate property="test.osgi.available" targetfile="${build-osgi.dir}/test-compile.complete">
+ <srcfiles dir="${test.osgi.src}">
<include name="**/*.scala"/>
</srcfiles>
</uptodate>
</target>
- <target name="osgi.test.comp" depends="osgi.test.init, quick.done" unless="osgi.test.available">
- <stopwatch name="osgi.test.comp.timer"/>
- <mkdir dir="${osgi.test.classes}"/>
+ <target name="test.osgi.comp" depends="test.osgi.init, quick.done" unless="test.osgi.available">
+ <stopwatch name="test.osgi.compiler.timer"/>
+ <mkdir dir="${test.osgi.classes}"/>
<scalacfork
- destdir="${osgi.test.classes}"
- compilerpathref="quick.classpath"
+ destdir="${test.osgi.classes}"
+ compilerpathref="quick.compiler.path"
params="${scalac.args.quick}"
- srcdir="${osgi.test.src}"
+ srcdir="${test.osgi.src}"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${osgi.test.classes}"/>
- <path refid="osgi.bundle.classpath"/>
- <path refid="pax.exam.classpath"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
+ <compilationpath refid="test.osgi.compiler.build.path"/>
</scalacfork>
<touch file="${build-osgi.dir}/test-compile.complete" verbose="no"/>
- <stopwatch name="osgi.test.comp.timer" action="total"/>
+ <stopwatch name="test.osgi.compiler.timer" action="total"/>
</target>
- <target name="osgi.test" depends="osgi.test.comp">
- <stopwatch name="osgi.test.timer"/>
- <mkdir dir="${osgi.test.classes}"/>
+ <target name="test.osgi" depends="test.osgi.comp">
+ <stopwatch name="test.osgi.timer"/>
+ <mkdir dir="${test.osgi.classes}"/>
<junit fork="yes" haltonfailure="yes">
- <classpath>
- <pathelement location="${osgi.test.classes}"/>
- <path refid="osgi.bundle.classpath"/>
- <path refid="pax.exam.classpath"/>
- <path refid="forkjoin.classpath"/>
- </classpath>
+ <classpath refid="test.osgi.compiler.build.path"/>
<batchtest fork="yes" todir="${build-osgi.dir}">
- <fileset dir="${osgi.test.classes}">
+ <fileset dir="${test.osgi.classes}">
<include name="**/*Test.class"/>
</fileset>
</batchtest>
- <formatter type="brief" usefile="false" />
+ <formatter type="brief" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
</junit>
- <stopwatch name="osgi.test.timer" action="total"/>
- </target>
-
- <target name="osgi.clean">
- <delete dir="${build-osgi.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
-
-<!-- ===========================================================================
-BOOTSTRAPPING BUILD (STRAP)
-============================================================================ -->
-
- <target name="strap.start" depends="pack.done"/>
-
- <target name="strap.pre-lib" depends="strap.start">
- <uptodate property="strap.lib.available" targetfile="${build-strap.dir}/library.complete">
- <srcfiles dir="${src.dir}">
- <include name="library/**"/>
- <include name="swing/**"/>
- <include name="actors/**"/>
- </srcfiles>
- </uptodate>
- </target>
-
- <target name="strap.lib" depends="strap.pre-lib" unless="strap.lib.available">
- <stopwatch name="strap.lib.timer"/>
- <mkdir dir="${build-strap.dir}/classes/library"/>
- <javac
- srcdir="${src.dir}/library"
- destdir="${build-strap.dir}/classes/library"
- includes="**/*.java"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args} -XDignore.symbol.file"/>
- <classpath>
- <path refid="forkjoin.classpath"/>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </classpath>
- </javac>
- <javac
- srcdir="${src.dir}/actors"
- destdir="${build-strap.dir}/classes/library"
- includes="**/*.java"
- target="1.6" source="1.5">
- <compilerarg line="${javac.args}"/>
- <classpath>
- <path refid="forkjoin.classpath"/>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </classpath>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- srcpath="${src.dir}/library"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/actors"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- <propertyfile file="${build-strap.dir}/classes/library/library.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-strap.dir}/classes/library">
- <fileset dir="${src.dir}/library">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- </fileset>
- </copy>
- </target>
-
- <target name="strap.swing" if="has.java6" unless="strap.lib.available" depends="strap.lib">
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/swing"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- </scalacfork>
- </target>
-
- <target name="strap.lib.done" depends="strap.swing, strap.lib">
- <touch file="${build-strap.dir}/library.complete" verbose="no"/>
- <stopwatch name="strap.lib.timer" action="total"/>
- </target>
-
- <target name="strap.pre-reflect" depends="strap.lib.done">
- <uptodate property="strap.reflect.available" targetfile="${build-strap.dir}/reflect.complete">
- <srcfiles dir="${src.dir}/reflect"/>
- </uptodate>
- </target>
-
- <target name="strap.reflect" depends="strap.pre-reflect" unless="strap.reflect.available">
- <stopwatch name="strap.reflect.timer"/>
- <mkdir dir="${build-strap.dir}/classes/reflect"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/reflect"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/reflect"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <path refid="forkjoin.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-strap.dir}/classes/reflect/reflect.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-strap.dir}/classes/reflect">
- <fileset dir="${src.dir}/reflect">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="**/*.html"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- <include name="**/*.txt"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/reflect.complete" verbose="no"/>
- <stopwatch name="strap.comp.timer" action="total"/>
- </target>
-
- <target name="strap.pre-comp" depends="strap.reflect">
- <uptodate property="strap.comp.available" targetfile="${build-strap.dir}/compiler.complete">
- <srcfiles dir="${src.dir}/compiler"/>
- </uptodate>
- </target>
-
- <target name="strap.comp" depends="strap.pre-comp" unless="strap.comp.available">
- <stopwatch name="strap.comp.timer"/>
- <mkdir dir="${build-strap.dir}/classes/compiler"/>
- <!-- Compile MSIL inside of strap.... -->
- <javac
- srcdir="${src.dir}/msil"
- destdir="${build-strap.dir}/classes/compiler"
- classpath="${build-strap.dir}/classes/compiler"
- includes="**/*.java"
- excludes="**/tests/**"
- debug="true"
- target="1.6" source="1.4">
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/compiler"
- compilerpathref="pack.classpath"
- params="${scalac.args.all}"
- srcdir="${src.dir}/msil"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <scalacfork
- destdir="${build-strap.dir}/classes/compiler"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/compiler"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <path refid="aux.libs"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="asm.classpath"/>
- <pathelement location="${jline.jar}"/>
- </compilationpath>
- </scalacfork>
- <propertyfile file="${build-strap.dir}/classes/compiler/compiler.properties">
- <entry key="version.number" value="${version.number}"/>
- <entry key="maven.version.number" value="${maven.version.number}"/>
- <entry key="osgi.version.number" value="${osgi.version.number}"/>
- <entry key="copyright.string" value="${copyright.string}"/>
- </propertyfile>
- <copy todir="${build-strap.dir}/classes/compiler">
- <fileset dir="${src.dir}/compiler">
- <include name="**/*.tmpl"/>
- <include name="**/*.xml"/>
- <include name="**/*.js"/>
- <include name="**/*.css"/>
- <include name="**/*.html"/>
- <include name="**/*.properties"/>
- <include name="**/*.swf"/>
- <include name="**/*.png"/>
- <include name="**/*.gif"/>
- <include name="**/*.txt"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="strap.comp.timer" action="total"/>
- </target>
-
- <target name="strap.pre-plugins" depends="strap.comp">
- <uptodate property="strap.plugins.available" targetfile="${build-strap.dir}/plugins.complete">
- <srcfiles dir="${src.dir}/continuations"/>
- </uptodate>
- </target>
-
- <target name="strap.plugins" depends="strap.pre-plugins" unless="strap.plugins.available">
- <stopwatch name="strap.plugins.timer"/>
- <mkdir dir="${build-strap.dir}/classes/continuations-plugin"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/continuations-plugin"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/continuations/plugin"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/continuations-plugin"/>
- <path refid="forkjoin.classpath"/>
- <path refid="fjbg.classpath"/>
- <path refid="aux.libs"/>
- </compilationpath>
- </scalacfork>
- <copy
- file="${src.dir}/continuations/plugin/scalac-plugin.xml"
- todir="${build-strap.dir}/classes/continuations-plugin"/>
- <!-- not very nice to create jar here but needed to load plugin -->
- <mkdir dir="${build-strap.dir}/misc/scala-devel/plugins"/>
- <jar destfile="${build-strap.dir}/misc/scala-devel/plugins/continuations.jar">
- <fileset dir="${build-strap.dir}/classes/continuations-plugin"/>
- </jar>
- <!-- might split off library part into its own ant target -->
- <scalacfork
- destdir="${build-strap.dir}/classes/library"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick} -Xplugin-require:continuations -P:continuations:enable"
- srcdir="${src.dir}/continuations/library"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath refid="strap.compilation.path"/>
- <compilerarg value="-Xpluginsdir"/>
- <compilerarg file="${build-strap.dir}/misc/scala-devel/plugins"/>
- </scalacfork>
- <touch file="${build-strap.dir}/plugins.complete" verbose="no"/>
- <stopwatch name="strap.plugins.timer" action="total"/>
- </target>
-
- <target name="strap.scalacheck" depends="strap.plugins">
- <mkdir dir="${build-strap.dir}/classes/scalacheck"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalacheck"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick} -nowarn"
- srcdir="${src.dir}/scalacheck"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- </compilationpath>
- </scalacfork>
+ <stopwatch name="test.osgi.timer" action="total"/>
</target>
- <target name="strap.pre-scalap" depends="strap.scalacheck">
- <uptodate property="strap.scalap.available" targetfile="${build-strap.dir}/scalap.complete">
- <srcfiles dir="${src.dir}/scalap"/>
- </uptodate>
- </target>
-
- <target name="strap.scalap" depends="strap.pre-scalap" unless="strap.scalap.available">
- <stopwatch name="strap.scalap.timer"/>
- <mkdir dir="${build-strap.dir}/classes/scalap"/>
- <scalacfork
- destdir="${build-strap.dir}/classes/scalap"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/scalap"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
- </scalacfork>
- <touch file="${build-strap.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="strap.scalap.timer" action="total"/>
- </target>
-
- <target name="strap.pre-partest" depends="strap.scalap, asm.done">
- <uptodate property="strap.partest.available" targetfile="${build-strap.dir}/partest.complete">
- <srcfiles dir="${src.dir}/partest"/>
- </uptodate>
- </target>
-
- <target name="strap.partest" depends="strap.pre-partest" unless="strap.partest.available">
- <stopwatch name="strap.partest.timer"/>
- <mkdir dir="${build-strap.dir}/classes/partest"/>
- <javac
- srcdir="${src.dir}/partest"
- destdir="${build-strap.dir}/classes/partest"
- target="1.6" source="1.5">
- <classpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <path refid="asm.classpath"/>
- </classpath>
- <include name="**/*.java"/>
- <compilerarg line="${javac.args}"/>
- </javac>
- <scalacfork
- destdir="${build-strap.dir}/classes/partest"
- compilerpathref="pack.classpath"
- params="${scalac.args.quick}"
- srcdir="${src.dir}/partest"
- jvmargs="${scalacfork.jvmargs}">
- <include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-strap.dir}/classes/library"/>
- <pathelement location="${build-strap.dir}/classes/reflect"/>
- <pathelement location="${build-strap.dir}/classes/compiler"/>
- <pathelement location="${build-strap.dir}/classes/scalap"/>
- <pathelement location="${build-strap.dir}/classes/partest"/>
- <pathelement location="${ant.jar}"/>
- <path refid="forkjoin.classpath"/>
- <path refid="asm.classpath"/>
- <pathelement location="${scalacheck.jar}"/>
- </compilationpath>
- </scalacfork>
- <copy todir="${build-strap.dir}/classes/partest">
- <fileset dir="${src.dir}/partest">
- <include name="**/*.xml"/>
- </fileset>
- </copy>
- <touch file="${build-strap.dir}/partest.complete" verbose="no"/>
- <stopwatch name="strap.partest.timer" action="total"/>
- </target>
-
- <target name="strap.done" depends="strap.partest"/>
-
- <target name="strap.clean">
- <delete dir="${build-strap.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
<!-- ===========================================================================
-LIBRARIES (Forkjoin, FJBG, ASM)
+ SBT Compiler Interface
============================================================================ -->
-
-
- <target name="libs.clean" depends="pack.clean, asm.clean">
- <delete dir="${build-libs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
-
-<!-- ===========================================================================
-SBT Compiler Interface
-============================================================================ -->
-
- <target name="sbt.start" depends="init">
- <!-- TODO - Put this in init? Allow this to be overriden simply -->
- <property name="sbt.latest.version" value="0.12.0"/>
-
-
- <property name="sbt.src.dir" value="${build-sbt.dir}/${sbt.latest.version}/src"/>
- <property name="sbt.lib.dir" value="${build-sbt.dir}/${sbt.latest.version}/lib"/>
-
-
-
- <property name="sbt.interface.jar" value="${sbt.lib.dir}/interface.jar"/>
- <property name="sbt.interface.url" value="http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/interface/${sbt.latest.version}/jars/interface.jar"/>
- <property name="sbt.interface.src.jar" value="${sbt.src.dir}/compiler-interface-src.jar"/>
- <property name="sbt.interface.src.url" value="http://typesafe.artifactoryonline.com/typesafe/ivy-releases/org.scala-sbt/compiler-interface/${sbt.latest.version}/jars/compiler-interface-src.jar"/>
-
- <!-- Ensure directories exist -->
- <mkdir dir="${sbt.src.dir}"/>
- <mkdir dir="${sbt.lib.dir}"/>
-
- <condition property="sbt.available">
- <and>
+ <target name="test.sbt" depends="quick.done">
+ <if><not><and>
<available file="${sbt.interface.jar}"/>
- <available file="${sbt.interface.src.jar}"/>
- </and>
- </condition>
+ <available file="${sbt.interface.src.jar}"/></and></not>
+ <then>
+ <!-- Ensure directories exist -->
+ <mkdir dir="${sbt.src.dir}"/>
+ <mkdir dir="${sbt.lib.dir}"/>
- </target>
+ <get src="${sbt.interface.url}" dest="${sbt.interface.jar}"/>
+ <get src="${sbt.interface.src.url}" dest="${sbt.interface.src.jar}"/>
- <target name="sbt.libs" depends="sbt.start" unless="sbt.available">
- <get src="${sbt.interface.url}" dest="${sbt.interface.jar}"/>
- <get src="${sbt.interface.src.url}" dest="${sbt.interface.src.jar}"/>
- <!-- Explode sources -->
- <unzip src="${sbt.interface.src.jar}" dest="${sbt.src.dir}"/>
- </target>
+ <!-- Explode sources -->
+ <unzip src="${sbt.interface.src.jar}" dest="${sbt.src.dir}"/>
+ </then></if>
- <target name="sbt.compile" depends="sbt.libs,quick.done">
<stopwatch name="quick.sbt-interface.timer"/>
<mkdir dir="${build-sbt.dir}/classes"/>
<scalacfork
destdir="${build-sbt.dir}/classes"
- compilerpathref="quick.classpath"
+ compilerpathref="quick.compiler.path"
params="${scalac.args.quick}"
srcdir="${sbt.src.dir}"
jvmargs="${scalacfork.jvmargs}">
<include name="**/*.scala"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- <pathelement location="${build-quick.dir}/classes/reflect"/>
- <pathelement location="${build-quick.dir}/classes/compiler"/>
- <pathelement location="${sbt.interface.jar}"/>
- <path refid="forkjoin.classpath"/>
- </compilationpath>
+ <compilationpath refid="sbt.compile.build.path"/>
</scalacfork>
<touch file="${build-sbt.dir}/sbt-interface.complete" verbose="no"/>
<stopwatch name="quick.sbt-interface.timer" action="total"/>
</target>
- <target name="sbt.done" depends="sbt.compile">
- </target>
-
- <target name="sbt.clean" depends="init">
- <delete dir="${build-sbt.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
-
-<!-- ===========================================================================
-DOCUMENTATION
-============================================================================ -->
-
- <target name="docs.start" depends="pack.done">
- <macrodef name="doc-uptodate-check">
- <attribute name="name" />
- <attribute name="srcdir" />
- <element name="source-includes" optional="yes" />
- <sequential>
- <uptodate property="docs.@{name}.available" targetfile="${build-docs.dir}/@{name}.complete">
- <srcfiles dir="@{srcdir}">
- <source-includes/>
- </srcfiles>
- </uptodate>
- </sequential>
- </macrodef>
-
- <!-- Set the github commit scaladoc sources point to -->
- <!-- For releases, look for the tag with the same name as the maven version -->
- <condition property="scaladoc.git.commit" value="v${maven.version.number}">
- <isset property="build.release"/>
- </condition>
- <!-- For snapshots, if we know the commit, point scaladoc to that particular commit instead of master -->
- <condition property="scaladoc.git.commit" value="${git.commit.sha}">
- <not><equals arg1="${git.commit.sha}" arg2="unknown"/></not>
- </condition>
- <!-- Fallback: point scaladoc to master -->
- <property name="scaladoc.git.commit" value="master"/>
- <!-- Compute the URL and show it -->
- <property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
- <echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
-
- <!-- Unless set with -Dscaladoc.<...>, these won't be activated -->
- <property name="scaladoc.raw.output" value="no"/>
- <property name="scaladoc.no.prefixes" value="no"/>
- </target>
-
- <target name="docs.pre-lib" depends="docs.start">
- <doc-uptodate-check name="library" srcdir="${src.dir}">
- <source-includes>
- <include name="library/**"/>
- <include name="swing/**"/>
- <include name="actors/**"/>
- <include name="reflect/**"/>
- </source-includes>
- </doc-uptodate-check>
- </target>
-
- <target name="docs.lib" depends="docs.pre-lib" unless="docs.library.available">
- <stopwatch name="docs.lib.timer"/>
- <mkdir dir="${build-docs.dir}/library"/>
- <scaladoc
- destdir="${build-docs.dir}/library"
- doctitle="Scala Standard Library API (Scaladoc)"
- docversion="${version.number}"
- docfooter="epfl"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- docUncompilable="${src.dir}/library-aux"
- skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io:scala.concurrent.impl"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- addparams="${scalac.args.all}"
- docRootContent="${src.dir}/library/rootdoc.txt"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <src>
- <files includes="${src.dir}/actors"/>
- <files includes="${src.dir}/library"/>
- <files includes="${src.dir}/reflect"/>
- <files includes="${src.dir}/swing"/>
- <files includes="${src.dir}/continuations/library"/>
- </src>
- <include name="**/*.scala"/>
- <exclude name="reflect/Code.scala"/>
- <exclude name="reflect/Print.scala"/>
- <exclude name="reflect/Symbol.scala"/>
- <exclude name="reflect/Tree.scala"/>
- <exclude name="reflect/Type.scala"/>
- <exclude name="runtime/*$.scala"/>
- <exclude name="runtime/ScalaRunTime.scala"/>
- <exclude name="runtime/StringAdd.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/library.complete" verbose="no"/>
- <stopwatch name="docs.lib.timer" action="total"/>
- </target>
-
- <target name="docs.pre-man" depends="docs.lib">
- <doc-uptodate-check name="manual" srcdir="${src.dir}/manual"/>
- </target>
-
- <target name="docs.manmaker" depends="docs.pre-man" unless="docs.manual.available">
- <mkdir dir="${build.dir}/manmaker/classes"/>
- <scalac
- destdir="${build.dir}/manmaker/classes"
- classpathref="pack.classpath"
- srcdir="${src.dir}/manual"
- includes="**/*.scala"
- addparams="${scalac.args.all}"/>
- <path id="manual.classpath">
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build.dir}/manmaker/classes"/>
- </path>
- </target>
-
- <target name="docs.man" depends="docs.manmaker" unless="docs.manual.available">
- <mkdir dir="${build-docs.dir}/manual/man/man1"/>
- <mkdir dir="${build-docs.dir}/manual/html"/>
- <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
- <taskdef name="genman"
- classname="scala.tools.docutil.ManMaker"
- classpathref="manual.classpath"/>
- <genman command="fsc, scala, scalac, scaladoc, scalap"
- htmlout="${build-docs.dir}/manual/html"
- manout="${build-docs.dir}/manual/genman"/>
- <!-- On Windows source and target files can't be the same ! -->
- <fixcrlf
- srcdir="${build-docs.dir}/manual/genman"
- destdir="${build-docs.dir}/manual/man"
- eol="unix" includes="**/*.1"/>
- <copy todir="${build-docs.dir}/manual/html">
- <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
- <include name="**/*.html"/>
- <include name="**/*.css"/>
- <include name="**/*.gif"/>
- <include name="**/*.png"/>
- </fileset>
- </copy>
- <touch file="${build-docs.dir}/manual.complete" verbose="no"/>
- </target>
-
- <target name="docs.pre-comp" depends="docs.man">
- <doc-uptodate-check name="compiler" srcdir="${src.dir}/compiler"/>
- </target>
-
- <target name="docs.comp" depends="docs.pre-comp" unless="docs.compiler.available">
- <stopwatch name="docs.comp.timer"/>
- <mkdir dir="${build-docs.dir}/compiler"/>
- <scaladoc
- destdir="${build-docs.dir}/compiler"
- doctitle="Scala Compiler"
- docversion="${version.number}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/compiler"
- docRootContent="${src.dir}/compiler/rootdoc.txt"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/compiler.complete" verbose="no"/>
- <stopwatch name="docs.comp.timer" action="total"/>
- </target>
-
- <target name="docs.pre-jline" depends="docs.start">
- <doc-uptodate-check name="jline" srcdir="${src.dir}/jline/src/main/java" />
- </target>
-
- <target name="docs.jline" depends="docs.pre-jline" unless="docs.jline.available">
- <stopwatch name="docs.jline.timer"/>
- <mkdir dir="${build-docs.dir}/jline"/>
- <scaladoc
- destdir="${build-docs.dir}/jline"
- doctitle="Scala JLine"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/jline/src/main/java"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <include name="**/*.scala"/>
- <include name="**/*.java"/>
- </scaladoc>
- <touch file="${build-docs.dir}/jline.complete" verbose="no"/>
- <stopwatch name="docs.jline.timer" action="total"/>
- </target>
-
- <target name="docs.pre-scalap" depends="docs.start">
- <doc-uptodate-check name="scalap" srcdir="${src.dir}/scalap" />
- </target>
-
- <target name="docs.scalap" depends="docs.pre-scalap" unless="docs.scalap.available">
- <stopwatch name="docs.scalap.timer"/>
- <mkdir dir="${build-docs.dir}/scalap"/>
- <scaladoc
- destdir="${build-docs.dir}/scalap"
- doctitle="Scalap"
- docversion="${version.number}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/scalap"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/scalap.complete" verbose="no"/>
- <stopwatch name="docs.scalap.timer" action="total"/>
- </target>
-
- <target name="docs.pre-partest" depends="docs.start">
- <doc-uptodate-check name="partest" srcdir="${src.dir}/partest" />
- </target>
-
- <target name="docs.partest" depends="docs.pre-partest" unless="docs.partest.available">
- <stopwatch name="docs.partest.timer"/>
- <mkdir dir="${build-docs.dir}/scala-partest"/>
- <scaladoc
- destdir="${build-docs.dir}/scala-partest"
- doctitle="Scala Parallel Testing Framework"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/partest"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/partest.complete" verbose="no"/>
- <stopwatch name="docs.partest.timer" action="total"/>
- </target>
-
- <target name="docs.pre-continuations-plugin" depends="docs.start">
- <doc-uptodate-check name="continuations-plugin" srcdir="${src.dir}/continuations/plugin" />
- </target>
-
- <target name="docs.continuations-plugin" depends="docs.pre-continuations-plugin" unless="docs.continuations-plugin.available">
- <stopwatch name="docs.continuations-plugin.timer"/>
- <mkdir dir="${build-docs.dir}/continuations-plugin"/>
- <scaladoc
- destdir="${build-docs.dir}/continuations-plugin"
- doctitle="Delimited Continuations Compiler Plugin"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="pack.classpath"
- srcdir="${src.dir}/continuations/plugin"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <include name="**/*.scala"/>
- </scaladoc>
- <touch file="${build-docs.dir}/continuations-plugin.complete" verbose="no"/>
- <stopwatch name="docs.continuations-plugin.timer" action="total"/>
- </target>
-
- <target name="docs.done" depends="docs.man"/>
-
- <target name="docs.all" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
-
- <target name="docs.clean">
- <delete dir="${build-docs.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- <delete dir="${build.dir}/manmaker" includeemptydirs="yes" quiet="yes" failonerror="no"/>
- </target>
-
-<!-- ===========================================================================
-BOOTRAPING TEST AND TEST SUITE
-============================================================================ -->
-
- <target name="test.stability" depends="strap.done, init">
- <same dir="${build-quick.dir}" todir="${build-strap.dir}" failondifferent="yes">
- <exclude name="**/*.properties"/>
- <exclude name="bin/**"/>
- <exclude name="*.complete"/>
- <exclude name="misc/scala-devel/plugins/*.jar"/>
- </same>
- </target>
-
- <target name="test.classload" depends="pack.done">
- <!-- TODO - Add actors + reflect to this -->
- <classloadVerify classpath="${build-pack.dir}/lib/scala-library.jar" />
- </target>
-
- <!-- this target will run only those tests found in test/debug -->
- <target name="test.sbt" depends="sbt.done">
- </target>
-
- <!-- this target will run only those tests found in test/debug -->
- <target name="test.debug">
- <antcall target="test.suite">
- <param name="partest.srcdir" value="debug" />
- </antcall>
- </target>
+ <property name="partest.srcdir" value="files" /> <!-- TODO: make targets for `pending` and other subdirs -->
<target name="test.run" depends="pack.done">
- <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="1200000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+
+ <compilationpath refid="partest.build.path"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
<jvmtests dir="${partest.dir}/${partest.srcdir}/jvm" includes="*.scala"/>
</partest>
</target>
- <target name="test.ant" depends="pack.done">
- <property name="partest.srcdir" value="files" />
- <partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
- srcdir="${partest.srcdir}"
- scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
- <anttests dir="${partest.dir}/${partest.srcdir}/ant" includes="*build.xml"/>
- </partest>
- </target>
-
<target name="test.suite" depends="pack.done">
- <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise}">
- <compilationpath>
- <path refid="pack.classpath"/>
- <pathelement location="${pack.dir}/lib/scala-swing.jar"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.build.path"/>
<postests dir="${partest.dir}/${partest.srcdir}/pos" includes="*.scala"/>
<negtests dir="${partest.dir}/${partest.srcdir}/neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/run" includes="*.scala"/>
@@ -2608,25 +1481,17 @@ BOOTRAPING TEST AND TEST SUITE
<instrumentedtests dir="${partest.dir}/${partest.srcdir}/instrumented">
<include name="*.scala"/>
</instrumentedtests>
- <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
- <include name="*/*.scala"/>
- </presentationtests>
- <!-- <scripttests dir="${partest.dir}/${partest.srcdir}/script" includes="*.scala"/> -->
</partest>
</target>
<target name="test.continuations.suite" depends="pack.done">
- <property name="partest.srcdir" value="files" />
<partest showlog="yes" erroronfailed="yes" javacmd="${java.home}/bin/java"
timeout="2400000"
srcdir="${partest.srcdir}"
scalacopts="${scalac.args.optimise} -Xplugin-require:continuations -P:continuations:enable">
<compilerarg value="-Xpluginsdir"/>
<compilerarg file="${build-quick.dir}/misc/scala-devel/plugins"/>
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.build.path"/>
<negtests dir="${partest.dir}/${partest.srcdir}/continuations-neg" includes="*.scala"/>
<runtests dir="${partest.dir}/${partest.srcdir}/continuations-run" includes="*.scala"/>
</partest>
@@ -2634,23 +1499,33 @@ BOOTRAPING TEST AND TEST SUITE
<target name="test.scaladoc" depends="pack.done">
<partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
- <compilationpath>
- <path refid="pack.classpath"/>
- <fileset dir="${partest.dir}/files/lib" includes="*.jar" />
- </compilationpath>
+ <compilationpath refid="partest.build.path"/>
<runtests dir="${partest.dir}/scaladoc/run" includes="*.scala" />
<scalachecktests dir="${partest.dir}/scaladoc/scalacheck" includes="*.scala" />
</partest>
</target>
- <target name="test.done" depends="test.suite, test.continuations.suite, test.scaladoc, test.stability, test.sbt, osgi.test, bc.run"/>
+ <target name="test.interactive" depends="pack.done">
+ <partest erroronfailed="yes" scalacopts="${scalac.args.optimise}" showlog="yes">
+ <compilationpath refid="partest.build.path"/>
+ <presentationtests dir="${partest.dir}/${partest.srcdir}/presentation">
+ <include name="*/*.scala"/>
+ </presentationtests>
+ </partest>
+ </target>
+
+ <!-- for use in PR validation, where stability is rarely broken, so we're going to use starr for locker,
+ and skip test.stability (which requires locker == quick) -->
+ <target name="test.core" depends="test.osgi, test.sbt, test.bc, test.interactive, test.continuations.suite, test.scaladoc, test.suite"/>
+ <target name="test.done" depends="test.core, test.stability"/>
<!-- ===========================================================================
-Binary compatibility testing
+ BINARY COMPATIBILITY TESTING
============================================================================ -->
+ <target name="bc.init" depends="init" unless="maven-deps-done-mima">
+ <property name="bc-reference-version" value="2.10.0"/>
- <target name="bc.init" depends="init">
<property name="bc-build.dir" value="${build.dir}/bc"/>
<!-- Obtain mima -->
<mkdir dir="${bc-build.dir}"/>
@@ -2659,111 +1534,212 @@ Binary compatibility testing
<dependency groupId="com.typesafe" artifactId="mima-reporter_2.9.2" version="0.1.5"/>
</artifact:dependencies>
<artifact:dependencies pathId="old.bc.classpath">
- <dependency groupId="org.scala-lang" artifactId="scala-swing" version="2.10.0"/>
- <dependency groupId="org.scala-lang" artifactId="scala-library" version="2.10.0"/>
- <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="2.10.0"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-swing" version="${bc-reference-version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-library" version="${bc-reference-version}"/>
+ <dependency groupId="org.scala-lang" artifactId="scala-reflect" version="${bc-reference-version}"/>
</artifact:dependencies>
+ <property name="maven-deps-done-mima" value="true"/>
+ </target>
+
+ <macrodef name="bc.run-mima">
+ <attribute name="jar-name"/>
+ <attribute name="prev"/>
+ <attribute name="curr"/>
+ <attribute name="direction"/>
+ <sequential>
+ <echo message="Checking @{direction} binary compatibility for @{jar-name} (against ${bc-reference-version})"/>
+ <java taskname="mima"
+ fork="true"
+ failonerror="true"
+ classname="com.typesafe.tools.mima.cli.Main">
+ <arg value="--prev"/>
+ <arg value="@{prev}"/>
+ <arg value="--curr"/>
+ <arg value="@{curr}"/>
+ <arg value="--filters"/>
+ <arg value="${basedir}/bincompat-@{direction}.whitelist.conf"/>
+ <arg value="--generate-filters"/>
+ <classpath>
+ <path refid="mima.classpath"/>
+ </classpath>
+ </java>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="bc.check">
+ <attribute name="jar-name"/>
+ <sequential>
+ <bc.run-mima
+ jar-name="@{jar-name}"
+ prev="${org.scala-lang:@{jar-name}:jar}"
+ curr="${build-pack.dir}/lib/@{jar-name}.jar"
+ direction="backward"/>
+ <bc.run-mima
+ jar-name="@{jar-name}"
+ prev="${build-pack.dir}/lib/@{jar-name}.jar"
+ curr="${org.scala-lang:@{jar-name}:jar}"
+ direction="forward"/>
+ </sequential>
+ </macrodef>
+
+ <target name="test.bc-opt" description="Optimized version of test.bc."> <optimized name="test.bc"/></target>
+ <target name="test.bc" depends="bc.init, pack.lib, pack.reflect, pack.swing">
+ <bc.check jar-name="scala-library"/>
+ <bc.check jar-name="scala-reflect"/>
+ <bc.check jar-name="scala-swing"/>
+ </target>
+
+<!-- ===========================================================================
+ DOCUMENTATION
+============================================================================ -->
+ <target name="docs.start" depends="pack.done">
+ <!-- Set the github commit scaladoc sources point to -->
+ <!-- For releases, look for the tag with the same name as the maven version -->
+ <condition property="scaladoc.git.commit" value="v${maven.version.number}">
+ <isset property="build.release"/>
+ </condition>
+ <!-- For snapshots, if we know the commit, point scaladoc to that particular commit instead of master -->
+ <condition property="scaladoc.git.commit" value="${git.commit.sha}">
+ <not><equals arg1="${git.commit.sha}" arg2="unknown"/></not>
+ </condition>
+ <!-- Fallback: point scaladoc to master -->
+ <property name="scaladoc.git.commit" value="master"/>
+ <!-- Compute the URL and show it -->
+ <property name="scaladoc.url" value="https://github.com/scala/scala/tree/${scaladoc.git.commit}/src"/>
+ <echo message="Scaladoc will point to ${scaladoc.url} for source files."/>
+
+ <!-- Unless set with -Dscaladoc.<...>, these won't be activated -->
+ <property name="scaladoc.raw.output" value="no"/>
+ <property name="scaladoc.no.prefixes" value="no"/>
</target>
- <target name="bc.run" depends="bc.init, pack.done">
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--prev"/>
- <arg value="${org.scala-lang:scala-library:jar}"/>
- <arg value="--curr"/>
- <arg value="${build-pack.dir}/lib/scala-library.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-backward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--prev"/>
- <arg value="${org.scala-lang:scala-reflect:jar}"/>
- <arg value="--curr"/>
- <arg value="${build-pack.dir}/lib/scala-reflect.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-backward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--prev"/>
- <arg value="${org.scala-lang:scala-swing:jar}"/>
- <arg value="--curr"/>
- <arg value="${build-pack.dir}/lib/scala-swing.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-backward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--curr"/>
- <arg value="${org.scala-lang:scala-library:jar}"/>
- <arg value="--prev"/>
- <arg value="${build-pack.dir}/lib/scala-library.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-forward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--curr"/>
- <arg value="${org.scala-lang:scala-reflect:jar}"/>
- <arg value="--prev"/>
- <arg value="${build-pack.dir}/lib/scala-reflect.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-forward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
- <java
- fork="true"
- failonerror="true"
- classname="com.typesafe.tools.mima.cli.Main">
- <arg value="--curr"/>
- <arg value="${org.scala-lang:scala-swing:jar}"/>
- <arg value="--prev"/>
- <arg value="${build-pack.dir}/lib/scala-swing.jar"/>
- <arg value="--filters"/>
- <arg value="${basedir}/bincompat-forward.whitelist.conf"/>
- <arg value="--generate-filters"/>
- <classpath>
- <path refid="mima.classpath"/>
- </classpath>
- </java>
+ <target name="docs.lib" depends="docs.start">
+ <staged-uptodate stage="docs" project="library">
+ <check><srcfiles dir="${src.dir}">
+ <include name="library/**"/>
+ <include name="swing/**"/>
+ <include name="actors/**"/>
+ <include name="reflect/**"/>
+ <include name="continuations/library/**"/>
+ </srcfiles></check>
+ <do>
+ <stopwatch name="docs.lib.timer"/>
+ <mkdir dir="${build-docs.dir}/library"/>
+ <!-- last three attributes not supported by staged-docs: -->
+ <scaladoc
+ destdir="${build-docs.dir}/library"
+ doctitle="Scala Standard Library API (Scaladoc)"
+ docversion="${version.number}"
+ docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
+ sourcepath="${src.dir}"
+ classpathref="pack.compiler.path"
+ addparams="${scalac.args.all}"
+ docRootContent="${src.dir}/library/rootdoc.txt"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}"
+ docfooter="epfl"
+ docUncompilable="${src.dir}/library-aux"
+ skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io:scala.concurrent.impl">
+ <src>
+ <files includes="${src.dir}/actors"/>
+ <files includes="${src.dir}/library"/>
+ <files includes="${src.dir}/reflect"/>
+ <files includes="${src.dir}/swing"/>
+ <files includes="${src.dir}/continuations/library"/>
+ </src>
+ <include name="**/*.scala"/>
+ <exclude name="reflect/Code.scala"/>
+ <exclude name="reflect/Print.scala"/>
+ <exclude name="reflect/Symbol.scala"/>
+ <exclude name="reflect/Tree.scala"/>
+ <exclude name="reflect/Type.scala"/>
+ <exclude name="runtime/*$.scala"/>
+ <exclude name="runtime/ScalaRunTime.scala"/>
+ <exclude name="runtime/StringAdd.scala"/>
+ </scaladoc>
+ <stopwatch name="docs.lib.timer" action="total"/>
+ </do>
+ </staged-uptodate>
+ </target>
+
+ <target name="docs.comp" depends="docs.start">
+ <staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.jline" depends="docs.start">
+ <staged-docs project="jline" dir="jline/src/main/java" title="Scala JLine">
+ <include name="**/*.scala"/>
+ <include name="**/*.java"/>
+ </staged-docs>
</target>
+ <target name="docs.scalap" depends="docs.start">
+ <staged-docs project="scalap" title="Scalap">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.partest" depends="docs.start">
+ <staged-docs project="partest" title="Scala Parallel Testing Framework">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.continuations-plugin" depends="docs.start">
+ <staged-docs project="continuations-plugin" dir="continuations/plugin" title="Delimited Continuations Compiler Plugin">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.man" depends="docs.start">
+ <staged-uptodate stage="docs" project="manual">
+ <check><srcfiles dir="${src.dir}/manual"/></check>
+ <do>
+ <mkdir dir="${build.dir}/manmaker/classes"/>
+ <scalac
+ destdir="${build.dir}/manmaker/classes"
+ classpathref="pack.compiler.path"
+ srcdir="${src.dir}/manual"
+ includes="**/*.scala"
+ addparams="${scalac.args.all}"/>
+ <mkdir dir="${build-docs.dir}/manual/man/man1"/>
+ <mkdir dir="${build-docs.dir}/manual/html"/>
+ <mkdir dir="${build-docs.dir}/manual/genman/man1"/>
+ <taskdef name="genman"
+ classname="scala.tools.docutil.ManMaker"
+ classpathref="manual.classpath"/>
+ <genman command="fsc, scala, scalac, scaladoc, scalap"
+ htmlout="${build-docs.dir}/manual/html"
+ manout="${build-docs.dir}/manual/genman"/>
+ <!-- On Windows source and target files can't be the same ! -->
+ <fixcrlf
+ srcdir="${build-docs.dir}/manual/genman"
+ destdir="${build-docs.dir}/manual/man"
+ eol="unix" includes="**/*.1"/>
+ <copy todir="${build-docs.dir}/manual/html">
+ <fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
+ <include name="**/*.html"/>
+ <include name="**/*.css"/>
+ <include name="**/*.gif"/>
+ <include name="**/*.png"/>
+ </fileset>
+ </copy>
+ </do>
+ </staged-uptodate>
+ </target>
+
+ <target name="docs.done" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
+
<!-- ===========================================================================
-DISTRIBUTION
+ DISTRIBUTION
============================================================================ -->
-
- <target name="dist.start" depends="pack.done, osgi.done">
+ <target name="dist.base" depends="pack.done, osgi.done">
<property name="dist.name" value="scala-${version.number}"/>
<property name="dist.dir" value="${dists.dir}/${dist.name}"/>
@@ -2782,9 +1758,7 @@ DISTRIBUTION
overwrite="yes"/>
</sequential>
</macrodef>
- </target>
- <target name="dist.base" depends="dist.start">
<mkdir dir="${dist.dir}/lib"/>
<copy toDir="${dist.dir}/lib">
<fileset dir="${build-pack.dir}/lib">
@@ -2794,6 +1768,13 @@ DISTRIBUTION
<include name="scalap.jar"/>
</fileset>
</copy>
+
+ <copy todir="${dist.dir}/lib">
+ <resources refid="partest.extras.fileset"/>
+ <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
+ from="${partest.extras.versions}" to="flatten"/>
+ </copy>
+
<mkdir dir="${dist.dir}/bin"/>
<!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
<copy-bundle name="scala-library"/>
@@ -2835,70 +1816,42 @@ DISTRIBUTION
toFile="${dist.dir}/doc/scala-devel-docs/README.scala-swing"/>
</target>
- <target name="dist.man" depends="dist.doc">
+ <target name="dist.man" depends="dist.base">
<mkdir dir="${dist.dir}/man"/>
<copy toDir="${dist.dir}/man">
<fileset dir="${build-docs.dir}/manual/man"/>
</copy>
</target>
- <target name="dist.src" depends="dist.man">
+ <target name="dist.src" depends="dist.base">
<mkdir dir="${dist.dir}/src"/>
- <jar destfile="${dist.dir}/src/scala-library-src.jar">
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
<fileset dir="${src.dir}/continuations/library"/>
</jar>
- <jar destfile="${dist.dir}/src/scala-reflect-src.jar">
- <fileset dir="${src.dir}/reflect"/>
- </jar>
- <jar destfile="${dist.dir}/src/scala-swing-src.jar">
- <fileset dir="${src.dir}/swing"/>
- </jar>
- <jar destfile="${dist.dir}/src/scala-compiler-src.jar">
- <fileset dir="${src.dir}/compiler"/>
- </jar>
- <jar destfile="${dist.dir}/src/scala-actors-src.jar">
- <fileset dir="${src.dir}/actors"/>
- </jar>
- <jar destfile="${dist.dir}/src/scalap-src.jar">
- <fileset dir="${src.dir}/scalap"/>
- </jar>
- <!-- Needed for Maven distribution -->
- <jar destfile="${dist.dir}/src/scala-partest-src.jar">
- <fileset dir="${src.dir}/partest"/>
- </jar>
- </target>
-
- <target name="dist.latest.unix" depends="dist.base" unless="os.win">
- <symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
- </target>
-
- <target name="dist.latest.win" depends="dist.base" if="os.win">
- <copy todir="${dists.dir}/latest">
- <fileset dir="${dist.dir}"/>
- </copy>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-swing-src.jar" basedir="${src.dir}/swing"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-compiler-src.jar" basedir="${src.dir}/compiler"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/fjbg-src.jar" basedir="${src.dir}/fjbg"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/msil-src.jar" basedir="${src.dir}/msil"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-actors-src.jar" basedir="${src.dir}/actors"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar" basedir="${src.dir}/partest"/>
</target>
- <target name="dist.latest" depends="dist.latest.unix,dist.latest.win"/>
-
- <target name="dist.partial" depends="dist.base, dist.latest"/>
-
- <target name="dist.done" depends="dist.latest, dist.src"/>
-
- <target name="dist.clean">
- <delete dir="${dists.dir}" includeemptydirs="yes" quiet="yes" failonerror="no"/>
+ <target name="dist.partial" depends="dist.base">
+ <if><not><os family="windows"/></not><then>
+ <symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
+ </then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
+ <copydir dest="${dists.dir}/latest" src="${dist.dir}"/>
+ </else></if>
</target>
-<!-- ===========================================================================
-TEST AND DISTRIBUTION BUNDLE (ALL)
-============================================================================ -->
-
- <target name="all.done" depends="dist.done, test.done"/>
+ <target name="dist.done" depends="dist.doc, dist.man, dist.src, dist.partial"/>
- <target name="all.clean" depends="locker.clean, docs.clean, dist.clean, sbt.clean, osgi.clean"/>
<!-- ===========================================================================
-STABLE REFERENCE (STARR)
+ STABLE REFERENCE (STARR)
============================================================================ -->
<!-- Does not use any properties other than ${basedir}, so that it can
run without 'init' (when using 'replacestarrwin') -->
@@ -2906,28 +1859,19 @@ STABLE REFERENCE (STARR)
<target name="starr.start">
<fail message="Library in build/pack not available">
<condition><not><and>
- <available file="${basedir}/build/pack/lib/scala-library.jar"/>
+ <available file="${build-pack.dir}/lib/scala-library.jar"/>
</and></not></condition>
</fail>
<fail message="Compiler in build/quick not available">
<condition><not><and>
- <available file="${basedir}/build/quick/classes/compiler"/>
- <available file="${basedir}/build/quick/compiler.complete"/>
+ <available file="${build-quick.dir}/classes/compiler"/>
+ <available file="${build-quick.dir}/compiler.complete"/>
</and></not></condition>
</fail>
</target>
- <target name="starr.clean" depends="starr.start">
- <delete file="${basedir}/lib/scala-library.jar"/>
- <delete file="${basedir}/lib/scala-reflect.jar"/>
- <delete file="${basedir}/lib/scala-compiler.jar"/>
- <delete file="${basedir}/lib/scala-library-src.jar"/>
- <delete file="${basedir}/lib/scala-reflect-src.jar"/>
- <delete file="${basedir}/lib/scala-compiler-src.jar"/>
- </target>
-
<target name="starr.jars" depends="starr.start">
- <copy toDir="${basedir}/lib/" overwrite="yes">
+ <copy toDir="${lib.dir}" overwrite="yes">
<fileset dir="${build-pack.dir}/lib">
<include name="scala-library.jar"/>
<include name="scala-reflect.jar"/>
@@ -2937,20 +1881,16 @@ STABLE REFERENCE (STARR)
</target>
<target name="starr.src" depends="starr.jars">
- <jar destfile="${basedir}/lib/scala-library-src.jar">
- <fileset dir="${basedir}/src/library"/>
- <fileset dir="${basedir}/src/swing"/>
- <fileset dir="${basedir}/src/actors"/>
- <fileset dir="${basedir}/src/forkjoin"/>
- </jar>
- <jar destfile="${basedir}/lib/scala-reflect-src.jar">
- <fileset dir="${basedir}/src/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-library-src.jar">
+ <fileset dir="${src.dir}/library"/>
+ <fileset dir="${src.dir}/swing"/>
+ <fileset dir="${src.dir}/actors"/>
+ <fileset dir="${src.dir}/forkjoin"/>
</jar>
- <jar destfile="${basedir}/lib/scala-compiler-src.jar">
- <fileset dir="${basedir}/src/compiler"/>
- <fileset dir="${basedir}/src/asm"/>
- <fileset dir="${basedir}/src/fjbg"/>
- <fileset dir="${basedir}/src/msil"/>
+ <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${lib.dir}/scala-compiler-src.jar">
+ <fileset dir="${src.dir}/compiler"/>
+ <fileset dir="${src.dir}/asm"/>
</jar>
</target>
@@ -2970,152 +1910,71 @@ STABLE REFERENCE (STARR)
<target name="starr.done" depends="starr.jars, starr.removesha1"/>
-<!-- ===========================================================================
-FORWARDED TARGETS FOR PACKAGING
-============================================================================ -->
-
- <target name="distpack" depends="dist.done, docs.all">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
- </target>
-
- <target name="distpack-maven" depends="dist.done, docs.all">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/>
+ <target name="replacestarr" description="Replaces the Starr compiler and library by fresh ones built from current sources and tests them.">
+ <fail message="This target is not available on Windows. Use 'ant replacestarrwin' instead.">
+ <condition>
+ <os family="windows"/>
+ </condition>
+ </fail>
+ <!-- needs antcall to enforce ordering -->
+ <antcall target="locker.clean"/>
+ <antcall target="pack.done"/>
+ <antcall target="starr.done"/>
+ <antcall target="locker.clean"/>
+ <antcall target="test.done"/>
</target>
- <target name="distpack-opt"
- description="Builds an optimised distribution.">
- <antcall target="distpack">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+ <target name="replacestarr-opt" description="Replaces the Starr compiler and library by fresh, optimised ones built from current sources and tests them.">
+ <optimized name="replacestarr"/></target>
- <target name="distpack-maven-opt"
- description="Builds an optimised maven distribution.">
- <antcall target="distpack-maven">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
+ <!-- Ant on Windows is not able to delete jar files that are referenced in any <path>.
+ See ticket 1290 on trac. -->
+ <target name="replacestarrwin" description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
+ <fail message="This target is only available on Windows. Use 'ant replacestarr' instead.">
+ <condition>
+ <not><os family="windows"/></not>
+ </condition>
+ </fail>
+ <echo message="CAUTION: Make sure to execute 'ant locker.clean build' prior to calling 'replacestarrwin'."/>
+ <antcall target="starr.done"/>
+ <antcall target="locker.clean"/>
+ <antcall target="test.done"/>
</target>
- <!-- Used by the scala-installer script -->
- <target name="allallclean" depends="all.clean"/>
+ <target name="replacestarrwin-opt" description="Creates a new Starr on Windows. Manually execute 'ant locker.clean build' first!">
+ <optimized name="replacestarrwin"/></target>
-<!-- ===========================================================================
-FORWARDED TARGETS FOR NIGHTLY BUILDS
-============================================================================ -->
+ <target name="replacelocker" description="Replaces the Locker compiler and library by fresh ones built from current sources."
+ depends="palo.clean, locker.unlock, palo.done"/>
- <target name="nightly">
- <antcall target="nightly-nopt">
- <param name="scalac.args.optimise" value="-optimise"/>
- </antcall>
- </target>
+ <target name="replacelocker-opt" description="Replaces the Locker compiler and library by fresh, optimised ones built from current sources.">
+ <optimized name="replacelocker"/></target>
- <target name="nightly-nopt" depends="all.done, docs.all">
- <!-- cannot antcall all.done, the properties defined in there (dist.dir) are not returned. need depends. -->
- <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/>
- </target>
+ <target name="buildlocker" description="Does the same for locker as build does for quick." depends="locker.unlock, palo.bin"/>
+ <target name="unlocklocker" description="Same as buildlocker." depends="buildlocker"/> <!-- REMOVE -->
- <target name="nightly.checkall">
- <antcall target="nightly-nopt">
- <param name="partest.scalacopts" value="-Ycheck:all"/>
- </antcall>
+ <target name="fastlocker.lib" description="Buildlocker without extra fuss" depends="locker.unlock, locker.lib">
+ <property name="fastlocker" value="true"/>
</target>
-<!-- ===========================================================================
-POSITIONS
-============================================================================ -->
-
- <target name="test.positions" depends="quick.comp">
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/positions"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/compiler"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/library"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/actors"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/dbc"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/swing"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/partest"/>
- </antcall>
- <antcall target="test.positions.sub" inheritRefs="true">
- <param name="test.srcs" value="${src.dir}/scalap"/>
- </antcall>
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/pos"/>
- </antcall>
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/run"/>
- </antcall>
- <antcall target="test.positions.tests.sub" inheritRefs="true">
- <param name="test.tests.srcs" value="${partest.dir}/files/neg"/>
- </antcall>
+ <target name="fastlocker.reflect" description="Buildlocker without extra fuss" depends="locker.unlock, locker.reflect">
+ <property name="fastlocker" value="true"/>
</target>
- <target name="test.positions.sub">
- <echo message="Validating positions for: ${test.srcs}"/>
- <if>
- <isfileselected file="${test.srcs}">
- <type type="dir"/>
- </isfileselected>
- <then>
- <property name="srcdir" value="${test.srcs}"/>
- <property name="srcs" value="**/*.scala"/>
- </then>
- <else>
- <dirname property="srcdir" file="${test.srcs}"/>
- <basename property="srcs" file="${test.srcs}"/>
- </else>
- </if>
- <scalacfork
- destdir=""
- compilerpathref="locker.classpath"
- srcpath="${srcdir}"
- params="-Xprint-pos -Yide-debug"
- srcdir="${srcdir}"
- jvmargs="${scalacfork.jvmargs}">
- <include name="${srcs}"/>
- <compilationpath>
- <pathelement location="${build-quick.dir}/classes/library"/>
- </compilationpath>
- </scalacfork>
+ <target name="fastlocker.comp" description="Buildlocker without extra fuss" depends="locker.unlock, locker.comp">
+ <property name="fastlocker" value="true"/>
</target>
- <target name="test.positions.tests.sub">
- <foreach target="test.positions.sub"
- inheritAll="true"
- inheritRefs="true"
- param="test.srcs">
- <path>
- <fileset dir="${test.tests.srcs}" includes="*.scala"/>
- <dirset dir="${test.tests.srcs}">
- <include name="*"/>
- </dirset>
- </path>
- </foreach>
- </target>
+ <target name="fastlocker" description="Buildlocker without extra fuss" depends="fastlocker.comp"/>
<!-- ===========================================================================
-MISCELLANEOUS
+ VISUALIZATION
============================================================================ -->
-
<target name="graph.init">
<taskdef name="vizant" classname="vizant.Vizant" classpath="${lib-ant.dir}/vizant.jar"/>
</target>
- <target name="graph.clean" depends="graph.init">
- <vizant antfile="${ant.file}" outfile="${ant.project.name}.dot" from="starr.clean"/>
- </target>
-
<target name="graph.all" depends="graph.init">
<vizant antfile="${ant.file}" outfile="${ant.project.name}.dot" from="all.done"/>
</target>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 79611b55a2..20c4034107 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -112,8 +112,10 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
+ <!-- be sure to use a relative symlink to make the distribution portable,
+ `resource` is relative to directory of `link` -->
<symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
- resource="${dists.dir}/archives/scala-${version.number}-sources.tgz"
+ resource="scala-${version.number}-sources.tgz"
overwrite="yes"/>
</target>
@@ -179,41 +181,41 @@ MAIN DISTRIBUTION PACKAGING
<target name="pack-maven.srcs" depends="pack-maven.libs">
<!-- Add missing src jars. -->
- <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
basedir="${src.dir}/jline/src/main/java">
<include name="**/*"/>
</jar>
<!-- Continuations plugin -->
- <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
basedir="${src.dir}/continuations/plugin">
<include name="**/*"/>
</jar>
</target>
<target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
- <jar destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
basedir="${build-docs.dir}/jline">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
basedir="${build-docs.dir}/compiler">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
basedir="${build-docs.dir}/scalap">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
- basedir="${build-docs.dir}/scala-partest">
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-partest/scala-partest-docs.jar"
+ basedir="${build-docs.dir}/partest">
<include name="**/*"/>
</jar>
- <jar destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala
index 7c66d5b9eb..2e57bc59a8 100644
--- a/src/compiler/scala/reflect/reify/Errors.scala
+++ b/src/compiler/scala/reflect/reify/Errors.scala
@@ -27,6 +27,11 @@ trait Errors {
throw new ReificationException(defaultErrorPosition, msg)
}
+ def CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt: CompoundTypeTree) = {
+ val msg = "implementation restriction: cannot reify refinement type trees with non-empty bodies"
+ throw new ReificationException(ctt.pos, msg)
+ }
+
def CannotReifyWeakType(details: Any) = {
val msg = "cannot create a TypeTag" + details + ": use WeakTypeTag instead"
throw new ReificationException(defaultErrorPosition, msg)
diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala
index 7406f5d02d..a320718084 100644
--- a/src/compiler/scala/reflect/reify/phases/Reshape.scala
+++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala
@@ -181,6 +181,7 @@ trait Reshape {
private def toPreTyperCompoundTypeTree(ctt: CompoundTypeTree): Tree = {
val CompoundTypeTree(tmpl @ Template(parents, self, stats)) = ctt
+ if (stats.nonEmpty) CannotReifyCompoundTypeTreeWithNonEmptyBody(ctt)
assert(self eq emptyValDef, self)
val att = tmpl.attachments.get[CompoundTypeTreeOriginalAttachment]
val CompoundTypeTreeOriginalAttachment(parents1, stats1) = att.getOrElse(CompoundTypeTreeOriginalAttachment(parents, stats))
diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala
index 134ae13890..59cd4e5047 100644
--- a/src/compiler/scala/reflect/reify/utils/Extractors.scala
+++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala
@@ -164,51 +164,30 @@ trait Extractors {
}
}
- object FreeDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
- case FreeTermDef(uref, name, binding, flags, origin) =>
- Some((uref, name, binding, flags, origin))
- case FreeTypeDef(uref, name, binding, flags, origin) =>
- Some((uref, name, binding, flags, origin))
- case _ =>
- None
- }
- }
-
- object FreeTermDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
- case
- ValDef(_, name, _, Apply(
- Select(Select(uref1 @ Ident(_), build1), newFreeTerm),
- List(
- _,
- _,
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
- Literal(Constant(origin: String)))))
- if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeTerm == nme.newFreeTerm &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
- case _ =>
- None
- }
- }
-
- object FreeTypeDef {
- def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = tree match {
- case
- ValDef(_, name, _, Apply(
- Select(Select(uref1 @ Ident(_), build1), newFreeType),
- List(
- _,
- Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))),
- Literal(Constant(origin: String)))))
- if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && newFreeType == nme.newFreeType &&
- uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
- Some(uref1, name, reifyBinding(tree), flags, origin)
- case _ =>
- None
+ sealed abstract class FreeDefExtractor(acceptTerms: Boolean, acceptTypes: Boolean) {
+ def unapply(tree: Tree): Option[(Tree, TermName, Tree, Long, String)] = {
+ def acceptFreeTermFactory(name: Name) = {
+ (acceptTerms && name == nme.newFreeTerm) ||
+ (acceptTypes && name == nme.newFreeType)
+ }
+ tree match {
+ case
+ ValDef(_, name, _, Apply(
+ Select(Select(uref1 @ Ident(_), build1), freeTermFactory),
+ _ :+
+ Apply(Select(Select(uref2 @ Ident(_), build2), flagsFromBits), List(Literal(Constant(flags: Long)))) :+
+ Literal(Constant(origin: String))))
+ if uref1.name == nme.UNIVERSE_SHORT && build1 == nme.build && acceptFreeTermFactory(freeTermFactory) &&
+ uref2.name == nme.UNIVERSE_SHORT && build2 == nme.build && flagsFromBits == nme.flagsFromBits =>
+ Some(uref1, name, reifyBinding(tree), flags, origin)
+ case _ =>
+ None
+ }
}
}
+ object FreeDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = true)
+ object FreeTermDef extends FreeDefExtractor(acceptTerms = true, acceptTypes = false)
+ object FreeTypeDef extends FreeDefExtractor(acceptTerms = false, acceptTypes = true)
object FreeRef {
def unapply(tree: Tree): Option[(Tree, TermName)] = tree match {
diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
index 411a27b9c7..bd6cf561b9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl
@@ -14,8 +14,9 @@ set _LINE_TOOLCP=
:another_param
-if "%1%"=="-toolcp" (
- set _LINE_TOOLCP=%2%
+rem Use "%~1" to handle spaces in paths. See http://ss64.com/nt/syntax-args.html
+if "%~1"=="-toolcp" (
+ set _LINE_TOOLCP=%~2
shift
shift
goto another_param
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 116684c161..aea3e0d930 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -23,6 +23,7 @@ import plugins.Plugins
import ast._
import ast.parser._
import typechecker._
+import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
import backend.{ ScalaPrimitives, Platform, MSILPlatform, JavaPlatform }
@@ -1179,7 +1180,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
("\n" + info1) :: info2 :: info3 mkString "\n\n"
}
- catch { case x: Exception => errorMessage }
+ catch { case _: Exception | _: TypeError => errorMessage }
/** The id of the currently active run
*/
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index e635c5e87d..6e39fc9aa1 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -56,6 +56,11 @@ trait DocComments { self: Global =>
else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
}
+ def fillDocComment(sym: Symbol, comment: DocComment) {
+ docComments(sym) = comment
+ comment.defineVariables(sym)
+ }
+
/** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
* missing sections of an inherited doc comment.
* If a symbol does not have a doc comment but some overridden version of it does,
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 2ad762fd55..0a12737572 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -327,22 +327,61 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
case tpt: TypeTree =>
if (tpt.original != null)
transform(tpt.original)
- else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol)))) {
- val dupl = tpt.duplicate
- dupl.tpe = null
- dupl
+ else {
+ val refersToLocalSymbols = tpt.tpe != null && (tpt.tpe exists (tp => locals contains tp.typeSymbol))
+ val isInferred = tpt.wasEmpty
+ if (refersToLocalSymbols || isInferred) {
+ val dupl = tpt.duplicate
+ dupl.tpe = null
+ dupl
+ } else {
+ tpt
+ }
}
- else tree
+ // If one of the type arguments of a TypeApply gets reset to an empty TypeTree, then this means that:
+ // 1) It isn't empty now (tpt.tpe != null), but it was empty before (tpt.wasEmpty).
+ // 2) Thus, its argument got inferred during a preceding typecheck.
+ // 3) Thus, all its arguments were inferred (because scalac can only infer all or nothing).
+ // Therefore, we can safely erase the TypeApply altogether and have it inferred once again in a subsequent typecheck.
+ // UPD: Actually there's another reason for erasing a type behind the TypeTree
+ // is when this type refers to symbols defined in the tree being processed.
+ // These symbols will be erased, because we can't leave alive a type referring to them.
+ // Here we can only hope that everything will work fine afterwards.
case TypeApply(fn, args) if args map transform exists (_.isEmpty) =>
transform(fn)
- case This(_) if tree.symbol != null && tree.symbol.isPackageClass =>
- tree
case EmptyTree =>
tree
case _ =>
val dupl = tree.duplicate
- if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol)) && !(keepLabels && tree.symbol.isLabel))
- dupl.symbol = NoSymbol
+ // Typically the resetAttrs transformer cleans both symbols and types.
+ // However there are exceptions when we cannot erase symbols due to idiosyncrasies of the typer.
+ // vetoXXX local variables declared below describe the conditions under which we cannot erase symbols.
+ //
+ // The first reason to not erase symbols is the threat of non-idempotency (SI-5464).
+ // Here we take care of labels (SI-5562) and references to package classes (SI-5705).
+ // There are other non-idempotencies, but they are not worked around yet.
+ //
+ // The second reason has to do with the fact that resetAttrs itself has limited usefulness.
+ //
+ // First of all, why do we need resetAttrs? Gor one, it's absolutely required to move trees around.
+ // One cannot just take a typed tree from one lexical context and transplant it somewhere else.
+ // Most likely symbols defined by those trees will become borked and the compiler will blow up (SI-5797).
+ // To work around we just erase all symbols and types and then hope that we'll be able to correctly retypecheck.
+ // For ones who're not affected by scalac Stockholm syndrome, this might seem to be an extremely naive fix, but well...
+ //
+ // Of course, sometimes erasing everything won't work, because if a given identifier got resolved to something
+ // in one lexical scope, it can get resolved to something else.
+ //
+ // What do we do in these cases? Enter the workaround for the workaround: resetLocalAttrs, which only destroys
+ // locally defined symbols, but doesn't touch references to stuff declared outside of a given tree.
+ // That's what localOnly and vetoScope are for.
+ if (dupl.hasSymbol) {
+ val sym = dupl.symbol
+ val vetoScope = localOnly && !(locals contains sym)
+ val vetoLabel = keepLabels && sym.isLabel
+ val vetoThis = dupl.isInstanceOf[This] && sym.isPackageClass
+ if (!(vetoScope || vetoLabel || vetoThis)) dupl.symbol = NoSymbol
+ }
dupl.tpe = null
dupl
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 6f79f639b9..b8791c15dc 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -1107,32 +1107,33 @@ self =>
* | symbol
* | null
* }}}
- * @note The returned tree does not yet have a position
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false): Tree = {
- def finish(value: Any): Tree = {
- val t = Literal(Constant(value))
- in.nextToken()
- t
- }
- if (in.token == SYMBOLLIT)
- Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
- else if (in.token == INTERPOLATIONID)
- interpolatedString(inPattern = inPattern)
- else finish(in.token match {
- case CHARLIT => in.charVal
- case INTLIT => in.intVal(isNegated).toInt
- case LONGLIT => in.intVal(isNegated)
- case FLOATLIT => in.floatVal(isNegated).toFloat
- case DOUBLELIT => in.floatVal(isNegated)
- case STRINGLIT | STRINGPART => in.strVal.intern()
- case TRUE => true
- case FALSE => false
- case NULL => null
- case _ =>
- syntaxErrorOrIncomplete("illegal literal", true)
- null
- })
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = {
+ atPos(start) {
+ def finish(value: Any): Tree = {
+ val t = Literal(Constant(value))
+ in.nextToken()
+ t
+ }
+ if (in.token == SYMBOLLIT)
+ Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
+ else if (in.token == INTERPOLATIONID)
+ interpolatedString(inPattern = inPattern)
+ else finish(in.token match {
+ case CHARLIT => in.charVal
+ case INTLIT => in.intVal(isNegated).toInt
+ case LONGLIT => in.intVal(isNegated)
+ case FLOATLIT => in.floatVal(isNegated).toFloat
+ case DOUBLELIT => in.floatVal(isNegated)
+ case STRINGLIT | STRINGPART => in.strVal.intern()
+ case TRUE => true
+ case FALSE => false
+ case NULL => null
+ case _ =>
+ syntaxErrorOrIncomplete("illegal literal", true)
+ null
+ })
+ }
}
private def stringOp(t: Tree, op: TermName) = {
@@ -1332,11 +1333,10 @@ self =>
def parseWhile = {
val start = in.offset
atPos(in.skipToken()) {
- val lname: Name = freshTermName(nme.WHILE_PREFIX)
val cond = condExpr()
newLinesOpt()
val body = expr()
- makeWhile(lname, cond, body)
+ makeWhile(start, cond, body)
}
}
parseWhile
@@ -1510,7 +1510,7 @@ self =>
atPos(in.offset) {
val name = nme.toUnaryName(rawIdent())
if (name == nme.UNARY_- && isNumericLit)
- simpleExprRest(atPos(in.offset)(literal(isNegated = true)), canApply = true)
+ simpleExprRest(literal(isNegated = true), canApply = true)
else
Select(stripParens(simpleExpr()), name)
}
@@ -1535,7 +1535,7 @@ self =>
def simpleExpr(): Tree = {
var canApply = true
val t =
- if (isLiteral) atPos(in.offset)(literal())
+ if (isLiteral) literal()
else in.token match {
case XMLSTART =>
xmlLiteral()
@@ -1924,7 +1924,7 @@ self =>
case INTLIT | LONGLIT | FLOATLIT | DOUBLELIT =>
t match {
case Ident(nme.MINUS) =>
- return atPos(start) { literal(isNegated = true, inPattern = true) }
+ return literal(isNegated = true, inPattern = true, start = start)
case _ =>
}
case _ =>
@@ -1942,7 +1942,7 @@ self =>
atPos(start, start) { Ident(nme.WILDCARD) }
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL =>
- atPos(start) { literal(inPattern = true) }
+ literal(inPattern = true)
case LPAREN =>
atPos(start)(makeParens(noSeq.patterns()))
case XMLSTART =>
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 79f0bcf149..1aa50be83a 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -142,24 +142,16 @@ trait Scanners extends ScannersCommon {
/** Should doc comments be built? */
def buildDocs: Boolean = forScaladoc
- /** buffer for the documentation comment
+ /** holder for the documentation comment
*/
- var docBuffer: StringBuilder = null
- var docPos: Position = null
+ var docComment: DocComment = null
- /** Return current docBuffer and set docBuffer to null */
def flushDoc: DocComment = {
- val ret = if (docBuffer != null) DocComment(docBuffer.toString, docPos) else null
- docBuffer = null
+ val ret = docComment
+ docComment = null
ret
}
- /** add the given character to the documentation buffer
- */
- protected def putDocChar(c: Char) {
- if (docBuffer ne null) docBuffer.append(c)
- }
-
protected def foundComment(value: String, start: Int, end: Int) = ()
protected def foundDocComment(value: String, start: Int, end: Int) = ()
@@ -236,11 +228,11 @@ trait Scanners extends ScannersCommon {
while (!sepRegions.isEmpty && sepRegions.head != RBRACE)
sepRegions = sepRegions.tail
if (!sepRegions.isEmpty) sepRegions = sepRegions.tail
- docBuffer = null
+ docComment = null
case RBRACKET | RPAREN =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
- docBuffer = null
+ docComment = null
case ARROW =>
if (!sepRegions.isEmpty && sepRegions.head == lastToken)
sepRegions = sepRegions.tail
@@ -433,6 +425,7 @@ trait Scanners extends ScannersCommon {
if (ch == '\"') {
nextRawChar()
if (ch == '\"') {
+ offset += 3
nextRawChar()
getStringPart(multiLine = true)
sepRegions = STRINGPART :: sepRegions // indicate string part
@@ -442,6 +435,7 @@ trait Scanners extends ScannersCommon {
strVal = ""
}
} else {
+ offset += 1
getStringPart(multiLine = false)
sepRegions = STRINGLIT :: sepRegions // indicate single line string part
}
@@ -546,7 +540,7 @@ trait Scanners extends ScannersCommon {
nextChar()
} while ((ch != CR) && (ch != LF) && (ch != SU))
} else {
- docBuffer = null
+ docComment = null
var openComments = 1
appendToComment()
nextChar()
@@ -554,24 +548,23 @@ trait Scanners extends ScannersCommon {
var buildingDocComment = false
if (ch == '*' && buildDocs) {
buildingDocComment = true
- docBuffer = new StringBuilder("/**")
}
while (openComments > 0) {
do {
do {
if (ch == '/') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
if (ch == '*') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
openComments += 1
}
}
if (ch != '*' && ch != SU) {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
}
} while (ch != '*' && ch != SU)
while (ch == '*') {
- nextChar(); putDocChar(ch); appendToComment()
+ nextChar(); appendToComment()
}
} while (ch != '/' && ch != SU)
if (ch == '/') nextChar()
@@ -1310,7 +1303,8 @@ trait Scanners extends ScannersCommon {
}
override def foundDocComment(value: String, start: Int, end: Int) {
- docPos = new RangePosition(unit.source, start, start, end)
+ val docPos = new RangePosition(unit.source, start, start, end)
+ docComment = new DocComment(value, docPos)
unit.comment(docPos, value)
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 7969bb9c20..b5771454f8 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -251,8 +251,13 @@ abstract class TreeBuilder {
else CompoundTypeTree(Template(tps, emptyValDef, Nil))
/** Create tree representing a while loop */
- def makeWhile(lname: TermName, cond: Tree, body: Tree): Tree = {
- val continu = atPos(o2p(body.pos pointOrElse wrappingPos(List(cond, body)).pos.endOrPoint)) { Apply(Ident(lname), Nil) }
+ def makeWhile(startPos: Int, cond: Tree, body: Tree): Tree = {
+ val lname = freshTermName(nme.WHILE_PREFIX)
+ def default = wrappingPos(List(cond, body)) match {
+ case p if p.isDefined => p.endOrPoint
+ case _ => startPos
+ }
+ val continu = atPos(o2p(body.pos pointOrElse default)) { Apply(Ident(lname), Nil) }
val rhs = If(cond, Block(List(body), continu), Literal(Constant()))
LabelDef(lname, Nil, rhs)
}
@@ -589,7 +594,7 @@ abstract class TreeBuilder {
if (contextBounds.isEmpty) vparamss
else {
val mods = Modifiers(if (owner.isTypeName) PARAMACCESSOR | LOCAL | PRIVATE else PARAM)
- def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)
+ def makeEvidenceParam(tpt: Tree) = ValDef(mods | IMPLICIT | SYNTHETIC, freshTermName(nme.EVIDENCE_PARAM_PREFIX), tpt, EmptyTree)
val evidenceParams = contextBounds map makeEvidenceParam
val vparamssLast = if(vparamss.nonEmpty) vparamss.last else Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 7ba212f42e..b74770f051 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -171,6 +171,7 @@ trait Members {
var returnType: TypeKind = _
var recursive: Boolean = false
var bytecodeHasEHs = false // set by ICodeReader only, used by Inliner to prevent inlining (SI-6188)
+ var bytecodeHasInvokeDynamic = false // set by ICodeReader only, used by Inliner to prevent inlining until we have proper invoke dynamic support
/** local variables and method parameters */
var locals: List[Local] = Nil
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 8c9a72638d..a3a0edb35d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -409,6 +409,25 @@ trait Opcodes { self: ICodes =>
override def category = mthdsCat
}
+
+ /**
+ * A placeholder entry that allows us to parse class files with invoke dynamic
+ * instructions. Because the compiler doesn't yet really understand the
+ * behavior of invokeDynamic, this op acts as a poison pill. Any attempt to analyze
+ * this instruction will cause a failure. The only optimization that
+ * should ever look at non-Scala generated icode is the inliner, and it
+ * has been modified to not examine any method with invokeDynamic
+ * instructions. So if this poison pill ever causes problems then
+ * there's been a serious misunderstanding
+ */
+ // TODO do the real thing
+ case class INVOKE_DYNAMIC(poolEntry: Char) extends Instruction {
+ private def error = sys.error("INVOKE_DYNAMIC is not fully implemented and should not be analyzed")
+ override def consumed = error
+ override def produced = error
+ override def producedTypes = error
+ override def category = error
+ }
case class BOX(boxType: TypeKind) extends Instruction {
assert(boxType.isValueType && (boxType ne UNIT)) // documentation
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 0abbe44b02..7c46d648fe 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -2143,8 +2143,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
val start = st.pop()
seen ::= LocVarEntry(lv, start, end)
case _ =>
- // TODO SI-6049
- getCurrentCUnit().warning(iPos, "Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6049")
+ // TODO SI-6049 track down the cause for these.
+ debugwarn(s"$iPos: Visited SCOPE_EXIT before visiting corresponding SCOPE_ENTER. SI-6191")
}
}
@@ -2259,16 +2259,16 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
// info calls so that types are up to date; erasure may add lateINTERFACE to traits
hostSymbol.info ; methodOwner.info
- def isInterfaceCall(sym: Symbol) = (
- sym.isInterface && methodOwner != ObjectClass
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
|| sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
)
// whether to reference the type of the receiver or
- // the type of the method owner (if not an interface!)
+ // the type of the method owner
val useMethodOwner = (
style != Dynamic
- || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
|| hostSymbol.isBottomClass
+ || methodOwner == ObjectClass
)
val receiver = if (useMethodOwner) methodOwner else hostSymbol
val jowner = javaName(receiver)
@@ -2291,11 +2291,11 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
}
style match {
- case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
- case Dynamic if isInterfaceCall(receiver) => dbg("invokinterface"); jcode.invokeinterface(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
- case SuperCall(_) =>
+ case Static(true) => dbg("invokespecial"); jcode.invokespecial (jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.invokestatic (jowner, jname, jtype)
+ case Dynamic if needsInterfaceCall(receiver) => dbg("invokinterface"); jcode.invokeinterface(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.invokevirtual (jowner, jname, jtype)
+ case SuperCall(_) =>
dbg("invokespecial")
jcode.invokespecial(jowner, jname, jtype)
initModule()
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 598965b982..36b294b289 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -1196,16 +1196,16 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
// info calls so that types are up to date; erasure may add lateINTERFACE to traits
hostSymbol.info ; methodOwner.info
- def isInterfaceCall(sym: Symbol) = (
- sym.isInterface && methodOwner != ObjectClass
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
|| sym.isJavaDefined && sym.isNonBottomSubClass(ClassfileAnnotationClass)
)
// whether to reference the type of the receiver or
- // the type of the method owner (if not an interface!)
+ // the type of the method owner
val useMethodOwner = (
style != Dynamic
- || !isInterfaceCall(hostSymbol) && isAccessibleFrom(methodOwner, siteSymbol)
|| hostSymbol.isBottomClass
+ || methodOwner == ObjectClass
)
val receiver = if (useMethodOwner) methodOwner else hostSymbol
val jowner = javaName(receiver)
@@ -1230,11 +1230,11 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
}
style match {
- case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
- case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
- case Dynamic if isInterfaceCall(receiver) => dbg("invokinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
- case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
- case SuperCall(_) =>
+ case Static(true) => dbg("invokespecial"); jcode.emitINVOKESPECIAL(jowner, jname, jtype)
+ case Static(false) => dbg("invokestatic"); jcode.emitINVOKESTATIC(jowner, jname, jtype)
+ case Dynamic if needsInterfaceCall(receiver) => dbg("invokinterface"); jcode.emitINVOKEINTERFACE(jowner, jname, jtype)
+ case Dynamic => dbg("invokevirtual"); jcode.emitINVOKEVIRTUAL(jowner, jname, jtype)
+ case SuperCall(_) =>
dbg("invokespecial")
jcode.emitINVOKESPECIAL(jowner, jname, jtype)
initModule()
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 521b6cc132..498db78636 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -961,6 +961,7 @@ abstract class Inliners extends SubComponent {
if(isInlineForbidden) { rs ::= "is annotated @noinline" }
if(inc.isSynchronized) { rs ::= "is synchronized method" }
if(inc.m.bytecodeHasEHs) { rs ::= "bytecode contains exception handlers / finally clause" } // SI-6188
+ if(inc.m.bytecodeHasInvokeDynamic) { rs ::= "bytecode contains invoke dynamic" }
if(rs.isEmpty) null else rs.mkString("", ", and ", "")
}
diff --git a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
index 35390adcd9..cdcfeaae81 100755
--- a/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
+++ b/src/compiler/scala/tools/nsc/doc/base/MemberLookupBase.scala
@@ -10,15 +10,15 @@ import comment._
trait MemberLookupBase {
val global: Global
- val settings: doc.Settings
-
import global._
+
def internalLink(sym: Symbol, site: Symbol): Option[LinkTo]
def chooseLink(links: List[LinkTo]): LinkTo
def toString(link: LinkTo): String
+ def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal]
+ def warnNoLink: Boolean
import global._
- import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
import rootMirror.{RootPackage, EmptyPackage}
private def isRoot(s: Symbol) = s.isRootSymbol || s.isEmptyPackage || s.isEmptyPackageClass
@@ -83,7 +83,7 @@ trait MemberLookupBase {
}
links match {
case Nil =>
- if (!settings.docNoLinkWarnings.value)
+ if (warnNoLink)
reporter.warning(pos, "Could not find any member to link for \"" + query + "\".")
// (4) if we still haven't found anything, create a tooltip
Tooltip(query)
@@ -95,7 +95,7 @@ trait MemberLookupBase {
if (link == chosen) " [chosen]" else ""
toString(link) + chosenInfo + "\n"
}
- if (!settings.docNoLinkWarnings.value) {
+ if (warnNoLink) {
val allLinks = links.map(linkToString).mkString
reporter.warning(pos,
s"""The link target \"$query\" is ambiguous. Several members fit the target:
@@ -199,29 +199,6 @@ trait MemberLookupBase {
members.reverse
}
-
- def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
- val sym1 =
- if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
- else if (sym.isPackage)
- /* Get package object which has associatedFile ne null */
- sym.info.member(newTermName("package"))
- else sym
- Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
- val path = src.path
- settings.extUrlMapping get path map { url =>
- LinkToExternal(name, url + "#" + name)
- }
- } orElse {
- // Deprecated option.
- settings.extUrlPackageMapping find {
- case (pkg, _) => name startsWith pkg
- } map {
- case (_, url) => LinkToExternal(name, url + "#" + name)
- }
- }
- }
-
def externalSignature(sym: Symbol) = {
sym.info // force it, otherwise we see lazy types
(sym.nameString + sym.signatureString).replaceAll("\\s", "")
diff --git a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
index 02e662da85..eb0d751f3e 100755
--- a/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
+++ b/src/compiler/scala/tools/nsc/doc/base/comment/Body.scala
@@ -75,16 +75,20 @@ object EntityLink {
def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
}
final case class HtmlTag(data: String) extends Inline {
- def canClose(open: HtmlTag) = {
- open.data.stripPrefix("<") == data.stripPrefix("</")
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+ private val (isEnd, tagName) = data match {
+ case Pattern(s1, s2) =>
+ (! s1.isEmpty, Some(s2.toLowerCase))
+ case _ =>
+ (false, None)
}
- def close = {
- if (data.indexOf("</") == -1)
- Some(HtmlTag("</" + data.stripPrefix("<")))
- else
- None
+ def canClose(open: HtmlTag) = {
+ isEnd && tagName == open.tagName
}
+
+ private val TagsNotToClose = Set("br", "img")
+ def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
}
/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index ee78f4ea7a..6fdaaed75f 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -6,6 +6,7 @@
package scala.tools.nsc.doc.html
import scala.xml.NodeSeq
+import scala.annotation.tailrec
/** Highlight the syntax of Scala code appearing in a `{{{` wiki block
* (see method `HtmlPage.blockToHtml`).
@@ -209,9 +210,9 @@ private[html] object SyntaxHigh {
out.toString
}
- def parse(pre: String, i: Int): Int = {
+ @tailrec def parse(pre: String, i: Int): Unit = {
out append pre
- if (i == buf.length) return i
+ if (i == buf.length) return
buf(i) match {
case '\n' =>
parse("\n", i+1)
@@ -277,7 +278,6 @@ private[html] object SyntaxHigh {
} else
parse(buf(i).toChar.toString, i+1)
}
- i
}
parse("", 0)
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
index c76bdc58d9..8802d7c35c 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Index.scala
@@ -27,13 +27,19 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
val headers =
<xml:group>
<link href={ relativeLinkTo{List("index.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.layout.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("index.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("scheduler.js", "lib")} }></script>
</xml:group>
+ private val scripts = {
+ val sources =
+ (List("jquery.js", "jquery-ui.js", "jquery.layout.js", "scheduler.js", "index.js").map {
+ x => relativeLinkTo(List(x, "lib"))
+ }) :+ "index.js"
+
+ sources map {
+ src => <script defer="defer" type="text/javascript" src={src}></script>
+ }
+ }
+
val body =
<body>
<div id="library">
@@ -46,6 +52,7 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
<div id="content" class="ui-layout-center">
<iframe id="template" name="template" src={ relativeLinkTo{List("package.html")} }/>
</div>
+ { scripts }
</body>
def letters: NodeSeq =
@@ -125,7 +132,7 @@ class Index(universe: doc.Universe, val index: doc.Index) extends HtmlPage {
</xml:group>
}
packageElem(universe.rootPackage)
- }</div></div><script src="index.js"></script>
+ }</div></div>
</div>
def packageQualifiedName(ety: DocTemplateEntity): String =
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index ea07ff29c4..63c77e7bb3 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -40,14 +40,6 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
<xml:group>
<link href={ relativeLinkTo{List("template.css", "lib")} } media="screen" type="text/css" rel="stylesheet"/>
<link href={ relativeLinkTo{List("diagrams.css", "lib")} } media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
- <script type="text/javascript" src={ relativeLinkTo{List("jquery.js", "lib")} } id="jquery-js"></script>
- <script type="text/javascript" src={ relativeLinkTo{List("jquery-ui.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("template.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("tools.tooltip.js", "lib")} }></script>
- { if (universe.settings.docDiagrams.value) {
- <script type="text/javascript" src={ relativeLinkTo{List("modernizr.custom.js", "lib")} }></script>
- <script type="text/javascript" src={ relativeLinkTo{List("diagrams.js", "lib")} } id="diagrams-js"></script>
- } else NodeSeq.Empty }
<script type="text/javascript">
if(top === self) {{
var url = '{ val p = templateToPath(tpl); "../" * (p.size - 1) + "index.html" }';
@@ -61,6 +53,22 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
</script>
</xml:group>
+ private val scripts = {
+ val sources = {
+ val default = List("jquery.js", "jquery-ui.js", "tools.tooltip.js", "template.js")
+ val forDiagrams = List("modernizr.custom.js", "diagrams.js")
+
+ (default ++ (if (universe.settings.docDiagrams.value) forDiagrams else Nil)) map {
+ x => x.replace('.', '-') -> relativeLinkTo(List(x, "lib"))
+ }
+ }
+
+ sources map {
+ case (id, src) =>
+ <script defer="defer" type="text/javascript" id={id} src={src}></script>
+ }
+ }
+
val valueMembers =
tpl.methods ++ tpl.values ++ tpl.templates.filter(x => x.isObject || x.isPackage) sorted
@@ -215,7 +223,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
{ if (shadowedImplicitMembers.isEmpty) NodeSeq.Empty else
<div id="values" class="values members">
- <h3>Shadowed Implict Value Members</h3>
+ <h3>Shadowed Implicit Value Members</h3>
<ol>{ shadowedImplicitMembers map (memberToHtml(_, tpl)) }</ol>
</div>
}
@@ -280,8 +288,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp
else
<div id="footer"> { tpl.universe.settings.docfooter.value } </div>
}
-
-
+ { scripts }
</body>
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
index c7a767f992..23259a4ae8 100644
--- a/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/MemberLookup.scala
@@ -9,6 +9,7 @@ trait MemberLookup extends base.MemberLookupBase {
thisFactory: ModelFactory =>
import global._
+ import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass }
override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] =
findTemplateMaybe(sym) match {
@@ -35,4 +36,28 @@ trait MemberLookup extends base.MemberLookupBase {
mbr.sym.signatureString + " in " + inTpl.sym.toString
case _ => link.toString
}
+
+ override def findExternalLink(sym: Symbol, name: String): Option[LinkToExternal] = {
+ val sym1 =
+ if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass
+ else if (sym.isPackage)
+ /* Get package object which has associatedFile ne null */
+ sym.info.member(newTermName("package"))
+ else sym
+ Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src =>
+ val path = src.path
+ settings.extUrlMapping get path map { url =>
+ LinkToExternal(name, url + "#" + name)
+ }
+ } orElse {
+ // Deprecated option.
+ settings.extUrlPackageMapping find {
+ case (pkg, _) => name startsWith pkg
+ } map {
+ case (_, url) => LinkToExternal(name, url + "#" + name)
+ }
+ }
+ }
+
+ override def warnNoLink = !settings.docNoLinkWarnings.value
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
index 0a469c9227..d9b173bc43 100644
--- a/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/ModelFactory.scala
@@ -897,32 +897,36 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
}
}
- /** */
def makeAnnotation(annot: AnnotationInfo): scala.tools.nsc.doc.model.Annotation = {
val aSym = annot.symbol
new EntityImpl(aSym, makeTemplate(aSym.owner)) with scala.tools.nsc.doc.model.Annotation {
lazy val annotationClass =
makeTemplate(annot.symbol)
- val arguments = { // lazy
- def noParams = annot.args map { _ => None }
- val params: List[Option[ValueParam]] = annotationClass match {
+ val arguments = {
+ val paramsOpt: Option[List[ValueParam]] = annotationClass match {
case aClass: DocTemplateEntity with Class =>
- (aClass.primaryConstructor map { _.valueParams.head }) match {
- case Some(vps) => vps map { Some(_) }
- case None => noParams
+ val constr = aClass.constructors collectFirst {
+ case c: MemberImpl if c.sym == annot.original.symbol => c
}
- case _ => noParams
+ constr flatMap (_.valueParams.headOption)
+ case _ => None
}
- assert(params.length == annot.args.length)
- (params zip annot.args) flatMap { case (param, arg) =>
- makeTree(arg) match {
- case Some(tree) =>
- Some(new ValueArgument {
- def parameter = param
+ val argTrees = annot.args map makeTree
+ paramsOpt match {
+ case Some (params) =>
+ params zip argTrees map { case (param, tree) =>
+ new ValueArgument {
+ def parameter = Some(param)
def value = tree
- })
- case None => None
- }
+ }
+ }
+ case None =>
+ argTrees map { tree =>
+ new ValueArgument {
+ def parameter = None
+ def value = tree
+ }
+ }
}
}
}
@@ -962,9 +966,8 @@ class ModelFactory(val global: Global, val settings: doc.Settings) {
sym.name == aSym.name &&
sym.isParamWithDefault
)
- (unit.body find (t => isCorrespondingParam(t.symbol))) match {
- case Some(ValDef(_,_,_,rhs)) => makeTree(rhs)
- case _ => None
+ unit.body find (t => isCorrespondingParam(t.symbol)) collect {
+ case ValDef(_,_,_,rhs) if rhs ne EmptyTree => makeTree(rhs)
}
case _ => None
}
diff --git a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
index bd7534ded4..fdad84d0bc 100755
--- a/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/TreeFactory.scala
@@ -19,7 +19,7 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
val global: Global
import global._
- def makeTree(rhs: Tree): Option[TreeEntity] = {
+ def makeTree(rhs: Tree): TreeEntity = {
var expr = new StringBuilder
var refs = new immutable.TreeMap[Int, (Entity, Int)] // start, (Entity to be linked to , end)
@@ -80,17 +80,16 @@ trait TreeFactory { thisTreeFactory: ModelFactory with TreeFactory =>
traverser.traverse(rhs)
- Some(new TreeEntity {
+ new TreeEntity {
val expression = expr.toString
val refEntity = refs
- })
+ }
}
- case pos: OffsetPosition =>
- Some(new TreeEntity {
+ case _ =>
+ new TreeEntity {
val expression = rhs.toString
val refEntity = new immutable.TreeMap[Int, (Entity, Int)]
- })
- case _ => None
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
index 73738ebd21..af82957a2e 100644
--- a/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
+++ b/src/compiler/scala/tools/nsc/interactive/CompilerControl.scala
@@ -164,17 +164,22 @@ trait CompilerControl { self: Global =>
/** Sets sync var `response` to doc comment information for a given symbol.
*
- * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
- * @param site The place where sym is observed.
- * @param source The source file that's supposed to contain the definition
- * @param response A response that will be set to the following:
- * If `source` contains a definition of a given symbol that has a doc comment,
- * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
- * Note: This operation does not automatically load `source`. If `source`
- * is unloaded, it stays that way.
+ * @param sym The symbol whose doc comment should be retrieved (might come from a classfile)
+ * @param source The source file that's supposed to contain the definition
+ * @param site The symbol where 'sym' is observed
+ * @param fragments All symbols that can contribute to the generated documentation
+ * together with their source files.
+ * @param response A response that will be set to the following:
+ * If `source` contains a definition of a given symbol that has a doc comment,
+ * the (expanded, raw, position) triplet for a comment, otherwise ("", "", NoPosition).
+ * Note: This operation does not automatically load sources that are not yet loaded.
*/
- def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) =
- postWorkItem(new AskDocCommentItem(sym, site, source, response))
+ def askDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]): Unit =
+ postWorkItem(new AskDocCommentItem(sym, source, site, fragments, response))
+
+ @deprecated("Use method that accepts fragments", "2.10.2")
+ def askDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]): Unit =
+ askDocComment(sym, source, site, (sym,source)::Nil, response)
/** Sets sync var `response` to list of members that are visible
* as members of the tree enclosing `pos`, possibly reachable by an implicit.
@@ -390,9 +395,9 @@ trait CompilerControl { self: Global =>
response raise new MissingResponse
}
- case class AskDocCommentItem(val sym: Symbol, val site: Symbol, val source: SourceFile, response: Response[(String, String, Position)]) extends WorkItem {
- def apply() = self.getDocComment(sym, site, source, response)
- override def toString = "doc comment "+sym+" in "+source
+ case class AskDocCommentItem(val sym: Symbol, val source: SourceFile, val site: Symbol, val fragments: List[(Symbol,SourceFile)], response: Response[(String, String, Position)]) extends WorkItem {
+ def apply() = self.getDocComment(sym, source, site, fragments, response)
+ override def toString = "doc comment "+sym+" in "+source+" with fragments:"+fragments.mkString("(", ",", ")")
def raiseMissing() =
response raise new MissingResponse
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index 105b0e4833..c63cca9b88 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -73,7 +73,6 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
if (verboseIDE) println("[%s][%s]".format(projectName, msg))
override def forInteractive = true
- override def forScaladoc = settings.isScaladoc
/** A map of all loaded files to the rich compilation units that correspond to them.
*/
@@ -705,8 +704,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
* If we do just removeUnit, some problems with default parameters can ensue.
* Calls to this method could probably be replaced by removeUnit once default parameters are handled more robustly.
*/
- private def afterRunRemoveUnitOf(source: SourceFile) {
- toBeRemovedAfterRun += source.file
+ private def afterRunRemoveUnitsOf(sources: List[SourceFile]) {
+ toBeRemovedAfterRun ++= sources map (_.file)
}
/** A fully attributed tree located at position `pos` */
@@ -714,7 +713,7 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
case None =>
reloadSources(List(pos.source))
try typedTreeAt(pos)
- finally afterRunRemoveUnitOf(pos.source)
+ finally afterRunRemoveUnitsOf(List(pos.source))
case Some(unit) =>
informIDE("typedTreeAt " + pos)
parseAndEnter(unit)
@@ -764,14 +763,23 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
respond(response)(typedTree(source, forceReload))
}
+ private def withTempUnits[T](sources: List[SourceFile])(f: (SourceFile => RichCompilationUnit) => T): T = {
+ val unitOfSrc: SourceFile => RichCompilationUnit = src => unitOfFile(src.file)
+ sources filterNot (getUnit(_).isDefined) match {
+ case Nil =>
+ f(unitOfSrc)
+ case unknown =>
+ reloadSources(unknown)
+ try {
+ f(unitOfSrc)
+ } finally
+ afterRunRemoveUnitsOf(unknown)
+ }
+ }
+
private def withTempUnit[T](source: SourceFile)(f: RichCompilationUnit => T): T =
- getUnit(source) match {
- case None =>
- reloadSources(List(source))
- try f(getUnit(source).get)
- finally afterRunRemoveUnitOf(source)
- case Some(unit) =>
- f(unit)
+ withTempUnits(List(source)){ srcToUnit =>
+ f(srcToUnit(source))
}
/** Find a 'mirror' of symbol `sym` in unit `unit`. Pre: `unit is loaded. */
@@ -835,48 +843,36 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "")
}
}
+ private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
+ unit.body foreachPartial {
+ case DocDef(comment, defn) if defn.symbol == sym =>
+ fillDocComment(defn.symbol, comment)
+ EmptyTree
+ case _: ValOrDefDef =>
+ EmptyTree
+ }
+ }
+
/** Implements CompilerControl.askDocComment */
- private[interactive] def getDocComment(sym: Symbol, site: Symbol, source: SourceFile, response: Response[(String, String, Position)]) {
- informIDE("getDocComment "+sym+" "+source)
+ private[interactive] def getDocComment(sym: Symbol, source: SourceFile, site: Symbol, fragments: List[(Symbol,SourceFile)],
+ response: Response[(String, String, Position)]) {
+ informIDE(s"getDocComment $sym at $source, site $site")
respond(response) {
- withTempUnit(source){ u =>
- val mirror = findMirrorSymbol(sym, u)
+ withTempUnits(fragments.unzip._2){ units =>
+ for((sym, src) <- fragments) {
+ val mirror = findMirrorSymbol(sym, units(src))
+ if (mirror ne NoSymbol) forceDocComment(mirror, units(src))
+ }
+ val mirror = findMirrorSymbol(sym, units(source))
if (mirror eq NoSymbol)
("", "", NoPosition)
else {
- forceDocComment(mirror, u)
- (expandedDocComment(mirror), rawDocComment(mirror), docCommentPos(mirror))
+ (expandedDocComment(mirror, site), rawDocComment(mirror), docCommentPos(mirror))
}
}
}
- }
-
- private def forceDocComment(sym: Symbol, unit: RichCompilationUnit) {
- // Either typer has been run and we don't find DocDef,
- // or we force the targeted typecheck here.
- // In both cases doc comment maps should be filled for the subject symbol.
- val docTree =
- unit.body find {
- case DocDef(_, defn) if defn.symbol eq sym => true
- case _ => false
- }
-
- for (t <- docTree) {
- debugLog("Found DocDef tree for "+sym)
- // Cannot get a typed tree at position since DocDef range is transparent.
- val prevPos = unit.targetPos
- val prevInterruptsEnabled = interruptsEnabled
- try {
- unit.targetPos = t.pos
- interruptsEnabled = true
- typeCheck(unit)
- } catch {
- case _: TyperResult => // ignore since we are after the side effect.
- } finally {
- unit.targetPos = prevPos
- interruptsEnabled = prevInterruptsEnabled
- }
- }
+ // New typer run to remove temp units and drop per-run caches that might refer to symbols entered from temp units.
+ newTyperRun()
}
def stabilizedType(tree: Tree): Type = tree match {
diff --git a/src/compiler/scala/tools/nsc/interactive/Picklers.scala b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
index 84cb03c140..2b389158c3 100644
--- a/src/compiler/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Picklers.scala
@@ -166,8 +166,8 @@ trait Picklers { self: Global =>
.asClass (classOf[AskLinkPosItem])
implicit def askDocCommentItem: CondPickler[AskDocCommentItem] =
- (pkl[Symbol] ~ pkl[Symbol] ~ pkl[SourceFile])
- .wrapped { case sym ~ site ~ source => new AskDocCommentItem(sym, site, source, new Response) } { item => item.sym ~ item.site ~ item.source }
+ (pkl[Symbol] ~ pkl[SourceFile] ~ pkl[Symbol] ~ pkl[List[(Symbol,SourceFile)]])
+ .wrapped { case sym ~ source ~ site ~ fragments => new AskDocCommentItem(sym, source, site, fragments, new Response) } { item => item.sym ~ item.source ~ item.site ~ item.fragments }
.asClass (classOf[AskDocCommentItem])
implicit def askLoadedTypedItem: CondPickler[AskLoadedTypedItem] =
diff --git a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
index 7bb2b7b7c8..f304eda753 100644
--- a/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
+++ b/src/compiler/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala
@@ -8,7 +8,7 @@ import scala.reflect.internal.util.Position
/** Trait encapsulating the creation of a presentation compiler's instance.*/
private[tests] trait PresentationCompilerInstance extends TestSettings {
protected val settings = new Settings
- protected val docSettings = new doc.Settings(_ => ())
+ protected val withDocComments = false
protected val compilerReporter: CompilerReporter = new InteractiveReporter {
override def compiler = PresentationCompilerInstance.this.compiler
@@ -16,7 +16,9 @@ private[tests] trait PresentationCompilerInstance extends TestSettings {
protected lazy val compiler: Global = {
prepareSettings(settings)
- new Global(settings, compilerReporter)
+ new Global(settings, compilerReporter) {
+ override def forScaladoc = withDocComments
+ }
}
/**
diff --git a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
index c4a672ac37..827ebe1678 100644
--- a/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/ExprTyper.scala
@@ -57,15 +57,13 @@ trait ExprTyper {
// Typing it with a lazy val would give us the right type, but runs
// into compiler bugs with things like existentials, so we compile it
// behind a def and strip the NullaryMethodType which wraps the expr.
- val line = "def " + name + " = {\n" + code + "\n}"
+ val line = "def " + name + " = " + code
interpretSynthetic(line) match {
case IR.Success =>
val sym0 = symbolOfTerm(name)
// drop NullaryMethodType
- val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
- if (sym.info.typeSymbol eq UnitClass) NoSymbol
- else sym
+ sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
case _ => NoSymbol
}
}
@@ -82,7 +80,11 @@ trait ExprTyper {
case _ => NoSymbol
}
}
- beQuietDuring(asExpr()) orElse beQuietDuring(asDefn())
+ def asError(): Symbol = {
+ interpretSynthetic(code)
+ NoSymbol
+ }
+ beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError()
}
private var typeOfExpressionDepth = 0
diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
index 43a8402fc7..0779e648cd 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala
@@ -420,6 +420,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
case FINAL =>
flags |= Flags.FINAL
in.nextToken
+ case DEFAULT =>
+ flags |= Flags.DEFAULTMETHOD
+ in.nextToken()
case NATIVE =>
addAnnot(NativeAttr)
in.nextToken
@@ -544,8 +547,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val vparams = formalParams()
if (!isVoid) rtpt = optArrayBrackets(rtpt)
optThrows()
+ val bodyOk = !inInterface || (mods hasFlag Flags.DEFAULTMETHOD)
val body =
- if (!inInterface && in.token == LBRACE) {
+ if (bodyOk && in.token == LBRACE) {
methodBody()
} else {
if (parentToken == AT && in.token == DEFAULT) {
@@ -800,13 +804,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners {
val pos = in.currentPos
val name = identForType()
val (statics, body) = typeBody(AT, name)
- def getValueMethodType(tree: Tree) = tree match {
- case DefDef(_, nme.value, _, _, tpt, _) => Some(tpt.duplicate)
- case _ => None
- }
- var templ = makeTemplate(annotationParents, body)
- for (stat <- templ.body; tpt <- getValueMethodType(stat))
- templ = makeTemplate(annotationParents, makeConstructor(List(tpt)) :: templ.body)
+ val templ = makeTemplate(annotationParents, body)
addCompanionObject(statics, atPos(pos) {
ClassDef(mods, name, List(), templ)
})
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala
index f116a7c4c7..ef41246af9 100644
--- a/src/compiler/scala/tools/nsc/matching/Patterns.scala
+++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala
@@ -402,7 +402,7 @@ trait Patterns extends ast.TreeDSL {
case _ => toPats(args)
}
- def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args.length)
+ def resTypes = analyzer.unapplyTypeList(unfn.pos, unfn.symbol, unfn.tpe, args)
def resTypesString = resTypes match {
case Nil => "Boolean"
case xs => xs.mkString(", ")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 348c7f688f..9e5186b731 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -97,8 +97,15 @@ abstract class SymbolLoaders {
val clazz = enterClass(root, name, completer)
val module = enterModule(root, name, completer)
if (!clazz.isAnonymousClass) {
- assert(clazz.companionModule == module, module)
- assert(module.companionClass == clazz, clazz)
+ // Diagnostic for SI-7147
+ def msg: String = {
+ def symLocation(sym: Symbol) = if (sym == null) "null" else s"${clazz.fullLocationString} (from ${clazz.associatedFile})"
+ sm"""Inconsistent class/module symbol pair for `$name` loaded from ${symLocation(root)}.
+ |clazz = ${symLocation(clazz)}; clazz.companionModule = ${clazz.companionModule}
+ |module = ${symLocation(module)}; module.companionClass = ${module.companionClass}"""
+ }
+ assert(clazz.companionModule == module, msg)
+ assert(module.companionClass == clazz, msg)
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index a517a33279..da117540b4 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -44,7 +44,7 @@ abstract class ClassfileParser {
def srcfile = srcfile0
- private def currentIsTopLevel = currentClass.toString.indexOf('$') < 0
+ private def currentIsTopLevel = !(currentClass.decodedName containsChar '$')
private object unpickler extends scala.reflect.internal.pickling.UnPickler {
val global: ClassfileParser.this.global.type = ClassfileParser.this.global
@@ -52,18 +52,14 @@ abstract class ClassfileParser {
private def handleMissing(e: MissingRequirementError) = {
if (settings.debug.value) e.printStackTrace
- throw new IOException("Missing dependency '" + e.req + "', required by " + in.file)
+ throw new IOException(s"Missing dependency '${e.req}', required by ${in.file}")
}
private def handleError(e: Exception) = {
if (settings.debug.value) e.printStackTrace()
- throw new IOException("class file '%s' is broken\n(%s/%s)".format(
- in.file,
- e.getClass,
- if (e.getMessage eq null) "" else e.getMessage)
- )
+ throw new IOException(s"class file '${in.file}' is broken\n(${e.getClass}/${e.getMessage})")
}
private def mismatchError(c: Symbol) = {
- throw new IOException("class file '%s' has location not matching its contents: contains ".format(in.file) + c)
+ throw new IOException(s"class file '${in.file}' has location not matching its contents: contains $c")
}
private def parseErrorHandler[T]: PartialFunction[Throwable, T] = {
@@ -72,8 +68,8 @@ abstract class ClassfileParser {
}
@inline private def pushBusy[T](sym: Symbol)(body: => T): T = {
busy match {
- case Some(`sym`) => throw new IOException("unsatisfiable cyclic dependency in '%s'".format(sym))
- case Some(sym1) => throw new IOException("illegal class file dependency between '%s' and '%s'".format(sym, sym1))
+ case Some(`sym`) => throw new IOException(s"unsatisfiable cyclic dependency in '$sym'")
+ case Some(sym1) => throw new IOException(s"illegal class file dependency between '$sym' and '$sym1'")
case _ => ()
}
@@ -137,10 +133,13 @@ abstract class ClassfileParser {
(in.nextByte.toInt: @switch) match {
case CONSTANT_UTF8 | CONSTANT_UNICODE =>
in.skip(in.nextChar)
- case CONSTANT_CLASS | CONSTANT_STRING =>
+ case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE=>
in.skip(2)
+ case CONSTANT_METHODHANDLE =>
+ in.skip(3)
case CONSTANT_FIELDREF | CONSTANT_METHODREF | CONSTANT_INTFMETHODREF
- | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT =>
+ | CONSTANT_NAMEANDTYPE | CONSTANT_INTEGER | CONSTANT_FLOAT
+ | CONSTANT_INVOKEDYNAMIC =>
in.skip(4)
case CONSTANT_LONG | CONSTANT_DOUBLE =>
in.skip(8)
@@ -229,8 +228,6 @@ abstract class ClassfileParser {
forceMangledName(tpe0.typeSymbol.name, false)
val (name, tpe) = getNameAndType(in.getChar(start + 3), ownerTpe)
-// println("new tpe: " + tpe + " at phase: " + phase)
-
if (name == nme.MODULE_INSTANCE_FIELD) {
val index = in.getChar(start + 1)
val name = getExternalName(in.getChar(starts(index) + 1))
@@ -241,14 +238,12 @@ abstract class ClassfileParser {
} else {
val origName = nme.originalName(name)
val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol
-// println("\t" + owner.info.member(name).tpe.widen + " =:= " + tpe)
f = owner.info.findMember(origName, 0, 0, false).suchThat(_.tpe.widen =:= tpe)
if (f == NoSymbol)
f = owner.info.findMember(newTermName(origName + nme.LOCAL_SUFFIX_STRING), 0, 0, false).suchThat(_.tpe =:= tpe)
if (f == NoSymbol) {
// if it's an impl class, try to find it's static member inside the class
if (ownerTpe.typeSymbol.isImplClass) {
-// println("impl class, member: " + owner.tpe.member(origName) + ": " + owner.tpe.member(origName).tpe)
f = ownerTpe.findMember(origName, 0, 0, false).suchThat(_.tpe =:= tpe)
} else {
log("Couldn't find " + name + ": " + tpe + " inside: \n" + ownerTpe)
@@ -259,11 +254,13 @@ abstract class ClassfileParser {
f setInfo tpe
log("created fake member " + f.fullName)
}
-// println("\townerTpe.decls: " + ownerTpe.decls)
-// println("Looking for: " + name + ": " + tpe + " inside: " + ownerTpe.typeSymbol + "\n\tand found: " + ownerTpe.members)
}
}
- assert(f != NoSymbol, "could not find\n " + name + ": " + tpe + "\ninside:\n " + ownerTpe.members.mkString(", "))
+ assert(f != NoSymbol,
+ s"could not find $name: $tpe in $ownerTpe" + (
+ if (settings.debug.value) ownerTpe.members.mkString(", members are:\n ", "\n ", "") else ""
+ )
+ )
values(index) = f
}
f
@@ -436,63 +433,58 @@ abstract class ClassfileParser {
sym
}
- /** Return the class symbol of the given name. */
- def classNameToSymbol(name: Name): Symbol = {
- def loadClassSymbol(name: Name): Symbol = {
- val file = global.classPath findSourceFile ("" +name) getOrElse {
- // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
- // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
- // that are not in their correct place (see bug for details)
- if (!settings.isScaladoc)
- warning("Class " + name + " not found - continuing with a stub.")
- return NoSymbol.newClass(name.toTypeName)
- }
- val completer = new global.loaders.ClassfileLoader(file)
- var owner: Symbol = rootMirror.RootClass
- var sym: Symbol = NoSymbol
- var ss: Name = null
- var start = 0
- var end = name indexOf '.'
-
- while (end > 0) {
- ss = name.subName(start, end)
- sym = owner.info.decls lookup ss
- if (sym == NoSymbol) {
- sym = owner.newPackage(ss) setInfo completer
- sym.moduleClass setInfo completer
- owner.info.decls enter sym
- }
- owner = sym.moduleClass
- start = end + 1
- end = name.indexOf('.', start)
- }
- ss = name.subName(0, start)
- owner.info.decls lookup ss orElse {
- sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
- debuglog("loaded "+sym+" from file "+file)
- sym
+ private def loadClassSymbol(name: Name): Symbol = {
+ val file = global.classPath findSourceFile ("" +name) getOrElse {
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ warning(s"Class $name not found - continuing with a stub.")
+ return NoSymbol.newClass(name.toTypeName)
+ }
+ val completer = new global.loaders.ClassfileLoader(file)
+ var owner: Symbol = rootMirror.RootClass
+ var sym: Symbol = NoSymbol
+ var ss: Name = null
+ var start = 0
+ var end = name indexOf '.'
+
+ while (end > 0) {
+ ss = name.subName(start, end)
+ sym = owner.info.decls lookup ss
+ if (sym == NoSymbol) {
+ sym = owner.newPackage(ss) setInfo completer
+ sym.moduleClass setInfo completer
+ owner.info.decls enter sym
}
+ owner = sym.moduleClass
+ start = end + 1
+ end = name.indexOf('.', start)
}
-
- def lookupClass(name: Name) = try {
- if (name.pos('.') == name.length)
- definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
- else
- rootMirror.getClass(name) // see tickets #2464, #3756
- } catch {
- case _: FatalError => loadClassSymbol(name)
+ ss = name.subName(0, start)
+ owner.info.decls lookup ss orElse {
+ sym = owner.newClass(ss.toTypeName) setInfoAndEnter completer
+ debuglog("loaded "+sym+" from file "+file)
+ sym
}
+ }
+ /** FIXME - we shouldn't be doing ad hoc lookups in the empty package.
+ * The method called "getClassByName" should either return the class or not.
+ */
+ private def lookupClass(name: Name) = (
+ if (name containsChar '.')
+ rootMirror getClassByName name // see tickets #2464, #3756
+ else
+ definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName)
+ )
- innerClasses.get(name) match {
- case Some(entry) =>
- //println("found inner class " + name)
- val res = innerClasses.classSymbol(entry.externalName)
- //println("\trouted to: " + res)
- res
- case None =>
- //if (name.toString.contains("$")) println("No inner class: " + name + innerClasses + " while parsing " + in.file.name)
- lookupClass(name)
- }
+ /** Return the class symbol of the given name. */
+ def classNameToSymbol(name: Name): Symbol = {
+ if (innerClasses contains name)
+ innerClasses innerSymbol name
+ else
+ try lookupClass(name)
+ catch { case _: FatalError => loadClassSymbol(name) }
}
var sawPrivateConstructor = false
@@ -588,11 +580,9 @@ abstract class ClassfileParser {
def addEnclosingTParams(clazz: Symbol) {
var sym = clazz.owner
while (sym.isClass && !sym.isModuleClass) {
-// println("adding tparams of " + sym)
- for (t <- sym.tpe.typeArgs) {
-// println("\tadding " + (t.typeSymbol.name + "->" + t.typeSymbol))
+ for (t <- sym.tpe.typeArgs)
classTParams = classTParams + (t.typeSymbol.name -> t.typeSymbol)
- }
+
sym = sym.owner
}
}
@@ -646,7 +636,7 @@ abstract class ClassfileParser {
info match {
case MethodType(params, restpe) =>
// if this is a non-static inner class, remove the explicit outer parameter
- val newParams = innerClasses.get(currentClass) match {
+ val newParams = innerClasses getEntry currentClass match {
case Some(entry) if !isScalaRaw && !isStatic(entry.jflags) =>
/* About `clazz.owner.isPackage` below: SI-5957
* For every nested java class A$B, there are two symbols in the scala compiler.
@@ -742,13 +732,10 @@ abstract class ClassfileParser {
// raw type - existentially quantify all type parameters
val eparams = typeParamsToExistentials(classSym, classSym.unsafeTypeParams)
val t = typeRef(pre, classSym, eparams.map(_.tpeHK))
- val res = newExistentialType(eparams, t)
- if (settings.debug.value && settings.verbose.value)
- println("raw type " + classSym + " -> " + res)
- res
+ newExistentialType(eparams, t)
}
case tp =>
- assert(sig.charAt(index) != '<', tp)
+ assert(sig.charAt(index) != '<', s"sig=$sig, index=$index, tp=$tp")
tp
}
@@ -867,8 +854,6 @@ abstract class ClassfileParser {
val sig = pool.getExternalName(in.nextChar)
val newType = sigToType(sym, sig)
sym.setInfo(newType)
- if (settings.debug.value && settings.verbose.value)
- println("" + sym + "; signature = " + sig + " type = " + newType)
}
else in.skip(attrLen)
case tpnme.SyntheticATTR =>
@@ -885,10 +870,10 @@ abstract class ClassfileParser {
val c = pool.getConstant(in.nextChar)
val c1 = convertTo(c, symtype)
if (c1 ne null) sym.setInfo(ConstantType(c1))
- else println("failure to convert " + c + " to " + symtype); //debug
+ else debugwarn(s"failure to convert $c to $symtype")
case tpnme.ScalaSignatureATTR =>
if (!isScalaAnnot) {
- debuglog("warning: symbol " + sym.fullName + " has pickled signature in attribute")
+ debugwarn(s"symbol ${sym.fullName} has pickled signature in attribute")
unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name)
}
in.skip(attrLen)
@@ -933,6 +918,12 @@ abstract class ClassfileParser {
case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf
}
srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists)
+ case tpnme.CodeATTR =>
+ if (sym.owner.isInterface) {
+ sym setFlag DEFAULTMETHOD
+ log(s"$sym in ${sym.owner} is a java8+ default method.")
+ }
+ in.skip(attrLen)
case _ =>
in.skip(attrLen)
}
@@ -1020,16 +1011,18 @@ abstract class ClassfileParser {
}
if (hasError) None
else Some(AnnotationInfo(attrType, List(), nvpairs.toList))
- } catch {
- case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
- case ex: Throwable =>
+ }
+ catch {
+ case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found
+ case ex: java.lang.Error => throw ex
+ case ex: Throwable =>
// We want to be robust when annotations are unavailable, so the very least
// we can do is warn the user about the exception
// There was a reference to ticket 1135, but that is outdated: a reference to a class not on
// the classpath would *not* end up here. A class not found is signaled
// with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example),
// and that should never be swallowed silently.
- warning("Caught: " + ex + " while parsing annotations in " + in.file)
+ warning(s"Caught: $ex while parsing annotations in ${in.file}")
if (settings.debug.value) ex.printStackTrace()
None // ignore malformed annotations
@@ -1044,6 +1037,9 @@ abstract class ClassfileParser {
for (n <- 0 until nClasses) {
// FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065)
val cls = pool.getClassSymbol(in.nextChar.toInt)
+ // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation
+ // and that method requires Symbol to be forced to give the right answers, see SI-7107 for details
+ cls.initialize
sym.addThrowsAnnotation(cls)
}
}
@@ -1104,7 +1100,7 @@ abstract class ClassfileParser {
unlinkIfPresent(cName.toTypeName)
}
- for (entry <- innerClasses.values) {
+ for (entry <- innerClasses.entries) {
// create a new class member for immediate inner classes
if (entry.outerName == currentClass) {
val file = global.classPath.findSourceFile(entry.externalName.toString) getOrElse {
@@ -1142,14 +1138,9 @@ abstract class ClassfileParser {
case tpnme.InnerClassesATTR if !isScala =>
val entries = in.nextChar.toInt
for (i <- 0 until entries) {
- val innerIndex = in.nextChar.toInt
- val outerIndex = in.nextChar.toInt
- val nameIndex = in.nextChar.toInt
- val jflags = in.nextChar.toInt
- if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) {
- val entry = InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
- innerClasses += (pool.getClassName(innerIndex) -> entry)
- }
+ val innerIndex, outerIndex, nameIndex, jflags = in.nextChar.toInt
+ if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0)
+ innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags)
}
case _ =>
in.skip(attrLen)
@@ -1163,72 +1154,69 @@ abstract class ClassfileParser {
def externalName = pool getClassName external
def outerName = pool getClassName outer
def originalName = pool getName name
+ def isStatic = ClassfileParser.this.isStatic(jflags)
+ def isModule = originalName.isTermName
+ def scope = if (isStatic) staticScope else instanceScope
+ def enclosing = if (isStatic) enclModule else enclClass
+
+ // The name of the outer class, without its trailing $ if it has one.
+ private def strippedOuter = nme stripModuleSuffix outerName
+ private def isInner = innerClasses contains strippedOuter
+ private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter)
+ private def enclModule = enclClass.companionModule
+
+ private def staticWord = if (isStatic) "static " else ""
+ override def toString = s"$staticWord$originalName in $outerName ($externalName)"
+ }
- override def toString =
- originalName + " in " + outerName + "(" + externalName +")"
+ /** Return the Symbol of the top level class enclosing `name`,
+ * or the symbol of `name` itself if no enclosing classes are found.
+ */
+ def topLevelClass(name: Name): Symbol = innerClasses getEntry name match {
+ case Some(entry) => topLevelClass(entry.outerName)
+ case _ => classNameToSymbol(name)
}
- object innerClasses extends scala.collection.mutable.HashMap[Name, InnerClassEntry] {
- /** Return the Symbol of the top level class enclosing `name`,
- * or 'name's symbol if no entry found for `name`.
- */
- def topLevelClass(name: Name): Symbol = {
- val tlName = if (isDefinedAt(name)) {
- var entry = this(name)
- while (isDefinedAt(entry.outerName))
- entry = this(entry.outerName)
- entry.outerName
- } else
- name
- classNameToSymbol(tlName)
+ /** Return the class symbol for the given name. It looks it up in its outer class.
+ * Forces all outer class symbols to be completed.
+ *
+ * If the given name is not an inner class, it returns the symbol found in `definitions`.
+ */
+ object innerClasses {
+ private val inners = mutable.HashMap[Name, InnerClassEntry]()
+
+ def contains(name: Name) = inners contains name
+ def getEntry(name: Name) = inners get name
+ def entries = inners.values
+
+ def add(entry: InnerClassEntry): Unit = {
+ inners get entry.externalName foreach (existing =>
+ debugwarn(s"Overwriting inner class entry! Was $existing, now $entry")
+ )
+ inners(entry.externalName) = entry
}
-
- /** Return the class symbol for `externalName`. It looks it up in its outer class.
- * Forces all outer class symbols to be completed.
- *
- * If the given name is not an inner class, it returns the symbol found in `definitions`.
- */
- def classSymbol(externalName: Name): Symbol = {
- /** Return the symbol of `innerName`, having the given `externalName`. */
- def innerSymbol(externalName: Name, innerName: Name, static: Boolean): Symbol = {
- def getMember(sym: Symbol, name: Name): Symbol =
- if (static)
- if (sym == clazz) staticScope.lookup(name)
- else sym.companionModule.info.member(name)
- else
- if (sym == clazz) instanceScope.lookup(name)
- else sym.info.member(name)
-
- innerClasses.get(externalName) match {
- case Some(entry) =>
- val outerName = nme.stripModuleSuffix(entry.outerName)
- val sym = classSymbol(outerName)
- val s =
- // if loading during initialization of `definitions` typerPhase is not yet set.
- // in that case we simply load the member at the current phase
- if (currentRun.typerPhase != null)
- beforeTyper(getMember(sym, innerName.toTypeName))
- else
- getMember(sym, innerName.toTypeName)
-
- assert(s ne NoSymbol,
- "" + ((externalName, outerName, innerName, sym.fullLocationString)) + " / " +
- " while parsing " + ((in.file, busy)) +
- sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members
- )
- s
-
- case None =>
- classNameToSymbol(externalName)
- }
- }
-
- get(externalName) match {
- case Some(entry) =>
- innerSymbol(entry.externalName, entry.originalName, isStatic(entry.jflags))
- case None =>
- classNameToSymbol(externalName)
- }
+ def innerSymbol(externalName: Name): Symbol = this getEntry externalName match {
+ case Some(entry) => innerSymbol(entry)
+ case _ => NoSymbol
+ }
+ // if loading during initialization of `definitions` typerPhase is not yet set.
+ // in that case we simply load the member at the current phase
+ @inline private def enteringTyperIfPossible(body: => Symbol): Symbol =
+ if (currentRun.typerPhase eq null) body else beforeTyper(body)
+
+ private def innerSymbol(entry: InnerClassEntry): Symbol = {
+ val name = entry.originalName.toTypeName
+ val enclosing = entry.enclosing
+ def getMember = (
+ if (enclosing == clazz) entry.scope lookup name
+ else enclosing.info member name
+ )
+ enteringTyperIfPossible(getMember)
+ /** There used to be an assertion that this result is not NoSymbol; changing it to an error
+ * revealed it had been going off all the time, but has been swallowed by a catch t: Throwable
+ * in Repository.scala. Since it has been accomplishing nothing except misleading anyone who
+ * thought it wasn't triggering, I removed it entirely.
+ */
}
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 13c0d8993a..c304c18c4f 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -506,6 +506,13 @@ abstract class ICodeReader extends ClassfileParser {
code.emit(UNBOX(toTypeKind(m.info.resultType)))
else
code.emit(CALL_METHOD(m, Static(false)))
+ case JVM.invokedynamic =>
+ // TODO, this is just a placeholder. A real implementation must parse the class constant entry
+ debuglog("Found JVM invokedynamic instruction, inserting placeholder ICode INVOKE_DYNAMIC.")
+ containsInvokeDynamic = true
+ val poolEntry = in.nextChar
+ in.skip(2)
+ code.emit(INVOKE_DYNAMIC(poolEntry))
case JVM.new_ =>
code.emit(NEW(REFERENCE(pool.getClassSymbol(in.nextChar))))
@@ -644,6 +651,7 @@ abstract class ICodeReader extends ClassfileParser {
var containsDUPX = false
var containsNEW = false
var containsEHs = false
+ var containsInvokeDynamic = false
def emit(i: Instruction) {
instrs += ((pc, i))
@@ -662,6 +670,7 @@ abstract class ICodeReader extends ClassfileParser {
val code = new Code(method)
method.setCode(code)
method.bytecodeHasEHs = containsEHs
+ method.bytecodeHasInvokeDynamic = containsInvokeDynamic
var bb = code.startBlock
def makeBasicBlocks: mutable.Map[Int, BasicBlock] =
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index e8b0cd2696..c3aded2b2d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -293,7 +293,6 @@ abstract class Pickler extends SubComponent {
putTree(definition)
*/
case Template(parents, self, body) =>
- writeNat(parents.length)
putTrees(parents)
putTree(self)
putTrees(body)
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 889d309ba9..ead6ef288c 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -477,6 +477,7 @@ abstract class Erasure extends AddInterfaces
def checkPair(member: Symbol, other: Symbol) {
val otpe = specialErasure(root)(other.tpe)
val bridgeNeeded = afterErasure (
+ !member.isMacro &&
!(other.tpe =:= member.tpe) &&
!(deconstMap(other.tpe) =:= deconstMap(member.tpe)) &&
{ var e = bridgesScope.lookupEntry(member.name)
@@ -780,7 +781,7 @@ abstract class Erasure extends AddInterfaces
else if (tree.symbol == Any_isInstanceOf)
adaptMember(atPos(tree.pos)(Select(qual, Object_isInstanceOf)))
else if (tree.symbol.owner == AnyClass)
- adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, name))))
+ adaptMember(atPos(tree.pos)(Select(qual, getMember(ObjectClass, tree.symbol.name))))
else {
var qual1 = typedQualifier(qual)
if ((isPrimitiveValueType(qual1.tpe) && !isPrimitiveValueMember(tree.symbol)) ||
diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
index 2f28a16416..970519ab7c 100644
--- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala
@@ -45,26 +45,24 @@ abstract class ExplicitOuter extends InfoTransform
private def isInner(clazz: Symbol) =
!clazz.isPackageClass && !clazz.outerClass.isStaticOwner
- private def haveSameOuter(parent: Type, clazz: Symbol) = parent match {
- case TypeRef(pre, sym, _) =>
- val owner = clazz.owner
+ private def haveSameOuter(parent: Type, clazz: Symbol) = {
+ val owner = clazz.owner
+ val parentSym = parent.typeSymbol
- //println(s"have same outer $parent $clazz $sym ${sym.owner} $owner $pre")
-
- sym.isClass && owner.isClass &&
- (owner isSubClass sym.owner) &&
- owner.thisType =:= pre
-
- case _ => false
+ parentSym.isClass && owner.isClass &&
+ (owner isSubClass parentSym.owner) &&
+ owner.thisType =:= parent.prefix
}
/** Does given clazz define an outer field? */
def hasOuterField(clazz: Symbol) = {
- val parents = clazz.info.parents
+ val parent = clazz.info.firstParent
- isInner(clazz) && !clazz.isTrait && {
- parents.isEmpty || !haveSameOuter(parents.head, clazz)
- }
+ // space optimization: inherit the $outer pointer from the parent class if
+ // we know that it will point to the correct instance.
+ def canReuseParentOuterField = !parent.typeSymbol.isJavaDefined && haveSameOuter(parent, clazz)
+
+ isInner(clazz) && !clazz.isTrait && !canReuseParentOuterField
}
private def outerField(clazz: Symbol): Symbol = {
@@ -112,6 +110,29 @@ abstract class ExplicitOuter extends InfoTransform
sym setInfo clazz.outerClass.thisType
}
+ /**
+ * Will the outer accessor of the `clazz` subsume the outer accessor of
+ * `mixin`?
+ *
+ * This arises when an inner object mixes in its companion trait.
+ *
+ * {{{
+ * class C {
+ * trait T { C.this } // C$T$$$outer$ : C
+ * object T extends T { C.this } // C$T$$$outer$ : C.this.type
+ * }
+ * }}}
+ *
+ * See SI-7242.
+ }}
+ */
+ private def skipMixinOuterAccessor(clazz: Symbol, mixin: Symbol) = {
+ // Reliant on the current scheme for name expansion, the expanded name
+ // of the outer accessors in a trait and its companion object are the same.
+ // If the assumption is one day falsified, run/t7424.scala will let us know.
+ clazz.fullName == mixin.fullName
+ }
+
/** <p>
* The type transformation method:
* </p>
@@ -177,10 +198,14 @@ abstract class ExplicitOuter extends InfoTransform
for (mc <- clazz.mixinClasses) {
val mixinOuterAcc: Symbol = afterExplicitOuter(outerAccessor(mc))
if (mixinOuterAcc != NoSymbol) {
- if (decls1 eq decls) decls1 = decls.cloneScope
- val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
- newAcc setInfo (clazz.thisType memberType mixinOuterAcc)
- decls1 enter newAcc
+ if (skipMixinOuterAccessor(clazz, mc))
+ debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc")
+ else {
+ if (decls1 eq decls) decls1 = decls.cloneScope
+ val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED)
+ newAcc setInfo (clazz.thisType memberType mixinOuterAcc)
+ decls1 enter newAcc
+ }
}
}
}
@@ -390,6 +415,7 @@ abstract class ExplicitOuter extends InfoTransform
val outerAcc = outerAccessor(mixinClass) overridingSymbol currentClass
def mixinPrefix = (currentClass.thisType baseType mixinClass).prefix
assert(outerAcc != NoSymbol, "No outer accessor for inner mixin " + mixinClass + " in " + currentClass)
+ assert(outerAcc.alternatives.size == 1, s"Multiple outer accessors match inner mixin $mixinClass in $currentClass : ${outerAcc.alternatives.map(_.defString)}")
// I added the mixinPrefix.typeArgs.nonEmpty condition to address the
// crash in SI-4970. I feel quite sure this can be improved.
val path = (
@@ -492,7 +518,7 @@ abstract class ExplicitOuter extends InfoTransform
}
if (!currentClass.isTrait)
for (mc <- currentClass.mixinClasses)
- if (outerAccessor(mc) != NoSymbol)
+ if (outerAccessor(mc) != NoSymbol && !skipMixinOuterAccessor(currentClass, mc))
newDefs += mixinOuterAccessorDef(mc)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 232148676c..39716d4ed0 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -78,7 +78,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
*/
/** For a given class and concrete type arguments, give its specialized class */
- val specializedClass: mutable.Map[(Symbol, TypeEnv), Symbol] = new mutable.LinkedHashMap
+ val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol]
/** Map a method symbol to a list of its specialized overloads in the same class. */
private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil
@@ -796,7 +796,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
private def normalizeMember(owner: Symbol, sym: Symbol, outerEnv: TypeEnv): List[Symbol] = {
sym :: (
if (!sym.isMethod || beforeTyper(sym.typeParams.isEmpty)) Nil
- else {
+ else if (sym.hasDefault) {
+ /* Specializing default getters is useless, also see SI-7329 . */
+ sym.resetFlag(SPECIALIZED)
+ Nil
+ } else {
// debuglog("normalizeMember: " + sym.fullNameAsName('.').decode)
var specializingOn = specializedParams(sym)
val unusedStvars = specializingOn filterNot specializedTypeVars(sym.info)
@@ -976,27 +980,24 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
debuglog("specialized overload %s for %s in %s: %s".format(om, overriding.name.decode, pp(env), om.info))
typeEnv(om) = env
addConcreteSpecMethod(overriding)
- info(om) = (
- if (overriding.isDeferred) { // abstract override
- debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
- Forward(overriding)
- }
- else {
- // if the override is a normalized member, 'om' gets the
- // implementation from its original target, and adds the
- // environment of the normalized member (that is, any
- // specialized /method/ type parameter bindings)
- val impl = info get overriding match {
- case Some(NormalizedMember(target)) =>
- typeEnv(om) = env ++ typeEnv(overriding)
- target
- case _ =>
- overriding
- }
- info(overriding) = Forward(om setPos overriding.pos)
- SpecialOverride(impl)
+ if (overriding.isDeferred) { // abstract override
+ debuglog("abstract override " + overriding.fullName + " with specialized " + om.fullName)
+ info(om) = Forward(overriding)
+ }
+ else {
+ // if the override is a normalized member, 'om' gets the
+ // implementation from its original target, and adds the
+ // environment of the normalized member (that is, any
+ // specialized /method/ type parameter bindings)
+ info get overriding match {
+ case Some(NormalizedMember(target)) =>
+ typeEnv(om) = env ++ typeEnv(overriding)
+ info(om) = Forward(target)
+ case _ =>
+ info(om) = SpecialOverride(overriding)
}
- )
+ info(overriding) = Forward(om setPos overriding.pos)
+ }
newOverload(overriding, om, env)
ifDebug(afterSpecialize(assert(
overridden.owner.info.decl(om.name) != NoSymbol,
@@ -1776,21 +1777,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
/** Create specialized class definitions */
def implSpecClasses(trees: List[Tree]): List[Tree] = {
- val buf = new mutable.ListBuffer[Tree]
- for (tree <- trees)
- tree match {
- case ClassDef(_, _, _, impl) =>
- tree.symbol.info // force specialization
- for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) {
- val parents = specCls.info.parents.map(TypeTree)
- buf +=
- ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
- .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
- debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
- }
- case _ =>
- }
- buf.toList
+ trees flatMap {
+ case tree @ ClassDef(_, _, _, impl) =>
+ tree.symbol.info // force specialization
+ for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield {
+ debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env))
+ val parents = specCls.info.parents.map(TypeTree)
+ ClassDef(specCls, atPos(impl.pos)(Template(parents, emptyValDef, List()))
+ .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos
+ }
+ case _ => Nil
+ } sortBy (_.name.decoded)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index a767850cba..938499261e 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -220,7 +220,10 @@ abstract class TailCalls extends Transform {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
accessed += ctx.label
- typedPos(fun.pos)(Apply(Ident(ctx.label), noTailTransform(recv) :: transformArgs))
+ typedPos(fun.pos) {
+ val args = mapWithIndex(transformArgs)((arg, i) => mkAttributedCastHack(arg, ctx.label.info.params(i + 1).tpe))
+ Apply(Ident(ctx.label), noTailTransform(recv) :: args)
+ }
}
if (!ctx.isEligible) fail("it is neither private nor final so can be overridden")
@@ -280,7 +283,7 @@ abstract class TailCalls extends Transform {
typedPos(tree.pos)(Block(
List(ValDef(newThis, This(currentClass))),
- LabelDef(newCtx.label, newThis :: vpSyms, newRHS)
+ LabelDef(newCtx.label, newThis :: vpSyms, mkAttributedCastHack(newRHS, newCtx.label.tpe.resultType))
))
}
else {
@@ -377,6 +380,13 @@ abstract class TailCalls extends Transform {
super.transform(tree)
}
}
+
+ // Workaround for SI-6900. Uncurry installs an InfoTransformer and a tree Transformer.
+ // These leave us with conflicting view on method signatures; the parameter symbols in
+ // the MethodType can be clones of the ones originally found on the parameter ValDef, and
+ // consequently appearing in the typechecked RHS of the method.
+ private def mkAttributedCastHack(tree: Tree, tpe: Type) =
+ gen.mkAttributedCast(tree, tpe)
}
// collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index e9f403aea0..430129aaff 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -492,10 +492,14 @@ abstract class UnCurry extends InfoTransform
}
else {
log(s"Argument '$arg' at line ${arg.pos.safeLine} is $formal from ${fun.fullName}")
+ def canUseDirectly(recv: Tree) = (
+ recv.tpe.typeSymbol.isSubClass(FunctionClass(0))
+ && treeInfo.isExprSafeToInline(recv)
+ )
arg match {
// don't add a thunk for by-name argument if argument already is an application of
// a Function0. We can then remove the application and use the existing Function0.
- case Apply(Select(recv, nme.apply), Nil) if recv.tpe.typeSymbol isSubClass FunctionClass(0) =>
+ case Apply(Select(recv, nme.apply), Nil) if canUseDirectly(recv) =>
recv
case _ =>
newFunction0(arg)
@@ -619,7 +623,7 @@ abstract class UnCurry extends InfoTransform
val fn1 = withInPattern(false)(transform(fn))
val args1 = transformTrees(fn.symbol.name match {
case nme.unapply => args
- case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args.length))
+ case nme.unapplySeq => transformArgs(tree.pos, fn.symbol, args, analyzer.unapplyTypeList(fn.pos, fn.symbol, fn.tpe, args))
case _ => sys.error("internal error: UnApply node has wrong symbol")
})
treeCopy.UnApply(tree, fn1, args1)
@@ -814,7 +818,8 @@ abstract class UnCurry extends InfoTransform
*
* This transformation erases the dependent method types by:
* - Widening the formal parameter type to existentially abstract
- * over the prior parameters (using `packSymbols`)
+ * over the prior parameters (using `packSymbols`). This transformation
+ * is performed in the `InfoTransform`er [[scala.reflect.internal.transform.UnCurry]].
* - Inserting casts in the method body to cast to the original,
* precise type.
*
@@ -842,15 +847,14 @@ abstract class UnCurry extends InfoTransform
*/
def erase(dd: DefDef): (List[List[ValDef]], Tree) = {
import dd.{ vparamss, rhs }
- val vparamSyms = vparamss flatMap (_ map (_.symbol))
-
val paramTransforms: List[ParamTransform] =
- vparamss.flatten.map { p =>
- val declaredType = p.symbol.info
- // existentially abstract over value parameters
- val packedType = typer.packSymbols(vparamSyms, declaredType)
- if (packedType =:= declaredType) Identity(p)
+ map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) =>
+ val packedType = infoParam.info
+ if (packedType =:= p.symbol.info) Identity(p)
else {
+ // The Uncurry info transformer existentially abstracted over value parameters
+ // from the previous parameter lists.
+
// Change the type of the param symbol
p.symbol updateInfo packedType
@@ -862,8 +866,8 @@ abstract class UnCurry extends InfoTransform
// the method body to refer to this, rather than the parameter.
val tempVal: ValDef = {
val tempValName = unit freshTermName (p.name + "$")
- val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(declaredType)
- atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), declaredType)))
+ val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(p.symbol.info)
+ atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), p.symbol.info)))
}
Packed(newParam, tempVal)
}
@@ -881,13 +885,6 @@ abstract class UnCurry extends InfoTransform
Block(tempVals, rhsSubstituted)
}
- // update the type of the method after uncurry.
- dd.symbol updateInfo {
- val GenPolyType(tparams, tp) = dd.symbol.info
- logResult("erased dependent param types for ${dd.symbol.info}") {
- GenPolyType(tparams, MethodType(allParams map (_.symbol), tp.finalResultType))
- }
- }
(allParams :: Nil, rhs1)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
new file mode 100644
index 0000000000..dbe08315f4
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -0,0 +1,644 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab._
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.HashSet
+
+
+trait Logic extends Debugging {
+ import PatternMatchingStats._
+
+ private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
+ private def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
+ def toString(x: AnyRef) = if (x eq null) "" else x.toString
+ if (cols.isEmpty || cols.tails.isEmpty) cols map toString
+ else {
+ val colLens = cols map (c => toString(c).length)
+ val maxLen = max(colLens)
+ val avgLen = colLens.sum/colLens.length
+ val goalLen = maxLen min avgLen*2
+ def pad(s: String) = {
+ val toAdd = ((goalLen - s.length) max 0) + 2
+ (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
+ }
+ cols map (x => pad(toString(x)))
+ }
+ }
+
+ def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
+ val maxLen = max(xss map (_.length))
+ val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
+ padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
+ }
+
+ // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
+ // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
+ // propositional logic with constants and equality
+ trait PropositionalLogic {
+ type Type
+ type Tree
+
+ class Prop
+ case class Eq(p: Var, q: Const) extends Prop
+
+ type Const
+
+ type TypeConst <: Const
+ def TypeConst: TypeConstExtractor
+ trait TypeConstExtractor {
+ def apply(tp: Type): Const
+ }
+
+ type ValueConst <: Const
+ def ValueConst: ValueConstExtractor
+ trait ValueConstExtractor {
+ def apply(p: Tree): Const
+ }
+
+ val NullConst: Const
+
+ type Var <: AbsVar
+ val Var: VarExtractor
+ trait VarExtractor {
+ def apply(x: Tree): Var
+ def unapply(v: Var): Some[Tree]
+ }
+
+ // resets hash consing -- only supposed to be called by TreeMakersToProps
+ def prepareNewAnalysis(): Unit
+
+ trait AbsVar {
+ // indicate we may later require a prop for V = C
+ def registerEquality(c: Const): Unit
+
+ // call this to indicate null is part of the domain
+ def registerNull(): Unit
+
+ // can this variable be null?
+ def mayBeNull: Boolean
+
+ // compute the domain and return it (call registerNull first!)
+ def domainSyms: Option[Set[Sym]]
+
+ // the symbol for this variable being equal to its statically known type
+ // (only available if registerEquality has been called for that type before)
+ def symForStaticTp: Option[Sym]
+
+ // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
+ // registerEquality(c) must have been called prior to this call
+ // in fact, all equalities relevant to this variable must have been registered
+ def propForEqualsTo(c: Const): Prop
+
+ // populated by registerEquality
+ // once implications has been called, must not call registerEquality anymore
+ def implications: List[(Sym, List[Sym], List[Sym])]
+ }
+
+ // would be nice to statically check whether a prop is equational or pure,
+ // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
+ case class And(a: Prop, b: Prop) extends Prop
+ case class Or(a: Prop, b: Prop) extends Prop
+ case class Not(a: Prop) extends Prop
+
+ case object True extends Prop
+ case object False extends Prop
+
+ // symbols are propositions
+ abstract case class Sym(val variable: Var, val const: Const) extends Prop {
+ private[this] val id = Sym.nextSymId
+
+ override def toString = variable +"="+ const +"#"+ id
+ }
+ class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
+ object Sym {
+ private val uniques: HashSet[Sym] = new HashSet("uniques", 512)
+ def apply(variable: Var, const: Const): Sym = {
+ val newSym = new UniqueSym(variable, const)
+ (uniques findEntryOrUpdate newSym)
+ }
+ private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ }
+
+ def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
+ def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
+
+ trait PropTraverser {
+ def apply(x: Prop): Unit = x match {
+ case And(a, b) => apply(a); apply(b)
+ case Or(a, b) => apply(a); apply(b)
+ case Not(a) => apply(a)
+ case Eq(a, b) => applyVar(a); applyConst(b)
+ case _ =>
+ }
+ def applyVar(x: Var): Unit = {}
+ def applyConst(x: Const): Unit = {}
+ }
+
+ def gatherVariables(p: Prop): Set[Var] = {
+ val vars = new mutable.HashSet[Var]()
+ (new PropTraverser {
+ override def applyVar(v: Var) = vars += v
+ })(p)
+ vars.toSet
+ }
+
+ trait PropMap {
+ def apply(x: Prop): Prop = x match { // TODO: mapConserve
+ case And(a, b) => And(apply(a), apply(b))
+ case Or(a, b) => Or(apply(a), apply(b))
+ case Not(a) => Not(apply(a))
+ case p => p
+ }
+ }
+
+ // to govern how much time we spend analyzing matches for unreachability/exhaustivity
+ object AnalysisBudget {
+ import scala.tools.cmd.FromString.IntFromString
+ val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
+
+ abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
+
+ object exceeded extends Exception(
+ s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)")
+
+ }
+
+ // convert finite domain propositional logic with subtyping to pure boolean propositional logic
+ // a type test or a value equality test are modelled as a variable being equal to some constant
+ // a variable V may be assigned multiple constants, as long as they do not contradict each other
+ // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments
+ // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain
+ // in a prelude (the equality axioms)
+ // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain
+ // 2. for each variable V in props, and each constant C it is compared to,
+ // compute which assignments imply each other (as in the example above: V = 1 implies V = Int)
+ // and which assignments are mutually exclusive (V = String implies -(V = Int))
+ //
+ // note that this is a conservative approximation: V = Constant(A) and V = Constant(B)
+ // are considered mutually exclusive (and thus both cases are considered reachable in {case A => case B =>}),
+ // even though A may be equal to B (and thus the second case is not "dynamically reachable")
+ //
+ // TODO: for V1 representing x1 and V2 standing for x1.head, encode that
+ // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
+ // may throw an AnalysisBudget.Exception
+ def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
+
+ val vars = new scala.collection.mutable.HashSet[Var]
+
+ object gatherEqualities extends PropTraverser {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) =>
+ vars += v
+ v.registerEquality(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ object rewriteEqualsToProp extends PropMap {
+ override def apply(p: Prop) = p match {
+ case Eq(v, c) => v.propForEqualsTo(c)
+ case _ => super.apply(p)
+ }
+ }
+
+ props foreach gatherEqualities.apply
+ if (modelNull) vars foreach (_.registerNull)
+
+ val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
+
+ val eqAxioms = formulaBuilder
+ @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
+
+ debug.patmat("removeVarEq vars: "+ vars)
+ vars.foreach { v =>
+ // if v.domainSyms.isEmpty, we must consider the domain to be infinite
+ // otherwise, since the domain fully partitions the type of the value,
+ // exactly one of the types (and whatever it implies, imposed separately) must be chosen
+ // consider X ::= A | B | C, and A => B
+ // coverage is formulated as: A \/ B \/ C and the implications are
+ v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
+
+ // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
+ // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
+ v.symForStaticTp foreach { symForStaticTp =>
+ if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
+ else addAxiom(symForStaticTp)
+ }
+
+ v.implications foreach { case (sym, implied, excluded) =>
+ // when sym is true, what must hold...
+ implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
+ // ... and what must not?
+ excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
+ }
+ }
+
+ debug.patmat("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
+ debug.patmat("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
+
+ (toFormula(eqAxioms), pure)
+ }
+
+
+ // an interface that should be suitable for feeding a SAT solver when the time comes
+ type Formula
+ type FormulaBuilder
+
+ // creates an empty formula builder to which more formulae can be added
+ def formulaBuilder: FormulaBuilder
+
+ // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
+ // toFormula(f) == andFormula(f1, andFormula(..., fN))
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit
+ def toFormula(buff: FormulaBuilder): Formula
+
+ // the conjunction of formulae `a` and `b`
+ def andFormula(a: Formula, b: Formula): Formula
+
+ // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
+ def simplifyFormula(a: Formula): Formula
+
+ // may throw an AnalysisBudget.Exception
+ def propToSolvable(p: Prop): Formula = {
+ val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
+ andFormula(eqAxioms, pure)
+ }
+
+ // may throw an AnalysisBudget.Exception
+ def eqFreePropToSolvable(p: Prop): Formula
+ def cnfString(f: Formula): String
+
+ type Model = Map[Sym, Boolean]
+ val EmptyModel: Model
+ val NoModel: Model
+
+ def findModelFor(f: Formula): Model
+ def findAllModelsFor(f: Formula): List[Model]
+ }
+}
+
+trait ScalaLogic extends Interface with Logic with TreeAndTypeAnalysis {
+ trait TreesAndTypesDomain extends PropositionalLogic with CheckableTreeAndTypeAnalysis {
+ type Type = global.Type
+ type Tree = global.Tree
+
+ // resets hash consing -- only supposed to be called by TreeMakersToProps
+ def prepareNewAnalysis(): Unit = { Var.resetUniques(); Const.resetUniques() }
+
+ object Var extends VarExtractor {
+ private var _nextId = 0
+ def nextId = {_nextId += 1; _nextId}
+
+ def resetUniques() = {_nextId = 0; uniques.clear()}
+ private val uniques = new mutable.HashMap[Tree, Var]
+ def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
+ def unapply(v: Var) = Some(v.path)
+ }
+ class Var(val path: Tree, staticTp: Type) extends AbsVar {
+ private[this] val id: Int = Var.nextId
+
+ // private[this] var canModify: Option[Array[StackTraceElement]] = None
+ private[this] def ensureCanModify() = {} //if (canModify.nonEmpty) debug.patmat("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
+
+ private[this] def observed() = {} //canModify = Some(Thread.currentThread.getStackTrace)
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private[this] val symForEqualsTo = new mutable.HashMap[Const, Sym]
+
+ // when looking at the domain, we only care about types we can check at run time
+ val staticTpCheckable: Type = checkableType(staticTp)
+
+ private[this] var _mayBeNull = false
+ def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
+ def mayBeNull: Boolean = _mayBeNull
+
+ // case None => domain is unknown,
+ // case Some(List(tps: _*)) => domain is exactly tps
+ // we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
+ // once we go to run-time checks (on Const's), convert them to checkable types
+ // TODO: there seems to be bug for singleton domains (variable does not show up in model)
+ lazy val domain: Option[Set[Const]] = {
+ val subConsts = enumerateSubtypes(staticTp).map{ tps =>
+ tps.toSet[Type].map{ tp =>
+ val domainC = TypeConst(tp)
+ registerEquality(domainC)
+ domainC
+ }
+ }
+
+ val allConsts =
+ if (mayBeNull) {
+ registerEquality(NullConst)
+ subConsts map (_ + NullConst)
+ } else
+ subConsts
+
+ observed; allConsts
+ }
+
+ // populate equalitySyms
+ // don't care about the result, but want only one fresh symbol per distinct constant c
+ def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
+
+ // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
+ // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
+ def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
+
+ // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]
+ /** the information needed to construct the boolean proposition that encodes the equality proposition (V = C)
+ *
+ * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
+ * and the constant pattern yields a ValueConst C
+ *
+ * for exhaustivity, we really only need implication (e.g., V = 1 implies that V = 1 /\ V = Int, if both tests occur in the match,
+ * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
+ */
+ lazy val implications = {
+ /** when we know V = C, which other equalities must hold
+ *
+ * in general, equality to some type implies equality to its supertypes
+ * (this multi-valued kind of equality is necessary for unreachability)
+ * note that we use subtyping as a model for implication between instanceof tests
+ * i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ * unfortunately this is not true in general (see e.g. SI-6022)
+ */
+ def implies(lower: Const, upper: Const): Boolean =
+ // values and null
+ lower == upper ||
+ // type implication
+ (lower != NullConst && !upper.isValue &&
+ instanceOfTpImplies(if (lower.isValue) lower.wideTp else lower.tp, upper.tp))
+
+ // if(r) debug.patmat("implies : "+(lower, lower.tp, upper, upper.tp))
+ // else debug.patmat("NOT implies: "+(lower, upper))
+
+
+ /** Does V=A preclude V=B?
+ *
+ * (0) A or B must be in the domain to draw any conclusions.
+ *
+       * For example, knowing that the scrutinee is *not* true does not
+ * statically exclude it from being `X`, because that is an opaque
+ * Boolean.
+ *
+ * val X = true
+ * (true: Boolean) match { case true => case X <reachable> }
+ *
+ * (1) V = null excludes assignment to any other constant (modulo point #0). This includes
+ * both values and type tests (which are both modelled here as `Const`)
+ * (2) V = A and V = B, for A and B domain constants, are mutually exclusive unless A == B
+ *
+       * (3) We only reason about type tests as being excluded by null assignments, otherwise we
+ * only consider value assignments.
+ * TODO: refine this, a == 0 excludes a: String, or `a: Int` excludes `a: String`
+ * (since no value can be of both types. See also SI-7211)
+ *
+ * NOTE: V = 1 does not preclude V = Int, or V = Any, it could be said to preclude
+ * V = String, but we don't model that.
+ */
+ def excludes(a: Const, b: Const): Boolean = {
+ val bothInDomain = domain exists (d => d(a) && d(b))
+ val eitherIsNull = a == NullConst || b == NullConst
+ val bothAreValues = a.isValue && b.isValue
+ bothInDomain && (eitherIsNull || bothAreValues) && (a != b)
+ }
+
+ // if(r) debug.patmat("excludes : "+(a, a.tp, b, b.tp))
+ // else debug.patmat("NOT excludes: "+(a, b))
+
+/*
+[ HALF BAKED FANCINESS: //!equalitySyms.exists(common => implies(common.const, a) && implies(common.const, b)))
+ when type tests are involved, we reason (conservatively) under a closed world assumption,
+ since we are really only trying to counter the effects of the symbols that we introduce to model type tests
+ we don't aim to model the whole subtyping hierarchy, simply to encode enough about subtyping to do unreachability properly
+
+ consider the following hierarchy:
+
+ trait A
+ trait B
+ trait C
+ trait AB extends B with A
+
+ // two types are mutually exclusive if there is no equality symbol whose constant implies both
+ object Test extends App {
+ def foo(x: Any) = x match {
+ case _ : C => println("C")
+ case _ : AB => println("AB")
+ case _ : (A with B) => println("AB'")
+ case _ : B => println("B")
+ case _ : A => println("A")
+ }
+
+ of course this kind of reasoning is not true in general,
+ but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
+*/
+
+ val excludedPair = new mutable.HashSet[ExcludedPair]
+
+ case class ExcludedPair(a: Const, b: Const) {
+ override def equals(o: Any) = o match {
+ case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
+ case _ => false
+ }
+ // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
+ override def hashCode = a.hashCode ^ b.hashCode
+ }
+
+ equalitySyms map { sym =>
+ // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
+ // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
+ val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
+ val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const))
+ val implied = notExcluded filter (b => implies(sym.const, b.const))
+
+ debug.patmat("eq axioms for: "+ sym.const)
+ debug.patmat("excluded: "+ excluded)
+ debug.patmat("implied: "+ implied)
+
+ excluded foreach { excludedSym => excludedPair += ExcludedPair(sym.const, excludedSym.const)}
+
+ (sym, implied, excluded)
+ }
+ }
+
+ // accessing after calling registerNull will result in inconsistencies
+ lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
+
+ lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
+
+ // don't access until all potential equalities have been registered using registerEquality
+ private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
+
+ // don't call until all equalities have been registered and registerNull has been called (if needed)
+ def describe = {
+ def domain_s = domain match {
+ case Some(d) => d mkString (" ::= ", " | ", "// "+ symForEqualsTo.keys)
+ case _ => symForEqualsTo.keys mkString (" ::= ", " | ", " | ...")
+ }
+ s"$this: ${staticTp}${domain_s} // = $path"
+ }
+ override def toString = "V"+ id
+ }
+
+
+ import global.{ConstantType, Constant, SingletonType, Literal, Ident, singleType}
+ import global.definitions.{AnyClass, UnitClass}
+
+
+ // all our variables range over types
+ // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
+ // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
+ object Const {
+ def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
+
+ private var _nextTypeId = 0
+ def nextTypeId = {_nextTypeId += 1; _nextTypeId}
+
+ private var _nextValueId = 0
+ def nextValueId = {_nextValueId += 1; _nextValueId}
+
+ private val uniques = new mutable.HashMap[Type, Const]
+ private[TreesAndTypesDomain] def unique(tp: Type, mkFresh: => Const): Const =
+ uniques.get(tp).getOrElse(
+ uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
+ case Some((_, c)) =>
+ debug.patmat("unique const: "+ (tp, c))
+ c
+ case _ =>
+ val fresh = mkFresh
+ debug.patmat("uniqued const: "+ (tp, fresh))
+ uniques(tp) = fresh
+ fresh
+ })
+
+ private val trees = mutable.HashSet.empty[Tree]
+
+ // hashconsing trees (modulo value-equality)
+ private[TreesAndTypesDomain] def uniqueTpForTree(t: Tree): Type =
+ // a new type for every unstable symbol -- only stable value are uniqued
+ // technically, an unreachable value may change between cases
+ // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding
+ // (and thuuuuus, the latter case must be considered reachable)
+ if (!t.symbol.isStable) t.tpe.narrow
+ else trees find (a => a.correspondsStructure(t)(sameValue)) match {
+ case Some(orig) =>
+ debug.patmat("unique tp for tree: "+ (orig, orig.tpe))
+ orig.tpe
+ case _ =>
+ // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
+ val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
+ debug.patmat("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
+ trees += treeWithNarrowedType
+ treeWithNarrowedType.tpe
+ }
+ }
+
+ sealed abstract class Const {
+ def tp: Type
+ def wideTp: Type
+
+ def isAny = wideTp.typeSymbol == AnyClass
+ def isValue: Boolean //= tp.isStable
+
+ // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
+ // the equals inherited from AnyRef does just this
+ }
+
+ // find most precise super-type of tp that is a class
+ // we skip non-class types (singleton types, abstract types) so that we can
+ // correctly compute how types relate in terms of the values they rule out
+ // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
+ // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
+ // (At least conceptually: `true` is an instance of class `Boolean`)
+ private def widenToClass(tp: Type): Type =
+ if (tp.typeSymbol.isClass) tp
+ else tp.baseType(tp.baseClasses.head)
+
+ object TypeConst extends TypeConstExtractor {
+ def apply(tp: Type) = {
+ if (tp =:= NullTp) NullConst
+ else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
+ else Const.unique(tp, new TypeConst(tp))
+ }
+ def unapply(c: TypeConst): Some[Type] = Some(c.tp)
+ }
+
+ // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
+ sealed class TypeConst(val tp: Type) extends Const {
+ assert(!(tp =:= NullTp))
+ /*private[this] val id: Int = */ Const.nextTypeId
+
+ val wideTp = widenToClass(tp)
+ def isValue = false
+ override def toString = tp.toString //+"#"+ id
+ }
+
+ // p is a unique type or a constant value
+ object ValueConst extends ValueConstExtractor {
+ def fromType(tp: Type) = {
+ assert(tp.isInstanceOf[SingletonType])
+ val toString = tp match {
+ case ConstantType(c) => c.escapedStringValue
+ case _ if tp.typeSymbol.isModuleClass => tp.typeSymbol.name.toString
+ case _ => tp.toString
+ }
+ Const.unique(tp, new ValueConst(tp, tp.widen, toString))
+ }
+ def apply(p: Tree) = {
+ val tp = p.tpe.normalize
+ if (tp =:= NullTp) NullConst
+ else {
+ val wideTp = widenToClass(tp)
+
+ val narrowTp =
+ if (tp.isInstanceOf[SingletonType]) tp
+ else p match {
+ case Literal(c) =>
+ if (c.tpe.typeSymbol == UnitClass) c.tpe
+ else ConstantType(c)
+ case Ident(_) if p.symbol.isStable =>
+ // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
+ // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
+ singleType(tp.prefix, p.symbol)
+ case _ =>
+ Const.uniqueTpForTree(p)
+ }
+
+ val toString =
+ if (hasStableSymbol(p)) p.symbol.name.toString // tp.toString
+ else p.toString //+"#"+ id
+
+ Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst
+ }
+ }
+ }
+ sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
+ // debug.patmat("VC"+(tp, wideTp, toString))
+ assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
+ /*private[this] val id: Int = */Const.nextValueId
+ def isValue = true
+ }
+
+
+ lazy val NullTp = ConstantType(Constant(null))
+ case object NullConst extends Const {
+ def tp = NullTp
+ def wideTp = NullTp
+
+ def isValue = true
+ override def toString = "null"
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
new file mode 100644
index 0000000000..9558542533
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala
@@ -0,0 +1,709 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+
+trait TreeAndTypeAnalysis extends Debugging {
+ import global.{Tree, Type, Symbol, definitions, analyzer,
+ ConstantType, Literal, Constant, appliedType, WildcardType, TypeRef, ModuleClassSymbol,
+ nestedMemberType, TypeMap, Ident}
+
+ import definitions._
+ import analyzer.Typer
+
+
+ // we use subtyping as a model for implication between instanceof tests
+ // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
+ // unfortunately this is not true in general:
+ // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
+ def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
+ val tpValue = tp.typeSymbol.isPrimitiveValueClass
+
+ // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
+ // (and the subtype is respectively a value type or not a value type)
+ // this allows us to reuse subtyping as a model for implication between instanceOf tests
+ // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
+ val tpImpliedNormalizedToAny =
+ if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
+ else tpImplied
+
+ tp <:< tpImpliedNormalizedToAny
+ }
+
+ // TODO: improve, e.g., for constants
+ def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
+ case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
+ case _ => false
+ })
+
+ trait CheckableTreeAndTypeAnalysis {
+ val typer: Typer
+
+ // TODO: domain of other feasibly enumerable built-in types (char?)
+ def enumerateSubtypes(tp: Type): Option[List[Type]] =
+ tp.typeSymbol match {
+ // TODO case _ if tp.isTupleType => // recurse into component types?
+ case UnitClass =>
+ Some(List(UnitClass.tpe))
+ case BooleanClass =>
+ Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
+ // TODO case _ if tp.isTupleType => // recurse into component types
+ case modSym: ModuleClassSymbol =>
+ Some(List(tp))
+ // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
+ case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
+ debug.patmat("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
+ None
+ case sym =>
+ val subclasses = (
+ sym.sealedDescendants.toList sortBy (_.sealedSortName)
+ // symbols which are both sealed and abstract need not be covered themselves, because
+ // all of their children must be and they cannot otherwise be created.
+ filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
+ debug.patmat("enum sealed -- subclasses: "+ (sym, subclasses))
+
+ val tpApprox = typer.infer.approximateAbstracts(tp)
+ val pre = tpApprox.prefix
+ // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
+ val validSubTypes = (subclasses flatMap {sym =>
+ // have to filter out children which cannot match: see ticket #3683 for an example
+ // compare to the fully known type `tp` (modulo abstract types),
+ // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
+          // however, must approximate abstract types in the subtype before comparing (see subTpApprox below)
+
+ val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
+ val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
+ val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
+ // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
+ if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
+ else None
+ })
+ debug.patmat("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
+ Some(validSubTypes)
+ }
+
+ // approximate a type to the static type that is fully checkable at run time,
+ // hiding statically known but dynamically uncheckable information using existential quantification
+ // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
+ def checkableType(tp: Type): Type = {
+ // TODO: this is extremely rough...
+ // replace type args by wildcards, since they can't be checked (don't use existentials: overkill)
+ // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
+ // similar to typer.infer.approximateAbstracts
+ object typeArgsToWildcardsExceptArray extends TypeMap {
+ // SI-6771 dealias would be enough today, but future proofing with the dealiasWiden.
+ // See neg/t6771b.scala for elaboration
+ def apply(tp: Type): Type = tp.dealiasWiden match {
+ case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
+ TypeRef(pre, sym, args map (_ => WildcardType))
+ case _ =>
+ mapOver(tp)
+ }
+ }
+
+ val res = typeArgsToWildcardsExceptArray(tp)
+ debug.patmat("checkable "+(tp, res))
+ res
+ }
+
+ // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
+ // we consider tuple types with at least one component of a checkable type as a checkable type
+ def uncheckableType(tp: Type): Boolean = {
+ def tupleComponents(tp: Type) = tp.normalize.typeArgs
+ val checkable = (
+ (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
+ || enumerateSubtypes(tp).nonEmpty)
+ // if (!checkable) debug.patmat("deemed uncheckable: "+ tp)
+ !checkable
+ }
+ }
+}
+
+trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchTreeMaking {
+ import global.{Tree, Type, NoType, Symbol, NoSymbol, ConstantType, Literal, Constant, Ident, UniqueType, RefinedType, EmptyScope}
+ import global.definitions.{ListClass, NilModule}
+
+ /**
+ * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
+ *
+ */
+ trait MatchApproximator extends TreeMakers with TreesAndTypesDomain {
+ object Test {
+ var currId = 0
+ }
+ case class Test(prop: Prop, treeMaker: TreeMaker) {
+ // private val reusedBy = new scala.collection.mutable.HashSet[Test]
+ var reuses: Option[Test] = None
+ def registerReuseBy(later: Test): Unit = {
+ assert(later.reuses.isEmpty, later.reuses)
+ // reusedBy += later
+ later.reuses = Some(this)
+ }
+
+ val id = { Test.currId += 1; Test.currId}
+ override def toString =
+ "T"+ id + "C("+ prop +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
+ }
+
+
+ class TreeMakersToPropsIgnoreNullChecks(root: Symbol) extends TreeMakersToProps(root) {
+ override def uniqueNonNullProp(p: Tree): Prop = True
+ }
+
+ // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
+ class TreeMakersToProps(val root: Symbol) {
+ prepareNewAnalysis() // reset hash consing for Var and Const
+
+ private[this] val uniqueEqualityProps = new scala.collection.mutable.HashMap[(Tree, Tree), Eq]
+ private[this] val uniqueNonNullProps = new scala.collection.mutable.HashMap[Tree, Not]
+ private[this] val uniqueTypeProps = new scala.collection.mutable.HashMap[(Tree, Type), Eq]
+
+ def uniqueEqualityProp(testedPath: Tree, rhs: Tree): Prop =
+ uniqueEqualityProps getOrElseUpdate((testedPath, rhs), Eq(Var(testedPath), ValueConst(rhs)))
+
+ // overridden in TreeMakersToPropsIgnoreNullChecks
+ def uniqueNonNullProp (testedPath: Tree): Prop =
+ uniqueNonNullProps getOrElseUpdate(testedPath, Not(Eq(Var(testedPath), NullConst)))
+
+ def uniqueTypeProp(testedPath: Tree, pt: Type): Prop =
+ uniqueTypeProps getOrElseUpdate((testedPath, pt), Eq(Var(testedPath), TypeConst(checkableType(pt))))
+
+ // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
+ private val pointsToBound = mutable.HashSet(root)
+ private val trees = mutable.HashSet.empty[Tree]
+
+ // the substitution that renames variables to variables in pointsToBound
+ private var normalize: Substitution = EmptySubstitution
+ private var substitutionComputed = false
+
+ // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
+ // in the end, instead of having x1, x1.hd, x2, x2.hd, ... flying around,
+ // we want something like x1, x1.hd, x1.hd.tl, x1.hd.tl.hd, so that we can easily recognize when
+ // we're testing the same variable
+ // TODO check:
+ // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
+ private var accumSubst: Substitution = EmptySubstitution
+
+ // hashconsing trees (modulo value-equality)
+ def unique(t: Tree, tpOverride: Type = NoType): Tree =
+ trees find (a => a.correspondsStructure(t)(sameValue)) match {
+ case Some(orig) =>
+ // debug.patmat("unique: "+ (t eq orig, orig))
+ orig
+ case _ =>
+ trees += t
+ if (tpOverride != NoType) t setType tpOverride
+ else t
+ }
+
+ def uniqueTp(tp: Type): Type = tp match {
+ // typerefs etc are already hashconsed
+ case _ : UniqueType => tp
+ case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
+ case _ => tp
+ }
+
+ // produce the unique tree used to refer to this binder
+ // the type of the binder passed to the first invocation
+ // determines the type of the tree that'll be returned for that binder as of then
+ final def binderToUniqueTree(b: Symbol) =
+ unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
+
+ // note that the sequencing of operations is important: must visit in same order as match execution
+ // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
+ abstract class TreeMakerToProp extends (TreeMaker => Prop) {
+ // requires(if (!substitutionComputed))
+ def updateSubstitution(subst: Substitution): Unit = {
+ // find part of substitution that replaces bound symbols by new symbols, and reverse that part
+ // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
+ val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
+ case (f, t) =>
+ t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
+ }
+ val (boundFrom, boundTo) = boundSubst.unzip
+ val (unboundFrom, unboundTo) = unboundSubst.unzip
+
+ // reverse substitution that would otherwise replace a variable we already encountered by a new variable
+ // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
+ normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
+ // debug.patmat ("normalize subst: "+ normalize)
+
+ val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
+ pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
+ // debug.patmat("pointsToBound: "+ pointsToBound)
+
+ accumSubst >>= okSubst
+ // debug.patmat("accumSubst: "+ accumSubst)
+ }
+
+ def handleUnknown(tm: TreeMaker): Prop
+
+ /** apply itself must render a faithful representation of the TreeMaker
+ *
+ * Concretely, True must only be used to represent a TreeMaker that is sure to match and that does not do any computation at all
+ * e.g., doCSE relies on apply itself being sound in this sense (since it drops TreeMakers that are approximated to True -- SI-6077)
+ *
+ * handleUnknown may be customized by the caller to approximate further
+ *
+ * TODO: don't ignore outer-checks
+ */
+ def apply(tm: TreeMaker): Prop = {
+ if (!substitutionComputed) updateSubstitution(tm.subPatternsAsSubstitution)
+
+ tm match {
+ case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
+ object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
+ type Result = Prop
+ def and(a: Result, b: Result) = And(a, b)
+ def outerTest(testedBinder: Symbol, expectedTp: Type) = True // TODO OuterEqProp(testedBinder, expectedType)
+ def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
+ val p = binderToUniqueTree(b); And(uniqueNonNullProp(p), uniqueTypeProp(p, uniqueTp(pt)))
+ }
+ def nonNullTest(testedBinder: Symbol) = uniqueNonNullProp(binderToUniqueTree(testedBinder))
+ def equalsTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat))
+ def eqTest(pat: Tree, testedBinder: Symbol) = uniqueEqualityProp(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
+ def tru = True
+ }
+ ttm.renderCondition(condStrategy)
+ case EqualityTestTreeMaker(prevBinder, patTree, _) => uniqueEqualityProp(binderToUniqueTree(prevBinder), unique(patTree))
+ case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map this)))
+ case ProductExtractorTreeMaker(testedBinder, None) => uniqueNonNullProp(binderToUniqueTree(testedBinder))
+ case SubstOnlyTreeMaker(_, _) => True
+ case GuardTreeMaker(guard) =>
+ guard.tpe match {
+ case ConstantType(Constant(true)) => True
+ case ConstantType(Constant(false)) => False
+ case _ => handleUnknown(tm)
+ }
+ case ExtractorTreeMaker(_, _, _) |
+ ProductExtractorTreeMaker(_, _) |
+ BodyTreeMaker(_, _) => handleUnknown(tm)
+ }
+ }
+ }
+
+
+ private val irrefutableExtractor: PartialFunction[TreeMaker, Prop] = {
+ // the extra condition is None, the extractor's result indicates it always succeeds,
+ // (the potential type-test for the argument is represented by a separate TypeTestTreeMaker)
+ case IrrefutableExtractorTreeMaker(_, _) => True
+ }
+
+ // special-case: interpret pattern `List()` as `Nil`
+ // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
+ private val rewriteListPattern: PartialFunction[TreeMaker, Prop] = {
+ case p @ ExtractorTreeMaker(_, _, testedBinder)
+ if testedBinder.tpe.typeSymbol == ListClass && p.checkedLength == Some(0) =>
+ uniqueEqualityProp(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
+ }
+ val fullRewrite = (irrefutableExtractor orElse rewriteListPattern)
+ val refutableRewrite = irrefutableExtractor
+
+ @inline def onUnknown(handler: TreeMaker => Prop) = new TreeMakerToProp {
+ def handleUnknown(tm: TreeMaker) = handler(tm)
+ }
+
+      // used for CSE -- rewrite all unknowns to False (the most conservative option)
+ object conservative extends TreeMakerToProp {
+ def handleUnknown(tm: TreeMaker) = False
+ }
+
+ final def approximateMatch(cases: List[List[TreeMaker]], treeMakerToProp: TreeMakerToProp = conservative) ={
+ val testss = cases.map { _ map (tm => Test(treeMakerToProp(tm), tm)) }
+ substitutionComputed = true // a second call to approximateMatch should not re-compute the substitution (would be wrong)
+ testss
+ }
+ }
+
+ def approximateMatchConservative(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] =
+ (new TreeMakersToProps(root)).approximateMatch(cases)
+
+ // turns a case (represented as a list of abstract tests)
+ // into a proposition that is satisfiable if the case may match
+ protected final def caseWithoutBodyToProp(tests: List[Test]): Prop =
+ /\(tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker]).map(t => t.prop))
+
+ def showTreeMakers(cases: List[List[TreeMaker]]) = {
+ debug.patmat("treeMakers:")
+ debug.patmat(alignAcrossRows(cases, ">>"))
+ }
+
+ def showTests(testss: List[List[Test]]) = {
+ debug.patmat("tests: ")
+ debug.patmat(alignAcrossRows(testss, "&"))
+ }
+ }
+
+}
+
+trait MatchAnalysis extends MatchApproximation {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, NoSymbol, Ident, Select}
+ import global.definitions.{isPrimitiveValueClass, ConsClass, isTupleSymbol}
+
+ trait MatchAnalyzer extends MatchApproximator {
+ def uncheckedWarning(pos: Position, msg: String) = global.currentUnit.uncheckedWarning(pos, msg)
+ def warn(pos: Position, ex: AnalysisBudget.Exception, kind: String) = uncheckedWarning(pos, s"Cannot check match for $kind.\n${ex.advice}")
+
+ // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
+ // right now hackily implement this by pruning counter-examples
+ // unreachability would also benefit from a more faithful representation
+
+
+ // reachability (dead code)
+
+ // computes the first 0-based case index that is unreachable (if any)
+ // a case is unreachable if it implies its preceding cases
+ // call C the formula that is satisfiable if the considered case matches
+ // call P the formula that is satisfiable if the cases preceding it match
+ // the case is reachable if there is a model for -P /\ C,
+ // thus, the case is unreachable if there is no model for -(-P /\ C),
+ // or, equivalently, P \/ -C, or C => P
+ def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null
+
+ // use the same approximator so we share variables,
+ // but need different conditions depending on whether we're conservatively looking for failure or success
+      // don't rewrite List-like patterns, as List() and Nil need to be distinguished for unreachability
+ val approx = new TreeMakersToProps(prevBinder)
+ def approximate(default: Prop) = approx.approximateMatch(cases, approx.onUnknown { tm =>
+ approx.refutableRewrite.applyOrElse(tm, (_: TreeMaker) => default )
+ })
+
+ val propsCasesOk = approximate(True) map caseWithoutBodyToProp
+ val propsCasesFail = approximate(False) map (t => Not(caseWithoutBodyToProp(t)))
+
+ try {
+ val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
+ val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
+ val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
+
+ val prefix = formulaBuilder
+ addFormula(prefix, eqAxioms)
+
+ var prefixRest = symbolicCasesFail
+ var current = symbolicCasesOk
+ var reachable = true
+ var caseIndex = 0
+
+ debug.patmat("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
+ debug.patmat("equality axioms:\n"+ cnfString(eqAxiomsOk))
+
+ // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
+ // termination: prefixRest.length decreases by 1
+ while (prefixRest.nonEmpty && reachable) {
+ val prefHead = prefixRest.head
+ caseIndex += 1
+ prefixRest = prefixRest.tail
+ if (prefixRest.isEmpty) reachable = true
+ else {
+ addFormula(prefix, prefHead)
+ current = current.tail
+ val model = findModelFor(andFormula(current.head, toFormula(prefix)))
+
+ // debug.patmat("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
+ // if (NoModel ne model) debug.patmat("reached: "+ modelString(model))
+
+ reachable = NoModel ne model
+ }
+ }
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
+
+ if (reachable) None else Some(caseIndex)
+ } catch {
+ case ex: AnalysisBudget.Exception =>
+ warn(prevBinder.pos, ex, "unreachability")
+ None // CNF budget exceeded
+ }
+ }
+
+ // exhaustivity
+
+ def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
+ // customize TreeMakersToProps (which turns a tree of tree makers into a more abstract DAG of tests)
+ // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
+ // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive
+ // - back off (to avoid crying exhaustive too often) when:
+ // - there are guards -->
+ // - there are extractor calls (that we can't secretly/soundly) rewrite
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null
+ var backoff = false
+
+ val approx = new TreeMakersToPropsIgnoreNullChecks(prevBinder)
+ val symbolicCases = approx.approximateMatch(cases, approx.onUnknown { tm =>
+ approx.fullRewrite.applyOrElse[TreeMaker, Prop](tm, {
+ case BodyTreeMaker(_, _) => True // irrelevant -- will be discarded by symbolCase later
+ case _ => // debug.patmat("backing off due to "+ tm)
+ backoff = true
+ False
+ })
+ }) map caseWithoutBodyToProp
+
+ if (backoff) Nil else {
+ val prevBinderTree = approx.binderToUniqueTree(prevBinder)
+
+ // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
+ // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
+ // val nonNullScrutineeCond =
+ // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
+ // if (isTupleType(prevBinder.tpe))
+ // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullProp(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(And)
+ // else
+ // NonNullProp(prevBinderTree)
+ // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
+
+ // when does the match fail?
+ val matchFails = Not(\/(symbolicCases))
+
+ // debug output:
+ debug.patmat("analysing:")
+ showTreeMakers(cases)
+
+ // debug.patmat("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
+ // debug.patmat("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
+
+ try {
+ // find the models (under which the match fails)
+ val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
+
+ val scrutVar = Var(prevBinderTree)
+ val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
+
+ val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
+ pruned
+ } catch {
+ case ex : AnalysisBudget.Exception =>
+ warn(prevBinder.pos, ex, "exhaustivity")
+ Nil // CNF budget exceeded
+ }
+ }
+ }
+
+ object CounterExample {
+ def prune(examples: List[CounterExample]): List[CounterExample] = {
+ val distinct = examples.filterNot(_ == NoExample).toSet
+ distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
+ }
+ }
+
+ // a way to construct a value that will make the match fail: a constructor invocation, a constant, an object of some type
+ class CounterExample {
+ protected[MatchAnalyzer] def flattenConsArgs: List[CounterExample] = Nil
+ def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
+ }
+ case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
+ case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
+ case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
+ // require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
+ override def toString = {
+ val negation =
+ if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
+ else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
+ "(x: "+ eqTo +" forSome x not in "+ negation +")"
+ }
+ }
+ case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ protected[MatchAnalyzer] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
+ case hd :: tl :: Nil => hd :: tl.flattenConsArgs
+ case _ => Nil
+ }
+ protected[MatchAnalyzer] lazy val elems = flattenConsArgs
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case other@ListExample(_) =>
+ this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+
+ override def toString = elems.mkString("List(", ", ", ")")
+ }
+ case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = ctorArgs.mkString("(", ", ", ")")
+
+ override def coveredBy(other: CounterExample): Boolean =
+ other match {
+ case TupleExample(otherArgs) =>
+ this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
+ case _ => super.coveredBy(other)
+ }
+ }
+ case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
+ override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
+ }
+
+ case object WildcardExample extends CounterExample { override def toString = "_" }
+ case object NoExample extends CounterExample { override def toString = "??" }
+
+ def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
+ model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
+ val (trues, falses) = xs.partition(_._2)
+ (trues map (_._1.const), falses map (_._1.const))
+ // should never be more than one value in trues...
+ }
+
+ def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
+ varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
+ val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
+ v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
+ }.mkString("\n")
+
+ // return constructor call when the model is a true counter example
+ // (the variables don't take into account type information derived from other variables,
+ // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
+ // since we didn't realize the tail of the outer cons was a Nil)
+ def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
+ // x1 = ...
+ // x1.hd = ...
+ // x1.tl = ...
+ // x1.hd.hd = ...
+ // ...
+ val varAssignment = modelToVarAssignment(model)
+
+ debug.patmat("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
+
+ // chop a path into a list of symbols
+ def chop(path: Tree): List[Symbol] = path match {
+ case Ident(_) => List(path.symbol)
+ case Select(pre, name) => chop(pre) :+ path.symbol
+ case _ =>
+ // debug.patmat("don't know how to chop "+ path)
+ Nil
+ }
+
+ // turn the variable assignments into a tree
+ // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
+ // a node is a variable example (which is later turned into a counter example)
+ object VariableAssignment {
+ private def findVar(path: List[Symbol]) = path match {
+ case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
+ case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
+ }
+
+ private val uniques = new mutable.HashMap[Var, VariableAssignment]
+ private def unique(variable: Var): VariableAssignment =
+ uniques.getOrElseUpdate(variable, {
+ val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
+ VariableAssignment(variable, eqTo.toList, neqTo.toList, mutable.HashMap.empty)
+ })
+
+ def apply(variable: Var): VariableAssignment = {
+ val path = chop(variable.path)
+ val pre = path.init
+ val field = path.last
+
+ val newCtor = unique(variable)
+
+ if (pre.isEmpty) newCtor
+ else {
+ findVar(pre) foreach { preVar =>
+ val outerCtor = this(preVar)
+ outerCtor.fields(field) = newCtor
+ }
+ newCtor
+ }
+ }
+ }
+
+ // node in the tree that describes how to construct a counter-example
+ case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
+ // need to prune since the model now incorporates all super types of a constant (needed for reachability)
+ private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
+ private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
+ private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
+ private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
+ private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
+ private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
+
+
+ def allFieldAssignmentsLegal: Boolean =
+ (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
+
+ private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => c.isAny }
+
+ // NoExample if the constructor call is ill-typed
+ // (thus statically impossible -- can we incorporate this into the formula?)
+ // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
+ def toCounterExample(beBrief: Boolean = false): CounterExample =
+ if (!allFieldAssignmentsLegal) NoExample
+ else {
+ debug.patmat("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
+ val res = prunedEqualTo match {
+ // a definite assignment to a value
+ case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
+
+ // constructor call
+ // or we did not gather any information about equality but we have information about the fields
+ // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
+ case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
+ ( uniqueEqualTo.nonEmpty
+ || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
+
+ def args(brevity: Boolean = beBrief) = {
+ // figure out the constructor arguments from the field assignment
+ val argLen = (caseFieldAccs.length min ctorParams.length)
+
+ (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
+ }
+
+ cls match {
+ case ConsClass => ListExample(args())
+ case _ if isTupleSymbol(cls) => TupleExample(args(true))
+ case _ => ConstructorExample(cls, args())
+ }
+
+ // a definite assignment to a type
+ case List(eq) if fields.isEmpty => TypeExample(eq)
+
+ // negative information
+ case Nil if nonTrivialNonEqualTo.nonEmpty =>
+ // negation tends to get pretty verbose
+ if (beBrief) WildcardExample
+ else {
+ val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
+ NegativeExample(eqTo, nonTrivialNonEqualTo)
+ }
+
+ // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
+ // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
+ case _ => NoExample
+ }
+ debug.patmat("described as: "+ res)
+ res
+ }
+
+ override def toString = toCounterExample().toString
+ }
+
+ // slurp in information from other variables
+ varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
+
+ // this is the variable we want a counter example for
+ VariableAssignment(scrutVar).toCounterExample()
+ }
+
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {
+ if (!suppression.unreachable) {
+ unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
+ reportUnreachable(cases(caseIndex).last.pos)
+ }
+ }
+ if (!suppression.exhaustive) {
+ val counterExamples = exhaustive(prevBinder, cases, pt)
+ if (counterExamples.nonEmpty)
+ reportMissingCases(prevBinder.pos, counterExamples)
+ }
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
new file mode 100644
index 0000000000..57fab4eafa
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala
@@ -0,0 +1,258 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.SYNTHETIC
+import scala.language.postfixOps
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Factory methods used by TreeMakers to make the actual trees.
+ *
+ * We have two modes in which to emit trees: optimized (the default)
+ * and pure (aka "virtualized": match is parametric in its monad).
+ */
+trait MatchCodeGen extends Interface {
+ import PatternMatchingStats._
+ import global.{nme, treeInfo, definitions, gen, Tree, Type, Symbol, NoSymbol,
+ appliedType, NoType, MethodType, newTermName, Name,
+ Block, Literal, Constant, EmptyTree, Function, Typed, ValDef, LabelDef}
+ import definitions._
+
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ // generate actual trees
+ ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait CodegenCore extends MatchMonadInterface {
+ private var ctr = 0
+ def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
+
+ // assert(owner ne null); assert(owner ne NoSymbol)
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
+ NoSymbol.newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
+
+ def newSynthCaseLabel(name: String) =
+ NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+ // codegen relevant to the structure of the translation (how extractors are combined)
+ trait AbsCodegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
+
+ // local / context-free
+ def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
+ def mkZero(tp: Type): Tree
+ def tupleSel(binder: Symbol)(i: Int): Tree
+ }
+
+ // structure
+ trait Casegen extends AbsCodegen { import CODE._
+ def one(res: Tree): Tree
+
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+ def ifThenElseZero(c: Tree, thenp: Tree): Tree = IF (c) THEN thenp ELSE zero
+ protected def zero: Tree
+ }
+
+ def codegen: AbsCodegen
+
+ abstract class CommonCodegen extends AbsCodegen { import CODE._
+ def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
+ def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
+ def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
+ def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
+ def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
+
+ // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = if (b.info <:< tp) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
+
+ // duplicated out of frustration with cast generation
+ def mkZero(tp: Type): Tree = {
+ tp.typeSymbol match {
+ case UnitClass => Literal(Constant())
+ case BooleanClass => Literal(Constant(false))
+ case FloatClass => Literal(Constant(0.0f))
+ case DoubleClass => Literal(Constant(0.0d))
+ case ByteClass => Literal(Constant(0.toByte))
+ case ShortClass => Literal(Constant(0.toShort))
+ case IntClass => Literal(Constant(0))
+ case LongClass => Literal(Constant(0L))
+ case CharClass => Literal(Constant(0.toChar))
+ case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
+ }
+ }
+ }
+ }
+
+ trait PureMatchMonadInterface extends MatchMonadInterface {
+ val matchStrategy: Tree
+
+ def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
+ def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
+ protected def matchMonadSym = oneSig.finalResultType.typeSymbol
+
+ import CODE._
+ def _match(n: Name): SelectStart = matchStrategy DOT n
+
+ private lazy val oneSig: Type = typer.typedOperator(_match(vpmName.one)).tpe // TODO: error message
+ }
+
+ trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
+ def codegen: AbsCodegen = pureCodegen
+
+ object pureCodegen extends CommonCodegen with Casegen { import CODE._
+ //// methods in MatchingStrategy (the monad companion) -- used directly in translation
+ // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
+ // TODO: consider catchAll, or virtualized matching will break in exception handlers
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
+
+ // __match.one(`res`)
+ def one(res: Tree): Tree = (_match(vpmName.one)) (res)
+ // __match.zero
+ protected def zero: Tree = _match(vpmName.zero)
+ // __match.guard(`c`, `then`)
+ def guard(c: Tree, thenp: Tree): Tree = _match(vpmName.guard) APPLY (c, thenp)
+
+ //// methods in the monad instance -- used directly in translation
+ // `prev`.flatMap(`b` => `next`)
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
+ // `thisCase`.orElse(`elseCase`)
+ def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
+ // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
+ }
+ }
+
+ trait OptimizedMatchMonadInterface extends MatchMonadInterface {
+ override def inMatchMonad(tp: Type): Type = optionType(tp)
+ override def pureType(tp: Type): Type = tp
+ override protected def matchMonadSym = OptionClass
+ }
+
+ trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
+ override def codegen: AbsCodegen = optimizedCodegen
+
+ // when we know we're targeting Option, do some inlining the optimizer won't do
+ // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
+ // this is a special instance of the advanced inlining optimization that takes a method call on
+ // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
+ object optimizedCodegen extends CommonCodegen { import CODE._
+
+ /** Inline runOrElse and get rid of Option allocations
+ *
+ * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
+ * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
+ * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
+ */
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
+ val matchEnd = newSynthCaseLabel("matchEnd")
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
+ matchEnd setInfo MethodType(List(matchRes), restpe)
+
+ def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
+ var _currCase = newCaseSym
+
+ val caseDefs = cases map { (mkCase: Casegen => Tree) =>
+ val currCase = _currCase
+ val nextCase = newCaseSym
+ _currCase = nextCase
+
+ LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
+ }
+
+ // must compute catchAll after caseLabels (side-effects nextCase)
+ // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+ // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+ val catchAllDef = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+
+ LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
+ } toList // at most 1 element
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+
+ // the generated block is taken apart in TailCalls under the following assumptions
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ Block(
+ scrutDef ++ caseDefs ++ catchAllDef,
+ LabelDef(matchEnd, List(matchRes), REF(matchRes))
+ )
+ }
+
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+ // only used to wrap the RHS of a body
+ // res: T
+ // returns MatchMonad[T]
+ def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply
+ protected def zero: Tree = nextCase APPLY ()
+
+ // prev: MatchMonad[T]
+ // b: T
+ // next: MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+
+ BLOCK(
+ VAL(prevSym) === prev,
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ )
+ }
+
+ // cond: Boolean
+ // res: T
+ // nextBinder: T
+ // next == MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
+ val rest =
+ // only emit a local val for `nextBinder` if it's actually referenced in `next`
+ if (next.exists(_.symbol eq nextBinder))
+ BLOCK(
+ VAL(nextBinder) === res,
+ next
+ )
+ else next
+ ifThenElseZero(cond, rest)
+ }
+
+ // guardTree: Boolean
+ // next: MatchMonad[T]
+ // returns MatchMonad[T]
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ ifThenElseZero(guardTree, next)
+
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ condSym === mkTRUE,
+ nextBinder === res,
+ next
+ ))
+ }
+
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
new file mode 100644
index 0000000000..c570dd8572
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala
@@ -0,0 +1,615 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.MUTABLE
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Optimize and analyze matches based on their TreeMaker-representation.
+ *
+ * The patmat translation doesn't rely on this, so it could be disabled in principle.
+ * - well, not quite: the backend crashes if we emit duplicates in switches (e.g. SI-7290)
+ */
+// TODO: split out match analysis
+trait MatchOptimization extends MatchTreeMaking with MatchAnalysis {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, NoSymbol, CaseDef, atPos,
+ ConstantType, Literal, Constant, gen, EmptyTree, distinctBy,
+ Typed, treeInfo, nme, Ident,
+ Apply, If, Bind, lub, Alternative, deriveCaseDef, Match, MethodType, LabelDef, TypeTree, Throw}
+
+ import global.definitions._
+
+
+ ////
+ trait CommonSubconditionElimination extends OptimizedCodegen with MatchApproximator {
+ /** a flow-sensitive, generalised, common sub-expression elimination
+ * reuse knowledge from performed tests
+ * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
+ * when a sub-expression is shared, it is stored in a mutable variable
+ * the variable is floated up so that its scope includes all of the program that shares it
+ * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
+ */
+ def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+ debug.patmat("before CSE:")
+ showTreeMakers(cases)
+
+ val testss = approximateMatchConservative(prevBinder, cases)
+
+ // interpret:
+ val dependencies = new mutable.LinkedHashMap[Test, Set[Prop]]
+ val tested = new mutable.HashSet[Prop]
+
+ // TODO: use SAT solver instead of hashconsing props and approximating implication by subset/equality
+ def storeDependencies(test: Test) = {
+ val cond = test.prop
+
+ def simplify(c: Prop): Set[Prop] = c match {
+ case And(a, b) => simplify(a) ++ simplify(b)
+ case Or(_, _) => Set(False) // TODO: make more precise
+ case Not(Eq(Var(_), NullConst)) => Set(True) // not worth remembering
+ case _ => Set(c)
+ }
+ val conds = simplify(cond)
+
+ if (conds(False)) false // stop when we encounter a definite "no" or a "not sure"
+ else {
+ val nonTrivial = conds filterNot (_ == True)
+ if (nonTrivial nonEmpty) {
+ tested ++= nonTrivial
+
+ // is there an earlier test that checks our condition and whose dependencies are implied by ours?
+ dependencies find {
+ case (priorTest, deps) =>
+ ((simplify(priorTest.prop) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
+ (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
+ ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
+ } foreach {
+ case (priorTest, _) =>
+ // if so, note the dependency in both tests
+ priorTest registerReuseBy test
+ }
+
+ dependencies(test) = tested.toSet // copies
+ }
+ true
+ }
+ }
+
+
+ testss foreach { tests =>
+ tested.clear()
+ tests dropWhile storeDependencies
+ }
+ debug.patmat("dependencies: "+ dependencies)
+
+ // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
+ // then, collapse these contiguous sequences of reusing tests
+ // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
+ // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
+ val reused = new mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
+ var okToCall = false
+ val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
+
+ // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
+ // once this has been computed, we'll know which tree makers are reused,
+ // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
+ val collapsed = testss map { tests =>
+ // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
+ // if there's no sharing, simply map to the tree makers corresponding to the tests
+ var currDeps = Set[Prop]()
+ val (sharedPrefix, suffix) = tests span { test =>
+ (test.prop == True) || (for(
+ reusedTest <- test.reuses;
+ nextDeps <- dependencies.get(reusedTest);
+ diff <- (nextDeps -- currDeps).headOption;
+ _ <- Some(currDeps = nextDeps))
+ yield diff).nonEmpty
+ }
+
+ val collapsedTreeMakers =
+ if (sharedPrefix.isEmpty) None
+ else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
+ for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
+ case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
+ case _ =>
+ }
+
+ debug.patmat("sharedPrefix: "+ sharedPrefix)
+ debug.patmat("suffix: "+ sharedPrefix)
+ // if the shared prefix contains interesting conditions (!= True)
+ // and the last of such interesting shared conditions reuses another treemaker's test
+ // replace the whole sharedPrefix by a ReusingCondTreeMaker
+ for (lastShared <- sharedPrefix.reverse.dropWhile(_.prop == True).headOption;
+ lastReused <- lastShared.reuses)
+ yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
+ }
+
+ collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains True-tests, which are dropped above)
+ }
+ okToCall = true // TODO: remove (debugging)
+
+ // replace original treemakers that are reused (as determined when computing collapsed),
+ // by ReusedCondTreeMakers
+ val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
+ debug.patmat("after CSE:")
+ showTreeMakers(reusedMakers)
+ reusedMakers
+ }
+
+ object ReusedCondTreeMaker {
+ def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
+ }
+ class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+ lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
+ lazy val treesToHoist: List[Tree] = {
+ nextBinder setFlag MUTABLE
+ List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
+ }
+
+ // TODO: finer-grained duplication
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+
+ override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
+ }
+
+ case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
+ val pos = sharedPrefix.last.treeMaker.pos
+
+ lazy val localSubstitution = {
+ // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
+ var mostRecentReusedMaker: ReusedCondTreeMaker = null
+ def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
+ val (from, to) = sharedPrefix.flatMap { dropped =>
+ dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
+ case reusedMaker: ReusedCondTreeMaker =>
+ mostRecentReusedMaker = reusedMaker
+ case _ =>
+ }
+
+ // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
+ // and thus assumed in scope, either because it binds it or because it refers to it
+ dropped.treeMaker match {
+ case dropped: FunTreeMaker =>
+ mapToStored(dropped.nextBinder)
+ case _ => Nil
+ }
+ }.unzip
+ val rerouteToReusedBinders = Substitution(from, to)
+
+ val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
+
+ collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
+ }
+
+ lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
+ // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
+ }
+ override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
+ }
+ }
+
+
+ //// DCE
+// trait DeadCodeElimination extends TreeMakers {
+// // TODO: non-trivial dead-code elimination
+// // e.g., the following match should compile to a simple instanceof:
+// // case class Ident(name: String)
+// // for (Ident(name) <- ts) println(name)
+// def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
+// // do minimal DCE
+// cases
+// }
+// }
+
+ //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
+ trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface {
+ import treeInfo.isGuardedCase
+
+ abstract class SwitchMaker {
+ abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
+ val SwitchableTreeMaker: SwitchableTreeMakerExtractor
+
+ def alternativesSupported: Boolean
+
+ // when collapsing guarded switch cases we may sometimes need to jump to the default case
+ // however, that's not supported in exception handlers, so when we can't jump when we need it, don't emit a switch
+ // TODO: make more fine-grained, as we don't always need to jump
+ def canJump: Boolean
+
+ /** Should exhaustivity analysis be skipped? */
+ def unchecked: Boolean
+
+
+ def isDefault(x: CaseDef): Boolean
+ def defaultSym: Symbol
+ def defaultBody: Tree
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
+
+ private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
+ if (xs exists (_.isEmpty)) None else Some(xs.flatten)
+
+ object GuardAndBodyTreeMakers {
+ def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
+ tms match {
+ case (btm@BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
+ case (gtm@GuardTreeMaker(guard)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
+ case _ => None
+ }
+ }
+ }
+
+ private val defaultLabel: Symbol = newSynthCaseLabel("default")
+
+ /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
+ *
+     * Cases with patterns A and B switch on the same constant iff all values x that match A also match B and vice versa.
+ * (This roughly corresponds to equality on trees modulo alpha renaming and reordering of alternatives.)
+ *
+ * The rewrite only applies if some of the cases are guarded (this must be checked before invoking this method).
+ *
+ * The rewrite goes through the switch top-down and merges each case with the subsequent cases it is implied by
+ * (i.e. it matches if they match, not taking guards into account)
+ *
+ * If there are no unreachable cases, all cases can be uniquely assigned to a partition of such 'overlapping' cases,
+ * save for the default case (thus we jump to it rather than copying it several times).
+ * (The cases in a partition are implied by the principal element of the partition.)
+ *
+ * The overlapping cases are merged into one case with their guards pushed into the body as follows
+ * (with P the principal element of the overlapping patterns Pi):
+ *
+ * `{case Pi if(G_i) => B_i }*` is rewritten to `case P => {if(G_i) B_i}*`
+ *
+ * The rewrite fails (and returns Nil) when:
+ * (1) there is a subsequence of overlapping cases that has an unguarded case in the middle;
+ * only the last case of each subsequence of overlapping cases may be unguarded (this is implied by unreachability)
+ *
+ * (2) there are overlapping cases that differ (tested by `caseImpliedBy`)
+     * cases with patterns A and B are overlapping if for SOME value x, A matches x implies B matches x OR vice versa <-- note the difference with case equality defined above
+ * for example `case 'a' | 'b' =>` and `case 'b' =>` are different and overlapping (overlapping and equality disregard guards)
+ *
+ * The second component of the returned tuple indicates whether we'll need to emit a labeldef to jump to the default case.
+ */
+ private def collapseGuardedCases(cases: List[CaseDef]): (List[CaseDef], Boolean) = {
+ // requires(same.forall(caseEquals(same.head)))
+ // requires(same.nonEmpty, same)
+ def collapse(same: List[CaseDef], isDefault: Boolean): CaseDef = {
+ val commonPattern = same.head.pat
+ // jump to default case (either the user-supplied one or the synthetic one)
+ // unless we're collapsing the default case: then we re-use the same body as the synthetic catchall (throwing a matcherror, rethrowing the exception)
+ val jumpToDefault: Tree =
+ if (isDefault || !canJump) defaultBody
+ else Apply(Ident(defaultLabel), Nil)
+
+ val guardedBody = same.foldRight(jumpToDefault){
+ // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
+ // --> replace jumpToDefault by the un-guarded case's body
+ case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
+ case (cd@CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els)
+ }
+
+ // if the cases that we're going to collapse bind variables,
+ // must replace them by the single binder introduced by the collapsed case
+ val binders = same.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
+ val (pat, guardedBodySubst) =
+ if (binders.isEmpty) (commonPattern, guardedBody)
+ else {
+ // create a single fresh binder to subsume the old binders (and their types)
+ // TODO: I don't think the binder's types can actually be different (due to checks in caseEquals)
+ // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
+ // TODO: reuse name exactly if there's only one binder in binders
+ val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)), binders.head.name.toString)
+
+ // the patterns in same are equal (according to caseEquals)
+ // we can thus safely pick the first one arbitrarily, provided we correct binding
+ val origPatWithoutBind = commonPattern match {
+ case Bind(b, orig) => orig
+ case o => o
+ }
+ // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
+ val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
+ (Bind(binder, origPatWithoutBind), unifiedBody)
+ }
+
+ atPos(commonPattern.pos)(CaseDef(pat, EmptyTree, guardedBodySubst))
+ }
+
+ // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
+ var remainingCases = cases
+ val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
+
+ // when some of collapsed cases (except for the default case itself) did not include an un-guarded case
+ // we'll need to emit a labeldef for the default case
+ var needDefault = false
+
+ while (remainingCases.nonEmpty) {
+ val currCase = remainingCases.head
+ val currIsDefault = isDefault(CaseDef(currCase.pat, EmptyTree, EmptyTree))
+ val (impliesCurr, others) =
+ // the default case is implied by all cases, no need to partition (and remainingCases better all be default cases as well)
+ if (currIsDefault) (remainingCases.tail, Nil)
+ else remainingCases.tail partition (caseImplies(currCase))
+
+ val unguardedComesLastOrAbsent =
+ (!isGuardedCase(currCase) && impliesCurr.isEmpty) || { val LastImpliesCurr = impliesCurr.length - 1
+ impliesCurr.indexWhere(oc => !isGuardedCase(oc)) match {
+ // if all cases are guarded we will have to jump to the default case in the final else
+ // (except if we're collapsing the default case itself)
+ case -1 =>
+ if (!currIsDefault) needDefault = true
+ true
+
+ // last case is not guarded, no need to jump to the default here
+ // note: must come after case -1 => (since LastImpliesCurr may be -1)
+ case LastImpliesCurr => true
+
+ case _ => false
+ }}
+
+ if (unguardedComesLastOrAbsent /*(1)*/ && impliesCurr.forall(caseEquals(currCase)) /*(2)*/) {
+ collapsed += (
+ if (impliesCurr.isEmpty && !isGuardedCase(currCase)) currCase
+ else collapse(currCase :: impliesCurr, currIsDefault)
+ )
+
+ remainingCases = others
+ } else { // fail
+ collapsed.clear()
+ remainingCases = Nil
+ }
+ }
+
+ (collapsed.toList, needDefault)
+ }
+
+ private def caseEquals(x: CaseDef)(y: CaseDef) = patternEquals(x.pat)(y.pat)
+ private def patternEquals(x: Tree)(y: Tree): Boolean = (x, y) match {
+ case (Alternative(xs), Alternative(ys)) =>
+ xs.forall(x => ys.exists(patternEquals(x))) &&
+ ys.forall(y => xs.exists(patternEquals(y)))
+ case (Alternative(pats), _) => pats.forall(p => patternEquals(p)(y))
+ case (_, Alternative(pats)) => pats.forall(q => patternEquals(x)(q))
+ // regular switch
+ case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
+ case (Ident(nme.WILDCARD), Ident(nme.WILDCARD)) => true
+ // type-switch for catch
+ case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)), Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => tpX.tpe =:= tpY.tpe
+ case _ => false
+ }
+
+ // if y matches then x matches for sure (thus, if x comes before y, y is unreachable)
+ private def caseImplies(x: CaseDef)(y: CaseDef) = patternImplies(x.pat)(y.pat)
+ private def patternImplies(x: Tree)(y: Tree): Boolean = (x, y) match {
+ // since alternatives are flattened, must treat them as separate cases
+ case (Alternative(pats), _) => pats.exists(p => patternImplies(p)(y))
+ case (_, Alternative(pats)) => pats.exists(q => patternImplies(x)(q))
+ // regular switch
+ case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
+ case (Ident(nme.WILDCARD), _) => true
+ // type-switch for catch
+ case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)),
+ Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => instanceOfTpImplies(tpY.tpe, tpX.tpe)
+ case _ => false
+ }
+
+ private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
+
+ // must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
+ private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
+ var cases = cs
+ var unreachable: Option[CaseDef] = None
+
+ while (cases.nonEmpty && unreachable.isEmpty) {
+ val currCase = cases.head
+ if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
+ unreachable = Some(cases.tail.head)
+ else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
+ unreachable = cases.tail.find(caseImplies(currCase))
+ else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
+ unreachable = Some(currCase)
+
+ cases = cases.tail
+ }
+
+ unreachable
+ }
+
+ // empty list ==> failure
+ def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] =
+ // generate if-then-else for 1 case switch (avoids verify error... can't imagine a one-case switch being faster than if-then-else anyway)
+ if (cases.isEmpty || cases.tail.isEmpty) Nil
+ else {
+ val caseDefs = cases map { case (scrutSym, makers) =>
+ makers match {
+ // default case
+ case GuardAndBodyTreeMakers(guard, body) =>
+ Some(defaultCase(scrutSym, guard, body))
+ // constant (or typetest for typeSwitch)
+ case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
+ Some(CaseDef(pattern, guard, body))
+ // alternatives
+ case AlternativesTreeMaker(_, altss, pos) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
+ val switchableAlts = altss map {
+ case SwitchableTreeMaker(pattern) :: Nil =>
+ Some(pattern)
+ case _ =>
+ None
+ }
+
+ // succeed if they were all switchable
+ sequence(switchableAlts) map { switchableAlts =>
+ def extractConst(t: Tree) = t match {
+ case Literal(const) => const
+ case _ => t
+ }
+ // SI-7290 Discard duplicate alternatives that would crash the backend
+ val distinctAlts = distinctBy(switchableAlts)(extractConst)
+ if (distinctAlts.size < switchableAlts.size) {
+ val duplicated = switchableAlts.groupBy(extractConst).flatMap(_._2.drop(1).take(1)) // report the first duplicated
+ global.currentUnit.warning(pos, s"Pattern contains duplicate alternatives: ${duplicated.mkString(", ")}")
+ }
+ CaseDef(Alternative(distinctAlts), guard, body)
+ }
+ case _ =>
+ // debug.patmat("can't emit switch for "+ makers)
+ None //failure (can't translate pattern to a switch)
+ }
+ }
+
+ val caseDefsWithGuards = sequence(caseDefs) match {
+ case None => return Nil
+ case Some(cds) => cds
+ }
+
+ // a switch with duplicate cases yields a verify error,
+ // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
+ // (even though the verify error would disappear, the behaviour would change)
+ val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
+
+ if (!allReachable) Nil
+ else if (noGuards(caseDefsWithGuards)) {
+ if (isDefault(caseDefsWithGuards.last)) caseDefsWithGuards
+ else caseDefsWithGuards :+ defaultCase()
+ } else {
+ // collapse identical cases with different guards, push guards into body for all guarded cases
+ // this translation is only sound if there are no unreachable (duplicate) cases
+ // it should only be run if there are guarded cases, and on failure it returns Nil
+ val (collapsed, needDefaultLabel) = collapseGuardedCases(caseDefsWithGuards)
+
+ if (collapsed.isEmpty || (needDefaultLabel && !canJump)) Nil
+ else {
+ def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
+ if (needDefaultLabel) deriveCaseDef(cd){ b =>
+ // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
+ defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
+ LabelDef(defaultLabel, Nil, b)
+ } else cd
+
+ val last = collapsed.last
+ if (isDefault(last)) {
+ if (!needDefaultLabel) collapsed
+ else collapsed.init :+ wrapInDefaultLabelDef(last)
+ } else collapsed :+ wrapInDefaultLabelDef(defaultCase())
+ }
+ }
+ }
+ }
+
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
+ val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
+ val alternativesSupported = true
+ val canJump = true
+
+ // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))`
+ // The tree itself can be a literal, an ident, a selection, ...
+ object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match {
+ case ConstantType(const) if const.isIntRange =>
+ Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
+ case _ => None
+ }}
+
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
+ case _ => None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ def defaultSym: Symbol = scrutSym
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (DEFAULT IF guard) ==> body
+ }}
+ }
+
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
+ // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
+ if (regularSwitchMaker.switchableTpe(dealiasWiden(scrutSym.tpe))) {
+ val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
+ if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
+ else {
+ // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
+ val scrutToInt: Tree =
+ if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ else (REF(scrutSym) DOT (nme.toInt))
+ Some(BLOCK(
+ VAL(scrutSym) === scrut,
+ Match(scrutToInt, caseDefsWithDefault) // a switch
+ ))
+ }
+ } else None
+ }
+
+ // for the catch-cases in a try/catch
+ private object typeSwitchMaker extends SwitchMaker {
+ val unchecked = false
+ val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
+ val canJump = false
+
+ // TODO: there are more treemaker-sequences that can be handled by type tests
+ // analyze the result of approximateTreeMaker rather than the TreeMaker itself
+ object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
+ def unapply(x: TreeMaker): Option[Tree] = x match {
+ case tm@TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
+ Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
+ case _ =>
+ None
+ }
+ }
+
+ def isDefault(x: CaseDef): Boolean = x match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
+ case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
+ case _ => false
+ }
+
+ lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
+ def defaultBody: Tree = Throw(CODE.REF(defaultSym))
+ def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
+ (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
+ }}
+ }
+
+ // TODO: drop null checks
+ override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
+ val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
+ if (caseDefsWithDefault isEmpty) None
+ else Some(caseDefsWithDefault)
+ }
+ }
+
+ trait MatchOptimizer extends OptimizedCodegen
+ with SwitchEmission
+ with CommonSubconditionElimination {
+ override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = {
+ // TODO: do CSE on result of doDCE(prevBinder, cases, pt)
+ val optCases = doCSE(prevBinder, cases, pt)
+ val toHoist = (
+ for (treeMakers <- optCases)
+ yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
+ ).flatten.flatten.toList
+ (optCases, toHoist)
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
new file mode 100644
index 0000000000..90c52e3eb6
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala
@@ -0,0 +1,674 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+/** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers.
+ */
+trait MatchTranslation { self: PatternMatching =>
+ import PatternMatchingStats._
+ import global.{phase, currentRun, Symbol,
+ Apply, Bind, CaseDef, ClassInfoType, Ident, Literal, Match,
+ Alternative, Constant, EmptyTree, Select, Star, This, Throw, Typed, UnApply,
+ Type, MethodType, WildcardType, PolyType, ErrorType, NoType, TypeRef, typeRef,
+ Name, NoSymbol, Position, Tree, atPos, glb, rootMirror, treeInfo, nme, Transformer,
+ elimAnonymousClass, asCompactDebugString, hasLength}
+ import global.definitions.{ThrowableClass, SeqClass, ScalaPackageClass, BooleanClass, UnitClass, RepeatedParamClass,
+ repeatedToSeq, isRepeatedParamType, getProductArgs}
+ import global.analyzer.{ErrorUtils, formalTypes}
+
+ trait MatchTranslator extends TreeMakers {
+ import typer.context
+
+ // Why is it so difficult to say "here's a name and a context, give me any
+ // matching symbol in scope" ? I am sure this code is wrong, but attempts to
+ // use the scopes of the contexts in the enclosing context chain discover
+    // nothing. How to associate a name with a symbol would be a wonderful
+ // linkage for which to establish a canonical acquisition mechanism.
+ def matchingSymbolInScope(pat: Tree): Symbol = {
+ def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
+ case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
+ case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
+ case _ => NoSymbol
+ }
+ pat match {
+ case Bind(name, _) =>
+ context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
+ res orElse declarationOfName(ctx.owner.rawInfo, name))
+ case _ => NoSymbol
+ }
+ }
+
+ // Issue better warnings than "unreachable code" when people mis-use
+ // variable patterns thinking they bind to existing identifiers.
+ //
+ // Possible TODO: more deeply nested variable patterns, like
+ // case (a, b) => 1 ; case (c, d) => 2
+ // However this is a pain (at least the way I'm going about it)
+ // and I have to think these detailed errors are primarily useful
+ // for beginners, not people writing nested pattern matches.
+ def checkMatchVariablePatterns(cases: List[CaseDef]) {
+ // A string describing the first variable pattern
+ var vpat: String = null
+ // Using an iterator so we can recognize the last case
+ val it = cases.iterator
+
+ def addendum(pat: Tree) = {
+ matchingSymbolInScope(pat) match {
+ case NoSymbol => ""
+ case sym =>
+ val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
+ s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
+ }
+ }
+
+ while (it.hasNext) {
+ val cdef = it.next
+ // If a default case has been seen, then every succeeding case is unreachable.
+ if (vpat != null)
+ context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
+ // If this is a default case and more cases follow, warn about this one so
+ // we have a reason to mention its pattern variable name and any corresponding
+ // symbol in scope. Errors will follow from the remaining cases, at least
+ // once we make the above warning an error.
+ else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
+ val vpatName = cdef.pat match {
+ case Bind(name, _) => s" '$name'"
+ case _ => ""
+ }
+ vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
+ context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
+ }
+ }
+ }
+
+ // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
+ private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
+ private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
+ private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
+ private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
+ private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
+ private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
+ private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
+
+ /** Implement a pattern match by turning its cases (including the implicit failure case)
+ * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
+ *
+ * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
+ * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
+ *
+ * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
+ * thus, you must typecheck the result (and that will in turn translate nested matches)
+     * this could probably be optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
+ */
+ def translateMatch(match_ : Match): Tree = {
+ val Match(selector, cases) = match_
+
+ val (nonSyntheticCases, defaultOverride) = cases match {
+ case init :+ last if treeInfo isSyntheticDefaultCase last =>
+ (init, Some(((scrut: Tree) => last.body)))
+ case _ =>
+ (cases, None)
+ }
+
+ checkMatchVariablePatterns(nonSyntheticCases)
+
+ // we don't transform after uncurry
+ // (that would require more sophistication when generating trees,
+ // and the only place that emits Matches after typers is for exception handling anyway)
+ if (phase.id >= currentRun.uncurryPhase.id)
+ devWarning(s"running translateMatch past uncurry (at $phase) on $selector match $cases")
+
+ debug.patmat("translating "+ cases.mkString("{", "\n", "}"))
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
+
+ val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
+
+ val origPt = match_.tpe
+ // when one of the internal cps-type-state annotations is present, strip all CPS annotations
+ // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
+ // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
+ val ptUnCPS =
+ if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
+ removeCPSAdaptAnnotations(origPt)
+ else origPt
+
+ // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
+ // pt is the skolemized version
+ val pt = repeatedToSeq(ptUnCPS)
+
+ // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
+ val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
+
+ // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
+ val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
+ combined
+ }
+
+ // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
+ // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
+ // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
+ // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
+ // unlike translateMatch, we type our result before returning it
+ def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
+ // if they're already simple enough to be handled by the back-end, we're done
+ if (caseDefs forall treeInfo.isCatchCase) caseDefs
+ else {
+ val swatches = { // switch-catches
+ val bindersAndCases = caseDefs map { caseDef =>
+ // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
+ // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
+ val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
+ }
+
+ for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
+ if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
+ cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
+ }
+
+ val catches = if (swatches.nonEmpty) swatches else {
+ val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
+
+ val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
+
+ List(
+ atPos(pos) {
+ CaseDef(
+ Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
+ EmptyTree,
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
+ )
+ })
+ }
+
+ typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
+ }
+
+
+
+ /** The translation of `pat if guard => body` has two aspects:
+ * 1) the substitution due to the variables bound by patterns
+ * 2) the combination of the extractor calls using `flatMap`.
+ *
+ * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
+ * this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
+ * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
+ * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
+ * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
+ * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
+ *
+ * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
+ * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
+ * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
+ * in the scope of the remainder of the pattern, and they must thus be replaced by:
+ * - (for 1-ary unapply) x_1
+ * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
+ * - (for unapplySeq) x_1.apply(i)
+ *
+ * in the treemakers,
+ *
+ * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
+ *
+ * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
+ * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
+ * a function that will take care of binding and substitution of the next ast (to the right).
+ *
+ */
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
+ translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
+ }
+
+ def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
+ // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
+ type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
+ def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
+ def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
+
+ val pos = patTree.pos
+
+ def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
+ if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
+ // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
+
+ debug.patmat("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
+
+ // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
+ // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
+ // (it will later result in a type test when `tp` is not a subtype of `b.info`)
+ // TODO: can we simplify this, together with the Bound case?
+ (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
+ debug.patmat("changing "+ b +" : "+ b.info +" -> "+ tp)
+ b setInfo tp
+ }
+
+ // example check: List[Int] <:< ::[Int]
+ // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
+ // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
+ val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
+ if (patBinder.info.widen <:< extractor.paramType) {
+ // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
+ // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
+ // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
+ // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
+ /* TODO: uncomment when `settings.developer` and `devWarning` become available
+ if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
+ devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
+ */
+ (Nil, patBinder setInfo extractor.paramType, false)
+ } else {
+ // chain a type-testing extractor before the actual extractor call
+ // it tests the type, checks the outer pointer and casts to the expected type
+ // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
+ // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
+ val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
+
+ // check whether typetest implies patBinder is not null,
+ // even though the eventual null check will be on patBinderOrCasted
+ // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
+ (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
+ }
+
+ withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
+ }
+
+
+ object MaybeBoundTyped {
+ /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
+ * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
+ * The returned type is the one inferred by inferTypedPattern (`owntype`)
+ *
+ * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
+ */
+ def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
+ // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
+ case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
+ case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
+ case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
+ case _ => None
+ }
+ }
+
+ val (treeMakers, subpats) = patTree match {
+ // skip wildcard trees -- no point in checking them
+ case WildcardPattern() => noFurtherSubPats()
+ case UnApply(unfun, args) =>
+ // TODO: check unargs == args
+ // debug.patmat("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
+ translateExtractorPattern(ExtractorCall(unfun, args))
+
+ /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
+ It consists of a stable identifier c, followed by element patterns p1, ..., pn.
+ The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
+
+ If the case class is monomorphic, then it must conform to the expected type of the pattern,
+ and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
+
+ If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
+ The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
+
+ The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
+ A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
+ **/
+ case Apply(fun, args) =>
+ ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
+ ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
+ noFurtherSubPats()
+ }
+
+ /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
+ The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
+ This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
+ **/
+ // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
+ case MaybeBoundTyped(subPatBinder, pt) =>
+ val next = glb(List(dealiasWiden(patBinder.info), pt)).normalize
+ // a typed pattern never has any subtrees
+ noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
+
+ /** A pattern binder x@p consists of a pattern variable x and a pattern p.
+ The type of the variable x is the static type T of the pattern p.
+ This pattern matches any value v matched by the pattern p,
+ provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
+ and it binds the variable name to that value.
+ **/
+ case Bound(subpatBinder, p) =>
+ // replace subpatBinder by patBinder (as if the Bind was not there)
+ withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
+ // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
+ (patBinder, p)
+ )
+
+ /** 8.1.4 Literal Patterns
+ A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
+ The type of L must conform to the expected type of the pattern.
+
+ 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
+ The pattern matches any value v such that r == v (§12.1).
+ The type of r must conform to the expected type of the pattern.
+ **/
+ case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
+ noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
+
+ case Alternative(alts) =>
+ noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
+
+ /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
+ case class Foo(x: Int, y: String)
+ case class Bar(z: Int)
+
+ def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
+ */
+
+ case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
+ debug.patmat("WARNING: Bind tree with unbound symbol "+ patTree)
+ noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
+
+ // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
+
+ case _ =>
+ typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
+ noFurtherSubPats()
+ }
+
+ treeMakers ++ subpats.flatMap { case (binder, pat) =>
+ translatePattern(binder, pat) // recurse on subpatterns
+ }
+ }
+
+ def translateGuard(guard: Tree): List[TreeMaker] =
+ if (guard == EmptyTree) Nil
+ else List(GuardTreeMaker(guard))
+
+ // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
+ // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
+ // to enable this, probably need to move away from Option to a monad specific to pattern-match,
+ // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
+ // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
+ // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
+ def translateBody(body: Tree, matchPt: Type): TreeMaker =
+ BodyTreeMaker(body, matchPt)
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ object ExtractorCall {
+ def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
+ def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
+ }
+
+ abstract class ExtractorCall(val args: List[Tree]) {
+ val nbSubPats = args.length
+
+ // everything okay, captain?
+ def isTyped : Boolean
+
+ def isSeq: Boolean
+ lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
+
+ // to which type should the previous binder be casted?
+ def paramType : Type
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
+
+ // `subPatBinders` are the variables bound by this pattern in the following patterns
+ // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
+ lazy val subPatBinders = args map {
+ case Bound(b, p) => b
+ case p => freshSym(p.pos, prefix = "p")
+ }
+
+ lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
+ case (b, Bound(_, p)) => (b, p)
+ case bp => bp
+ }
+
+ // never store these in local variables (for PreserveSubPatBinders)
+ lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
+ case (b, PatternBoundToUnderscore()) => b
+ }.toSet
+
+ def subPatTypes: List[Type] =
+ if(isSeq) {
+ val TypeRef(pre, SeqClass, args) = seqTp
+ // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
+ val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
+
+ if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
+ else formalTypes(formalsWithRepeated, nbSubPats)
+ } else rawSubPatTypes
+
+ protected def rawSubPatTypes: List[Type]
+
+ protected def seqTp = rawSubPatTypes.last baseType SeqClass
+ protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
+ protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
+ protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
+ protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
+ protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
+ protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
+ protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require isSeq
+ protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
+ val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
+ val nbIndexingIndices = indexingIndices.length
+
+ // this error-condition has already been checked by checkStarPatOK:
+ // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
+ // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
+ (((1 to firstIndexingBinder) map tupleSel(binder)) ++
+ // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
+ (indexingIndices map codegen.index(seqTree(binder))) ++
+ // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
+ (if(!lastIsStar) Nil else List(
+ if(nbIndexingIndices == 0) seqTree(binder)
+ else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
+ }
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (nbSubPats == 0) Nil
+ else if (isSeq) subPatRefsSeq(binder)
+ else ((1 to nbSubPats) map tupleSel(binder)).toList
+
+ protected def lengthGuard(binder: Symbol): Option[Tree] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ checkedLength map { expectedLength => import CODE._
+ // `binder.lengthCompare(expectedLength)`
+ def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
+
+ // the comparison to perform
+ // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
+ // (otherwise equality is required)
+ def compareOp: (Tree, Tree) => Tree =
+ if (lastIsStar) _ INT_>= _
+ else _ INT_== _
+
+ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
+ (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
+ }
+
+ def checkedLength: Option[Int] =
+ // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
+ if (!isSeq || (expectedLength < minLenToCheck)) None
+ else Some(expectedLength)
+
+ }
+
+ // TODO: to be called when there's a def unapplyProd(x: T): U
+ // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
+ //
+ // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
+ class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
+ /*override def equals(x$1: Any): Boolean = ...
+ val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
+ */
+ // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
+ // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
+ // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
+ // debug.patmat("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
+ // debug.patmat("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
+ private def constructorTp = fun.tpe
+
+ def isTyped = fun.isTyped
+
+ // to which type should the previous binder be casted?
+ def paramType = constructorTp.finalResultType
+
+ def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
+ protected def rawSubPatTypes = constructorTp.paramTypes
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
+ * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
+ */
+ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
+ val paramAccessors = binder.constrParamAccessors
+ // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
+ // make an exception for classes under the scala package as they should be well-behaved,
+ // to optimize matching on List
+ val mutableBinders =
+ if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
+ (paramAccessors exists (_.isMutable)))
+ subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
+ else Nil
+
+ // checks binder ne null before chaining to the next extractor
+ ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
+ }
+
+ // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
+ override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
+ val accessors = binder.caseFieldAccessors
+ if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ }
+
+ override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
+ }
+
+ class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
+ private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
+
+ def tpe = extractorCall.tpe
+ def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
+ def paramType = tpe.paramTypes.head
+ def resultType = tpe.finalResultType
+ def isSeq = extractorCall.symbol.name == nme.unapplySeq
+
+ /** Create the TreeMaker that embodies this extractor call
+ *
+ * `binder` has been casted to `paramType` if necessary
+ * `binderKnownNonNull` is not used in this subclass
+ *
+ * TODO: implement review feedback by @retronym:
+ * Passing the pair of values around suggests:
+ * case class Binder(sym: Symbol, knownNotNull: Boolean).
+ * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
+ */
+ def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
+ // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
+ val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
+ val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
+ ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
+ }
+
+ override protected def seqTree(binder: Symbol): Tree =
+ if (firstIndexingBinder == 0) CODE.REF(binder)
+ else super.seqTree(binder)
+
+ // the trees that select the subpatterns on the extractor's result, referenced by `binder`
+ // require (nbSubPats > 0 && (!lastIsStar || isSeq))
+ override protected def subPatRefs(binder: Symbol): List[Tree] =
+ if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
+ else super.subPatRefs(binder)
+
+ protected def spliceApply(binder: Symbol): Tree = {
+ object splice extends Transformer {
+ override def transform(t: Tree) = t match {
+ case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
+ treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
+ case _ => super.transform(t)
+ }
+ }
+ splice.transform(extractorCallIncludingDummy)
+ }
+
+ // what's the extractor's result type in the monad?
+ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
+ protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
+ if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
+ else matchMonadResult(resultType)
+ }
+
+ protected lazy val rawSubPatTypes =
+ if (resultInMonad.typeSymbol eq UnitClass) Nil
+ else if(!isSeq && nbSubPats == 1) List(resultInMonad)
+ else getProductArgs(resultInMonad) match {
+ case Nil => List(resultInMonad)
+ case x => x
+ }
+
+ override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
+ }
+
+ /** A conservative approximation of which patterns do not discern anything.
+ * They are discarded during the translation.
+ */
+ object WildcardPattern {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Star(WildcardPattern()) => true
+ case x: Ident => treeInfo.isVarPattern(x)
+ case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
+ case EmptyTree => true
+ case _ => false
+ }
+ }
+
+ object PatternBoundToUnderscore {
+ def unapply(pat: Tree): Boolean = pat match {
+ case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
+ case Ident(nme.WILDCARD) => true
+ case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
+ case Typed(PatternBoundToUnderscore(), _) => true
+ case _ => false
+ }
+ }
+
+ object Bound {
+ def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
+ case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
+ Some((t.symbol, p))
+ case _ => None
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
new file mode 100644
index 0000000000..202f3444f8
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -0,0 +1,614 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.symtab.Flags.{SYNTHETIC, ARTIFACT}
+import scala.language.postfixOps
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.NoPosition
+
+/** Translate our IR (TreeMakers) into actual Scala Trees using the factory methods in MatchCodeGen.
+ *
+ * The IR is mostly concerned with sequencing, substitution, and rendering all necessary conditions,
+ * mostly agnostic to whether we're in optimized/pure (virtualized) mode.
+ */
+trait MatchTreeMaking extends MatchCodeGen with Debugging {
+ import PatternMatchingStats._
+ import global.{Tree, Type, Symbol, CaseDef, atPos, settings,
+ Select, Block, ThisType, SingleType, NoPrefix, NoType, needsOuterTest,
+ ConstantType, Literal, Constant, gen, This, EmptyTree, map2, NoSymbol, Traverser,
+ Function, Typed, treeInfo, TypeRef, DefTree, Ident, nme}
+
+ import global.definitions.{SomeClass, AnyRefClass, UncheckedClass, BooleanClass}
+
+ final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
+ object Suppression {
+ val NoSuppression = Suppression(false, false)
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// the making of the trees
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TreeMakers extends TypedSubstitution with CodegenCore {
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree])
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit
+
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
+ None
+
+ // for catch (no need to customize match failure)
+ def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
+ None
+
+ abstract class TreeMaker {
+ def pos: Position
+
+ /** captures the scope and the value of the bindings in patterns
+ * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed)
+ */
+ def substitution: Substitution =
+ if (currSub eq null) localSubstitution
+ else currSub
+
+ protected def localSubstitution: Substitution
+
+ private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ if (currSub ne null) {
+ debug.patmat("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
+ Thread.dumpStack()
+ }
+ else currSub = outerSubst >> substitution
+ }
+ private[this] var currSub: Substitution = null
+
+ /** The substitution that specifies the trees that compute the values of the subpattern binders.
+ *
+ * Should not be used to perform actual substitution!
+ * Only used to reason symbolically about the values the subpattern binders are bound to.
+ * See TreeMakerToCond#updateSubstitution.
+ *
+ * Overridden in PreserveSubPatBinders to pretend it replaces the subpattern binders by subpattern refs
+ * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+ *
+ * TODO: clean this up, would be nicer to have some higher-level way to compute
+ * the binders bound by this tree maker and the symbolic values that correspond to them
+ */
+ def subPatternsAsSubstitution: Substitution = substitution
+
+ // build Tree that chains `next` after the current extractor
+ def chainBefore(next: Tree)(casegen: Casegen): Tree
+ }
+
+ trait NoNewBinders extends TreeMaker {
+ protected val localSubstitution: Substitution = EmptySubstitution
+ }
+
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+ def pos = tree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
+ }
+
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+ def pos = body.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
+ override def toString = "B"+(body, matchPt)
+ }
+
+ case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
+ val pos = NoPosition
+
+ val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
+ override def toString = "S"+ localSubstitution
+ }
+
+ abstract class FunTreeMaker extends TreeMaker {
+ val nextBinder: Symbol
+ def pos = nextBinder.pos
+ }
+
+ abstract class CondTreeMaker extends FunTreeMaker {
+ val prevBinder: Symbol
+ val nextBinderTp: Type
+ val cond: Tree
+ val res: Tree
+
+ lazy val nextBinder = freshSym(pos, nextBinderTp)
+ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree =
+ atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
+ }
+
+ // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
+ protected val debugInfoEmitVars = !settings.optimise.value
+
+ trait PreserveSubPatBinders extends TreeMaker {
+ val subPatBinders: List[Symbol]
+ val subPatRefs: List[Tree]
+ val ignoredSubPatBinders: Set[Symbol]
+
+ // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
+ // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
+ // sub patterns bound to wildcard (_) are never stored as they can't be referenced
+ // dirty debuggers will have to get dirty to see the wildcards
+ lazy val storedBinders: Set[Symbol] =
+ (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
+
+ // e.g., mutable fields of a case class in ProductExtractorTreeMaker
+ def extraStoredBinders: Set[Symbol]
+
+ def emitVars = storedBinders.nonEmpty
+
+ private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
+
+ protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs)
+ else {
+ val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
+ Substitution(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
+ }
+
+ /** The substitution that specifies the trees that compute the values of the subpattern binders.
+ *
+ * We pretend to replace the subpattern binders by subpattern refs
+ * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
+ */
+ override def subPatternsAsSubstitution =
+ Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
+
+ import CODE._
+ def bindSubPats(in: Tree): Tree =
+ if (!emitVars) in
+ else {
+ // binders in `subPatBindersStored` that are referenced by tree `in`
+ val usedBinders = new mutable.HashSet[Symbol]()
+ // all potentially stored subpat binders
+ val potentiallyStoredBinders = stored.unzip._1.toSet
+ // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
+ in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
+
+ if (usedBinders.isEmpty) in
+ else {
+ // only store binders actually used
+ val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
+ Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
+ }
+ }
+ }
+
+ /**
+ * Make a TreeMaker that will result in an extractor call specified by `extractor`
+ * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
+ * a function with binder `nextBinder` over our extractor's result
+ * the function's body is determined by the next TreeMaker
+ * (furthermore, the interpretation of `flatMap` depends on the codegen instance we're using).
+ *
+ * The values for the subpatterns, as computed by the extractor call in `extractor`,
+ * are stored in local variables that re-use the symbols in `subPatBinders`.
+ * This makes extractor patterns more debuggable (SI-5739).
+ */
+ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
+ val subPatBinders: List[Symbol],
+ val subPatRefs: List[Tree],
+ extractorReturnsBoolean: Boolean,
+ val checkedLength: Option[Int],
+ val prevBinder: Symbol,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ def extraStoredBinders: Set[Symbol] = Set()
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val condAndNext = extraCond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(substitution(cond), bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
+ atPos(extractor.pos)(
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext)
+ else casegen.flatMap(extractor, nextBinder, condAndNext)
+ )
+ }
+
+ override def toString = "X"+(extractor, nextBinder.name)
+ }
+
+ /**
+ * An optimized version of ExtractorTreeMaker for Products.
+ * For now, this is hard-coded to case classes, and we simply extract the case class fields.
+ *
+ * The values for the subpatterns, as specified by the case class fields at the time of extraction,
+ * are stored in local variables that re-use the symbols in `subPatBinders`.
+ * This makes extractor patterns more debuggable (SI-5739) as well as
+ * avoiding mutation after the pattern has been matched (SI-5158, SI-6070)
+ *
+ * TODO: make this user-definable as follows
+ * When a companion object defines a method `def unapply_1(x: T): U_1`, but no `def unapply` or `def unapplySeq`,
+ * the extractor is considered to match any non-null value of type T
+ * the pattern is expected to have as many sub-patterns as there are `def unapply_I(x: T): U_I` methods,
+ * and the type of the I'th sub-pattern is `U_I`.
+ * The same exception for Seq patterns applies: if the last extractor is of type `Seq[U_N]`,
+ * the pattern must have at least N arguments (exactly N if the last argument is annotated with `: _*`).
+ * The arguments starting at N (and beyond) are taken from the sequence returned by apply_N,
+ * and it is checked that that sequence has enough elements to provide values for all expected sub-patterns.
+ *
+ * For a case class C, the implementation is assumed to be `def unapply_I(x: C) = x._I`,
+ * and the extractor call is inlined under that assumption.
+ */
+ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
+ val subPatBinders: List[Symbol],
+ val subPatRefs: List[Tree],
+ val mutableBinders: List[Symbol],
+ binderKnownNonNull: Boolean,
+ val ignoredSubPatBinders: Set[Symbol]
+ ) extends FunTreeMaker with PreserveSubPatBinders {
+
+ import CODE._
+ val nextBinder = prevBinder // just passing through
+
+ // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
+ def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val nullCheck = REF(prevBinder) OBJ_NE NULL
+ val cond =
+ if (binderKnownNonNull) extraCond
+ else (extraCond map (nullCheck AND _)
+ orElse Some(nullCheck))
+
+ cond match {
+ case Some(cond) =>
+ casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
+ case _ =>
+ bindSubPats(substitution(next))
+ }
+ }
+
+ override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
+ }
+
+ object IrrefutableExtractorTreeMaker {
+ // will an extractor with unapply method of method type `tp` always succeed?
+ // note: this assumes the other side-conditions implied by the extractor are met
+ // (argument of the right type, length check succeeds for unapplySeq,...)
+ def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
+ case TypeRef(_, SomeClass, _) => true
+ // probably not useful since this type won't be inferred nor can it be written down (yet)
+ case ConstantType(Constant(true)) => true
+ case _ => false
+ }
+
+ def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
+ case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
+ Some((extractor, nextBinder))
+ case _ =>
+ None
+ }
+ }
+
+ object TypeTestTreeMaker {
+ // factored out so that we can consistently generate other representations of the tree that implements the test
+ // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
+ trait TypeTestCondStrategy {
+ type Result
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result
+ // TODO: can probably always widen
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result
+ def nonNullTest(testedBinder: Symbol): Result
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result
+ def eqTest(pat: Tree, testedBinder: Symbol): Result
+ def and(a: Result, b: Result): Result
+ def tru: Result
+ }
+
+ object treeCondStrategy extends TypeTestCondStrategy { import CODE._
+ type Result = Tree
+
+ def and(a: Result, b: Result): Result = a AND b
+ def tru = mkTRUE
+ def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
+ def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
+ def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
+ def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
+ val expectedOuter = expectedTp.prefix match {
+ case ThisType(clazz) => THIS(clazz)
+ case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
+ case _ => mkTRUE // fallback for SI-6183
+ }
+
+ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
+ // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
+ val outer = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedTp.prefix
+
+ (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
+ }
+ }
+
+ object pureTypeTestChecker extends TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
+
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = false
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false
+ def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
+ def tru = true
+ }
+
+ def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
+ type Result = Boolean
+
+ def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
+ def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
+ def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
+ def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
+ def and(a: Result, b: Result): Result = a || b
+ def tru = false
+ }
+ }
+
+ /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
+ *
+ * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
+ - A reference to a class C, p.C, or T#C.
+ This type pattern matches any non-null instance of the given class.
+ Note that the prefix of the class, if it is given, is relevant for determining class instances.
+ For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
+ The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
+
+ - A singleton type p.type.
+ This type pattern matches only the value denoted by the path p
+ (that is, a pattern match involves a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
+ // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
+
+ - A compound type pattern T1 with ... with Tn where each Ti is a type pattern.
+ This type pattern matches all values that are matched by each of the type patterns Ti.
+
+ - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
+ This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
+ The bounds or alias type of these type variables are determined as described in (§8.3).
+
+ - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
+ This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
+ **/
+ case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
+ import TypeTestTreeMaker._
+ debug.patmat("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
+
+ lazy val outerTestNeeded = (
+ !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
+ && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
+
+ // the logic to generate the run-time test that follows from the fact that
+ // a `prevBinder` is expected to have type `expectedTp`
+ // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
+ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
+ // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
+ def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
+ import cs._
+
+ def default =
+ // do type test first to ensure we won't select outer on null
+ if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
+ else typeTest(testedBinder, expectedTp)
+
+ // propagate expected type
+ def expTp(t: Tree): t.type = t setType expectedTp
+
+ // true when called to type-test the argument to an extractor
+ // don't do any fancy equality checking, just test the type
+ if (extractorArgTypeTest) default
+ else expectedTp match {
+ // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
+ // this implies sym.isStable
+ case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ // must use == to support e.g. List() == Nil
+ case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
+ case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
+ => eqTest(expTp(CODE.NULL), testedBinder)
+ case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
+ case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
+
+ // TODO: verify that we don't need to special-case Array
+ // I think it's okay:
+ // - the isInstanceOf test includes a test for the element type
+ // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
+ case _ if testedBinder.info.widen <:< expectedTp =>
+ // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
+ // since the types conform, no further checking is required
+ if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
+ // have to test outer and non-null only when it's a reference type
+ else if (expectedTp <:< AnyRefClass.tpe) {
+ // do non-null check first to ensure we won't select outer on null
+ if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
+ else nonNullTest(testedBinder)
+ } else default
+
+ case _ => default
+ }
+ }
+
+ val cond = renderCondition(treeCondStrategy)
+ val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
+
+ // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
+ def isPureTypeTest = renderCondition(pureTypeTestChecker)
+
+ def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
+
+ override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
+ }
+
+ // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
+ case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
+ val nextBinderTp = prevBinder.info.widen
+
+ // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
+ // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
+ val cond = codegen._equals(patTree, prevBinder)
+ val res = CODE.REF(prevBinder)
+ override def toString = "ET"+(prevBinder.name, patTree)
+ }
+
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
+ // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
+
+ override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
+ super.incorporateOuterSubstitution(outerSubst)
+ altss = altss map (alts => propagateSubstitution(alts, substitution))
+ }
+
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ atPos(pos){
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(mkTRUE)))(casegen))
+ )
+
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => mkFALSE))
+ codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
+ }
+ }
+ }
+
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ val pos = guardTree.pos
+
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
+ override def toString = "G("+ guardTree +")"
+ }
+
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
+ treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
+
+
+ def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
+
+ // a foldLeft to accumulate the localSubstitution left-to-right
+ // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fulfilled by propagateSubstitution
+ def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
+ var accumSubst: Substitution = initial
+ treeMakers foreach { maker =>
+ maker incorporateOuterSubstitution accumSubst
+ accumSubst = maker.substitution
+ }
+ removeSubstOnly(treeMakers)
+ }
+
+ // calls propagateSubstitution on the treemakers
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
+ // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
+ }
+
+ // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
+ fixerUpper(owner, scrut.pos){
+ def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
+ debug.patmat("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
+
+ val (suppression, requireSwitch): (Suppression, Boolean) =
+ if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppression, false)
+ else scrut match {
+ case Typed(tree, tpt) =>
+ val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
+ val supressUnreachable = tree match {
+ case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
+ case _ => false
+ }
+ val suppression = Suppression(suppressExhaustive, supressUnreachable)
+ // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
+ val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
+ (suppression, requireSwitch)
+ case _ =>
+ (Suppression.NoSuppression, false)
+ }
+
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
+ if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
+
+ if (casesNoSubstOnly nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ // exhaustivity and reachability must be checked before optimization as well
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ val synthCatchAll =
+ if (casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
+
+ analyzeCases(scrutSym, casesNoSubstOnly, pt, suppression)
+
+ val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
+
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
+
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
+ }
+
+ // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
+ // TODO: assign more fine-grained positions
+ // fixes symbol nesting, assigns positions
+ protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
+ currentOwner = origOwner
+
+ override def traverse(t: Tree) {
+ if (t != EmptyTree && t.pos == NoPosition) {
+ t.setPos(pos)
+ }
+ t match {
+ case Function(_, _) if t.symbol == NoSymbol =>
+ t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
+ debug.patmat("new symbol for "+ (t, t.symbol.ownerChain))
+ case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
+ debug.patmat("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
+ t.symbol.owner = currentOwner
+ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
+ debug.patmat("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
+ if(d.symbol.moduleClass ne NoSymbol)
+ d.symbol.moduleClass.owner = currentOwner
+
+ d.symbol.owner = currentOwner
+ // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
+ debug.patmat("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
+ case _ =>
+ }
+ super.traverse(t)
+ }
+
+ // override def apply
+ // debug.patmat("before fixerupper: "+ xTree)
+ // currentRun.trackerFactory.snapshot()
+ // debug.patmat("after fixerupper")
+ // currentRun.trackerFactory.snapshot()
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
new file mode 100644
index 0000000000..df4e699620
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala
@@ -0,0 +1,256 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.tools.nsc.Global
+import scala.tools.nsc.ast
+import scala.language.postfixOps
+import scala.tools.nsc.transform.TypingTransformers
+import scala.tools.nsc.transform.Transform
+import scala.reflect.internal.util.Statistics
+import scala.reflect.internal.Types
+import scala.reflect.internal.util.Position
+
+/** Translate pattern matching.
+ *
+ * Either into optimized if/then/else's, or virtualized as method calls (these methods form a zero-plus monad),
+ * similar in spirit to how for-comprehensions are compiled.
+ *
+ * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
+ * (lifting the body of the case into the monad using `one`).
+ *
+ * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
+ *
+ * TODO:
+ * - DCE (on irrefutable patterns)
+ * - update spec and double check it's implemented correctly (see TODO's)
+ *
+ * (longer-term) TODO:
+ * - user-defined unapplyProd
+ * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
+ * - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
+ */
+trait PatternMatching extends Transform with TypingTransformers
+ with Debugging
+ with Interface
+ with MatchTranslation
+ with MatchTreeMaking
+ with MatchCodeGen
+ with ScalaLogic
+ with Solving
+ with MatchAnalysis
+ with MatchOptimization {
+ import global._
+
+ val phaseName: String = "patmat"
+
+ def newTransformer(unit: CompilationUnit): Transformer =
+ if (opt.virtPatmat) new MatchTransformer(unit)
+ else noopTransformer
+
+ class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree): Tree = tree match {
+ case Match(sel, cases) =>
+ val origTp = tree.tpe
+ // setType origTp intended for CPS -- TODO: is it necessary?
+ val translated = translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]]))
+ try {
+ localTyper.typed(translated) setType origTp
+ } catch {
+ case x: (Types#TypeError) =>
+ // TODO: this should never happen; error should've been reported during type checking
+ unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
+ translated
+ }
+ case Try(block, catches, finalizer) =>
+ treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer))
+ case _ => super.transform(tree)
+ }
+
+ // TODO: only instantiate new match translator when localTyper has changed
+ // override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A
+ // as this is the only time TypingTransformer changes it
+ def translator: MatchTranslator with CodegenCore = {
+ new OptimizingMatchTranslator(localTyper)
+ }
+ }
+
+ class PureMatchTranslator(val typer: analyzer.Typer, val matchStrategy: Tree) extends MatchTranslator with PureCodegen {
+ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type) = (cases, Nil)
+ def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {}
+ }
+
+ class OptimizingMatchTranslator(val typer: analyzer.Typer) extends MatchTranslator
+ with MatchOptimizer
+ with MatchAnalyzer
+ with Solver
+}
+
+trait Debugging {
+ val global: Global
+
+ // TODO: the inliner fails to inline the closures to debug.patmat unless the method is nested in an object
+ object debug {
+ val printPatmat = global.settings.Ypatmatdebug.value
+ @inline final def patmat(s: => String) = if (printPatmat) println(s)
+ }
+}
+
+trait Interface extends ast.TreeDSL {
+ import global.{newTermName, analyzer, Type, ErrorType, Symbol, Tree}
+ import analyzer.Typer
+
+ // 2.10/2.11 compatibility
+ protected final def dealiasWiden(tp: Type) = tp.dealias // 2.11: dealiasWiden
+ protected final def mkTRUE = CODE.TRUE_typed // 2.11: CODE.TRUE
+ protected final def mkFALSE = CODE.FALSE_typed // 2.11: CODE.FALSE
+ protected final def hasStableSymbol(p: Tree) = p.hasSymbol && p.symbol.isStable // 2.11: p.hasSymbolField && p.symbol.isStable
+ protected final def devWarning(str: String) = global.debugwarn(str) // 2.11: omit
+
+ object vpmName {
+ val one = newTermName("one")
+ val drop = newTermName("drop")
+ val flatMap = newTermName("flatMap")
+ val get = newTermName("get")
+ val guard = newTermName("guard")
+ val isEmpty = newTermName("isEmpty")
+ val orElse = newTermName("orElse")
+ val outer = newTermName("<outer>")
+ val runOrElse = newTermName("runOrElse")
+ val zero = newTermName("zero")
+ val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching...
+
+ def counted(str: String, i: Int) = newTermName(str + i)
+ }
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// talking to userland
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
+ /** Interface with user-defined match monad?
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
+
+ type Matcher[P[_], M[+_], A] = {
+ def flatMap[B](f: P[A] => M[B]): M[B]
+ def orElse[B >: A](alternative: => M[B]): M[B]
+ }
+
+ abstract class MatchStrategy[P[_], M[+_]] {
+ // runs the matcher on the given input
+ def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
+
+ def zero: M[Nothing]
+ def one[T](x: P[T]): M[T]
+ def guard[T](cond: P[Boolean], then: => P[T]): M[T]
+ }
+
+ * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
+
+
+ * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
+
+ object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
+ def zero = None
+ def one[T](x: T) = Some(x)
+ // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
+ def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
+ def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
+ }
+
+ */
+ trait MatchMonadInterface {
+ val typer: Typer
+ val matchOwner = typer.context.owner
+
+ def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
+ def reportMissingCases(pos: Position, counterExamples: List[String]) = {
+ val ceString =
+ if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
+ else "inputs: " + counterExamples.mkString(", ")
+
+ typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
+ }
+
+ def inMatchMonad(tp: Type): Type
+ def pureType(tp: Type): Type
+ final def matchMonadResult(tp: Type): Type =
+ tp.baseType(matchMonadSym).typeArgs match {
+ case arg :: Nil => arg
+ case _ => ErrorType
+ }
+
+ protected def matchMonadSym: Symbol
+ }
+
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// substitution
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+ trait TypedSubstitution extends MatchMonadInterface {
+ object Substitution {
+ def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
+ }
+
+ class Substitution(val from: List[Symbol], val to: List[Tree]) {
+ import global.{Transformer, Ident, NoType}
+
+ // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
+ // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
+ def apply(tree: Tree): Tree = {
+ // according to -Ystatistics 10% of translateMatch's time is spent in this method...
+ // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
+ if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
+ else (new Transformer {
+ private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
+ if (origTp == null || origTp == NoType) to
+ // important: only type when actually substing and when original tree was typed
+ // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
+ else typer.typed(to)
+
+ override def transform(tree: Tree): Tree = {
+ def subst(from: List[Symbol], to: List[Tree]): Tree =
+ if (from.isEmpty) tree
+ else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
+ else subst(from.tail, to.tail)
+
+ tree match {
+ case Ident(_) => subst(from, to)
+ case _ => super.transform(tree)
+ }
+ }
+ }).transform(tree)
+ }
+
+
+ // the substitution that chains `other` before `this` substitution
+ // forall t: Tree. this(other(t)) == (this >> other)(t)
+ def >>(other: Substitution): Substitution = {
+ val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
+ new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
+ }
+ override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
+ }
+
+ object EmptySubstitution extends Substitution(Nil, Nil) {
+ override def apply(tree: Tree): Tree = tree
+ override def >>(other: Substitution): Substitution = other
+ }
+ }
+}
+
+object PatternMatchingStats {
+ val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
+ val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
+ val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
+ val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
+ val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
+ val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
+ val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
+}
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
new file mode 100644
index 0000000000..843f831ea1
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -0,0 +1,242 @@
+/* NSC -- new Scala compiler
+ *
+ * Copyright 2011-2013 LAMP/EPFL
+ * @author Adriaan Moors
+ */
+
+package scala.tools.nsc.transform.patmat
+
+import scala.collection.mutable
+import scala.reflect.internal.util.Statistics
+
+// naive CNF translation and simple DPLL solver
+trait Solving extends Logic {
+ import PatternMatchingStats._
+ trait CNF extends PropositionalLogic {
+
+ /** Override Array creation for efficiency (to not go through reflection). */
+ private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
+ def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
+ final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
+ }
+
+ import scala.collection.mutable.ArrayBuffer
+ type FormulaBuilder = ArrayBuffer[Clause]
+ def formulaBuilder = ArrayBuffer[Clause]()
+ def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
+ def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
+ def toFormula(buff: FormulaBuilder): Formula = buff
+
+ // CNF: a formula is a conjunction of clauses
+ type Formula = FormulaBuilder
+ def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
+
+ type Clause = Set[Lit]
+ // a clause is a disjunction of distinct literals
+ def clause(l: Lit*): Clause = l.toSet
+
+ type Lit
+ def Lit(sym: Sym, pos: Boolean = true): Lit
+
+ def andFormula(a: Formula, b: Formula): Formula = a ++ b
+ def simplifyFormula(a: Formula): Formula = a.distinct
+
+ private def merge(a: Clause, b: Clause) = a ++ b
+
+ // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
+ // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
+ def eqFreePropToSolvable(p: Prop): Formula = {
+ def negationNormalFormNot(p: Prop, budget: Int): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
+ case Not(p) => negationNormalForm(p, budget - 1)
+ case True => False
+ case False => True
+ case s: Sym => Not(s)
+ }
+
+ def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else p match {
+ case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
+ case Not(negated) => negationNormalFormNot(negated, budget - 1)
+ case True
+ | False
+ | (_ : Sym) => p
+ }
+
+ val TrueF = formula()
+ val FalseF = formula(clause())
+ def lit(s: Sym) = formula(clause(Lit(s)))
+ def negLit(s: Sym) = formula(clause(Lit(s, false)))
+
+ def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
+ def distribute(a: Formula, b: Formula, budget: Int): Formula =
+ if (budget <= 0) throw AnalysisBudget.exceeded
+ else
+ (a, b) match {
+ // true \/ _ = true
+ // _ \/ true = true
+ case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
+ // lit \/ lit
+ case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
+ // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
+ // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
+ case (cs, ds) =>
+ val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
+ big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
+ }
+
+ if (budget <= 0) throw AnalysisBudget.exceeded
+
+ p match {
+ case True => TrueF
+ case False => FalseF
+ case s: Sym => lit(s)
+ case Not(s: Sym) => negLit(s)
+ case And(a, b) =>
+ val cnfA = conjunctiveNormalForm(a, budget - 1)
+ val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
+ cnfA ++ cnfB
+ case Or(a, b) =>
+ val cnfA = conjunctiveNormalForm(a)
+ val cnfB = conjunctiveNormalForm(b)
+ distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
+ }
+ }
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
+ val res = conjunctiveNormalForm(negationNormalForm(p))
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
+
+ //
+ if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
+
+// debug.patmat("cnf for\n"+ p +"\nis:\n"+cnfString(res))
+ res
+ }
+ }
+
+ // simple solver using DPLL
+ trait Solver extends CNF {
+ // a literal is a (possibly negated) variable
+ def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
+ class Lit(val sym: Sym, val pos: Boolean) {
+ override def toString = if (!pos) "-"+ sym.toString else sym.toString
+ override def equals(o: Any) = o match {
+ case o: Lit => (o.sym eq sym) && (o.pos == pos)
+ case _ => false
+ }
+ override def hashCode = sym.hashCode + pos.hashCode
+
+ def unary_- = Lit(sym, !pos)
+ }
+
+ def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
+
+ // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
+ val EmptyModel = Map.empty[Sym, Boolean]
+ val NoModel: Model = null
+
+ // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
+ def findAllModelsFor(f: Formula): List[Model] = {
+ val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
+ // debug.patmat("vars "+ vars)
+ // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
+ def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
+
+ def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
+ if (recursionDepthAllowed == 0) models
+ else {
+ debug.patmat("find all models for\n"+ cnfString(f))
+ val model = findModelFor(f)
+ // if we found a solution, conjunct the formula with the model's negation and recurse
+ if (model ne NoModel) {
+ val unassigned = (vars -- model.keySet).toList
+ debug.patmat("unassigned "+ unassigned +" in "+ model)
+ def force(lit: Lit) = {
+ val model = withLit(findModelFor(dropUnit(f, lit)), lit)
+ if (model ne NoModel) List(model)
+ else Nil
+ }
+ val forced = unassigned flatMap { s =>
+ force(Lit(s, true)) ++ force(Lit(s, false))
+ }
+ debug.patmat("forced "+ forced)
+ val negated = negateModel(model)
+ findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
+ }
+ else models
+ }
+
+ findAllModels(f, Nil)
+ }
+
+ private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
+ private def dropUnit(f: Formula, unitLit: Lit): Formula = {
+ val negated = -unitLit
+ // drop entire clauses that are trivially true
+ // (i.e., disjunctions that contain the literal we're making true in the returned model),
+ // and simplify clauses by dropping the negation of the literal we're making true
+ // (since False \/ X == X)
+ val dropped = formulaBuilderSized(f.size)
+ for {
+ clause <- f
+ if !(clause contains unitLit)
+ } dropped += (clause - negated)
+ dropped
+ }
+
+ def findModelFor(f: Formula): Model = {
+ @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
+
+ debug.patmat("DPLL\n"+ cnfString(f))
+
+ val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
+
+ val satisfiableWithModel: Model =
+ if (f isEmpty) EmptyModel
+ else if(f exists (_.isEmpty)) NoModel
+ else f.find(_.size == 1) match {
+ case Some(unitClause) =>
+ val unitLit = unitClause.head
+ // debug.patmat("unit: "+ unitLit)
+ withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
+ case _ =>
+ // partition symbols according to whether they appear in positive and/or negative literals
+ val pos = new mutable.HashSet[Sym]()
+ val neg = new mutable.HashSet[Sym]()
+ f.foreach{_.foreach{ lit =>
+ if (lit.pos) pos += lit.sym else neg += lit.sym
+ }}
+ // appearing in both positive and negative
+ val impures = pos intersect neg
+ // appearing only in either positive/negative positions
+ val pures = (pos ++ neg) -- impures
+
+ if (pures nonEmpty) {
+ val pureSym = pures.head
+ // turn it back into a literal
+ // (since equality on literals is in terms of equality
+ // of the underlying symbol and its positivity, simply construct a new Lit)
+ val pureLit = Lit(pureSym, pos(pureSym))
+ // debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
+ val simplified = f.filterNot(_.contains(pureLit))
+ withLit(findModelFor(simplified), pureLit)
+ } else {
+ val split = f.head.head
+ // debug.patmat("split: "+ split)
+ orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
+ }
+ }
+
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
+
+ satisfiableWithModel
+ }
+ }
+}
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 4bf7f78167..49049f110d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -589,6 +589,10 @@ trait ContextErrors {
setError(tree)
}
+ // typedPattern
+ def PatternMustBeValue(pat: Tree, pt: Type) =
+ issueNormalTypeError(pat, s"pattern must be a value: $pat"+ typePatternAdvice(pat.tpe.typeSymbol, pt.typeSymbol))
+
// SelectFromTypeTree
def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = {
val hiBound = qual.tpe.bounds.hi
@@ -920,33 +924,10 @@ trait ContextErrors {
def IncompatibleScrutineeTypeError(tree: Tree, pattp: Type, pt: Type) =
issueNormalTypeError(tree, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt))
- def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) = {
- def errMsg = {
- val sym = pat.tpe.typeSymbol
- val clazz = sym.companionClass
- val addendum = (
- if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) {
- // TODO: move these somewhere reusable.
- val typeString = clazz.typeParams match {
- case Nil => "" + clazz.name
- case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
- }
- val caseString = (
- clazz.caseFieldAccessors
- map (_ => "_") // could use the actual param names here
- mkString (clazz.name + "(", ",", ")")
- )
- (
- "\nNote: if you intended to match against the class, try `case _: " +
- typeString + "` or `case " + caseString + "`"
- )
- }
- else ""
- )
- "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum
- }
- issueNormalTypeError(pat, errMsg)
- }
+ def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) =
+ issueNormalTypeError(pat,
+ "pattern type is incompatible with expected type"+ foundReqMsg(pat.tpe, pt) +
+ typePatternAdvice(pat.tpe.typeSymbol, pt1.typeSymbol))
def PolyAlternativeError(tree: Tree, argtypes: List[Type], sym: Symbol, err: PolyAlternativeErrorKind.ErrorType) = {
import PolyAlternativeErrorKind._
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 620665126e..6c2945cad3 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -84,6 +84,8 @@ trait Contexts { self: Analyzer =>
case Import(qual, _) => qual.tpe = singleType(qual.symbol.owner.thisType, qual.symbol)
case _ =>
}
+ sc.flushAndReturnBuffer()
+ sc.flushAndReturnWarningsBuffer()
sc = sc.outer
}
}
@@ -644,7 +646,14 @@ trait Contexts { self: Analyzer =>
new ImplicitInfo(sym.name, pre, sym)
private def collectImplicitImports(imp: ImportInfo): List[ImplicitInfo] = {
- val pre = imp.qual.tpe
+ val qual = imp.qual
+
+ val pre =
+ if (qual.tpe.typeSymbol.isPackageClass)
+ // SI-6225 important if the imported symbol is inherited by the package object.
+ singleType(qual.tpe, qual.tpe member nme.PACKAGE)
+ else
+ qual.tpe
def collect(sels: List[ImportSelector]): List[ImplicitInfo] = sels match {
case List() =>
List()
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index bbba7e0435..57b9dfe3e4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -101,6 +101,7 @@ trait EtaExpansion { self: Analyzer =>
case TypeApply(fn, args) =>
treeCopy.TypeApply(tree, liftoutPrefix(fn), args) setType null
case Select(qual, name) =>
+ val name = tree.symbol.name // account for renamed imports, SI-7233
treeCopy.Select(tree, liftout(qual, false), name) setSymbol NoSymbol setType null
case Ident(name) =>
tree
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 3bea049c59..04e0b9d653 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -149,7 +149,7 @@ trait Implicits {
class SearchResult(val tree: Tree, val subst: TreeTypeSubstituter) {
override def toString = "SearchResult(%s, %s)".format(tree,
if (subst.isEmpty) "" else subst)
-
+
def isFailure = false
def isAmbiguousFailure = false
final def isSuccess = !isFailure
@@ -158,7 +158,7 @@ trait Implicits {
lazy val SearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
}
-
+
lazy val AmbiguousSearchFailure = new SearchResult(EmptyTree, EmptyTreeTypeSubstituter) {
override def isFailure = true
override def isAmbiguousFailure = true
@@ -892,11 +892,20 @@ trait Implicits {
*/
if (divergence)
throw DivergentImplicit
-
- if (invalidImplicits.nonEmpty)
+ else invalidImplicits take 1 foreach { sym =>
+ def isSensibleAddendum = pt match {
+ case Function1(_, out) => out <:< sym.tpe.finalResultType
+ case tp => tp <:< sym.tpe.finalResultType
+ case _ => false
+ }
+ // Don't pitch in with this theory unless it looks plausible that the
+ // implicit would have helped
setAddendum(pos, () =>
- "\n Note: implicit "+invalidImplicits.head+" is not applicable here"+
- " because it comes after the application point and it lacks an explicit result type")
+ if (isSensibleAddendum)
+ s"\n Note: implicit $sym is not applicable here because it comes after the application point and it lacks an explicit result type"
+ else ""
+ )
+ }
}
best
@@ -1132,7 +1141,7 @@ trait Implicits {
)
// todo. migrate hardcoded materialization in Implicits to corresponding implicit macros
var materializer = atPos(pos.focus)(gen.mkMethodCall(TagMaterializers(tagClass), List(tp), if (prefix != EmptyTree) List(prefix) else List()))
- if (settings.XlogImplicits.value) println("materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
+ if (settings.XlogImplicits.value) reporter.echo(pos, "materializing requested %s.%s[%s] using %s".format(pre, tagClass.name, tp, materializer))
if (context.macrosEnabled) success(materializer)
// don't call `failure` here. if macros are disabled, we just fail silently
// otherwise -Xlog-implicits will spam the long with zillions of "macros are disabled"
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 7161043dcf..55e0a954f0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -55,6 +55,10 @@ trait Infer extends Checkable {
*
* `formals` are the formal types before expanding a potential repeated parameter (must come last in `formals`, if at all)
*
+ * @param nbSubPats The number of arguments to the extractor pattern
+ * @param effectiveNbSubPats `nbSubPats`, unless there is one sub-pattern which, after unwrapping
+ * bind patterns, is a Tuple pattern, in which case it is the number of
+ * elements. Used to issue warnings about binding a `TupleN` to a single value.
* @throws TypeError when the unapply[Seq] definition is ill-typed
* @returns (null, null) when the expected number of sub-patterns cannot be satisfied by the given extractor
*
@@ -84,7 +88,8 @@ trait Infer extends Checkable {
* `T_1, ..., T_m, U, ..., U`. Here, `U` is repeated `n-m` times.
*
*/
- def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int, unappSym: Symbol): (List[Type], List[Type]) = {
+ def extractorFormalTypes(pos: Position, resTp: Type, nbSubPats: Int,
+ unappSym: Symbol, effectiveNbSubPats: Int): (List[Type], List[Type]) = {
val isUnapplySeq = unappSym.name == nme.unapplySeq
val booleanExtractor = resTp.typeSymbolDirect == BooleanClass
@@ -114,8 +119,9 @@ trait Infer extends Checkable {
else if (optionArgs.nonEmpty)
if (nbSubPats == 1) {
val productArity = productArgs.size
- if (productArity > 1 && settings.lint.value)
- global.currentUnit.warning(pos, s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
+ if (settings.lint.value && productArity > 1 && productArity != effectiveNbSubPats)
+ global.currentUnit.warning(pos,
+ s"extractor pattern binds a single value to a Product${productArity} of type ${optionArgs.head}")
optionArgs
}
// TODO: update spec to reflect we allow any ProductN, not just TupleN
@@ -286,7 +292,11 @@ trait Infer extends Checkable {
// println("set error: "+tree);
// throw new Error()
// }
- def name = newTermName("<error: " + tree.symbol + ">")
+ def name = {
+ val sym = tree.symbol
+ val nameStr = try sym.toString catch { case _: CyclicReference => sym.nameString }
+ newTermName(s"<error: $nameStr>")
+ }
def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass
def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue
def errorSym = if (tree.isType) errorClass else errorValue
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 245656e2d7..0ba30ffa73 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -333,9 +333,7 @@ trait Macros extends scala.tools.reflect.FastTrack with Traces {
def param(tree: Tree): Symbol =
paramCache.getOrElseUpdate(tree.symbol, {
val sym = tree.symbol
- val sigParam = makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe))
- if (sym.isSynthetic) sigParam.flags |= SYNTHETIC
- sigParam
+ makeParam(sym.name, sym.pos, implType(sym.isType, sym.tpe), sym.flags)
})
val paramss = List(ctxParam) :: mmap(vparamss)(param)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 7a3ab00578..379f56521b 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -516,7 +516,13 @@ trait Namers extends MethodSynthesis {
// Setting the position at the import means that if there is
// more than one hidden name, the second will not be warned.
// So it is the position of the actual hidden name.
- checkNotRedundant(tree.pos withPoint fromPos, from, to)
+ //
+ // Note: java imports have precedence over definitions in the same package
+ // so don't warn for them. There is a corresponding special treatment
+ // in the shadowing rules in typedIdent (SI-7232). In any case,
+ // we shouldn't be emitting warnings for .java source files.
+ if (!context.unit.isJava)
+ checkNotRedundant(tree.pos withPoint fromPos, from, to)
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 2340c78f8c..f3736f1519 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -273,22 +273,25 @@ trait NamesDefaults { self: Analyzer =>
*/
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[Option[ValDef]] = {
val context = blockTyper.context
- val symPs = map2(args, paramTypes)((arg, tpe) => arg match {
+ val symPs = map2(args, paramTypes)((arg, paramTpe) => arg match {
case Ident(nme.SELECTOR_DUMMY) =>
None // don't create a local ValDef if the argument is <unapply-selector>
case _ =>
- val byName = isByNameParamType(tpe)
- val repeated = isScalaRepeatedParamType(tpe)
+ val byName = isByNameParamType(paramTpe)
+ val repeated = isScalaRepeatedParamType(paramTpe)
val argTpe = (
if (repeated) arg match {
case Typed(expr, Ident(tpnme.WILDCARD_STAR)) => expr.tpe
case _ => seqType(arg.tpe)
}
- else arg.tpe
- ).widen // have to widen or types inferred from literal defaults will be singletons
+ else
+ // Note stabilizing can lead to a non-conformant argument when existentials are involved, e.g. neg/t3507-old.scala, hence the filter.
+ // We have to deconst or types inferred from literal arguments will be Constant(_), e.g. pos/z1730.scala.
+ gen.stableTypeFor(arg).filter(_ <:< paramTpe).getOrElse(arg.tpe).deconst
+ )
val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos) setInfo (
- if (byName) functionType(Nil, argTpe) else argTpe
- )
+ if (byName) functionType(Nil, argTpe) else argTpe
+ )
Some((context.scope.enter(s), byName, repeated))
})
map2(symPs, args) {
@@ -329,11 +332,10 @@ trait NamesDefaults { self: Analyzer =>
// type the application without names; put the arguments in definition-site order
val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt)
- if (typedApp.isErrorTyped) tree
- else typedApp match {
+ typedApp match {
// Extract the typed arguments, restore the call-site evaluation order (using
// ValDef's in the block), change the arguments to these local values.
- case Apply(expr, typedArgs) =>
+ case Apply(expr, typedArgs) if !(typedApp :: typedArgs).exists(_.isErrorTyped) => // bail out with erroneous args, see SI-7238
// typedArgs: definition-site order
val formals = formalTypes(expr.tpe.paramTypes, typedArgs.length, removeByName = false, removeRepeated = false)
// valDefs: call-site order
@@ -361,6 +363,7 @@ trait NamesDefaults { self: Analyzer =>
context.namedApplyBlockInfo =
Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)))
block
+ case _ => tree
}
}
@@ -499,7 +502,7 @@ trait NamesDefaults { self: Analyzer =>
// disable conforms as a view...
val errsBefore = reporter.ERROR.count
try typer.silent { tpr =>
- val res = tpr.typed(arg, subst(paramtpe))
+ val res = tpr.typed(arg.duplicate, subst(paramtpe))
// better warning for SI-5044: if `silent` was not actually silent give a hint to the user
// [H]: the reason why `silent` is not silent is because the cyclic reference exception is
// thrown in a context completely different from `context` here. The exception happens while
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala b/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
deleted file mode 100644
index 87fe3fb3f6..0000000000
--- a/src/compiler/scala/tools/nsc/typechecker/PatternMatching.scala
+++ /dev/null
@@ -1,3876 +0,0 @@
-/* NSC -- new Scala compiler
- *
- * Copyright 2011-2013 LAMP/EPFL
- * @author Adriaan Moors
- */
-
-package scala.tools.nsc
-package typechecker
-
-import symtab._
-import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC, ARTIFACT}
-import scala.language.postfixOps
-import scala.tools.nsc.transform.TypingTransformers
-import scala.tools.nsc.transform.Transform
-import scala.collection.mutable.HashSet
-import scala.collection.mutable.HashMap
-import scala.reflect.internal.util.Statistics
-import scala.reflect.internal.Types
-
-/** Translate pattern matching.
- *
- * Either into optimized if/then/else's,
- * or virtualized as method calls (these methods form a zero-plus monad), similar in spirit to how for-comprehensions are compiled.
- *
- * For each case, express all patterns as extractor calls, guards as 0-ary extractors, and sequence them using `flatMap`
- * (lifting the body of the case into the monad using `one`).
- *
- * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`).
- *
- * TODO:
- * - DCE (on irrefutable patterns)
- * - update spec and double check it's implemented correctly (see TODO's)
- *
- * (longer-term) TODO:
- * - user-defined unapplyProd
- * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?)
- * - recover exhaustivity/unreachability of user-defined extractors by partitioning the types they match on using an HList or similar type-level structure
- */
-trait PatternMatching extends Transform with TypingTransformers with ast.TreeDSL { // self: Analyzer =>
- import Statistics._
- import PatternMatchingStats._
-
- val global: Global // need to repeat here because otherwise last mixin defines global as
- // SymbolTable. If we had DOT this would not be an issue
- import global._ // the global environment
- import definitions._ // standard classes and methods
-
- val phaseName: String = "patmat"
-
- // TODO: the inliner fails to inline the closures to patmatDebug
- object debugging {
- val printPatmat = settings.Ypatmatdebug.value
- @inline final def patmatDebug(s: => String) = if (printPatmat) println(s)
- }
- import debugging.patmatDebug
-
- // to govern how much time we spend analyzing matches for unreachability/exhaustivity
- object AnalysisBudget {
- import scala.tools.cmd.FromString.IntFromString
- val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
-
- abstract class Exception extends RuntimeException("CNF budget exceeded") {
- val advice: String
- def warn(pos: Position, kind: String) = currentUnit.uncheckedWarning(pos, s"Cannot check match for $kind.\n$advice")
- }
-
- object exceeded extends Exception {
- val advice = s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)"
- }
- }
-
- def newTransformer(unit: CompilationUnit): Transformer =
- if (opt.virtPatmat) new MatchTransformer(unit)
- else noopTransformer
-
- // duplicated from CPSUtils (avoid dependency from compiler -> cps plugin...)
- private lazy val MarkerCPSAdaptPlus = rootMirror.getClassIfDefined("scala.util.continuations.cpsPlus")
- private lazy val MarkerCPSAdaptMinus = rootMirror.getClassIfDefined("scala.util.continuations.cpsMinus")
- private lazy val MarkerCPSSynth = rootMirror.getClassIfDefined("scala.util.continuations.cpsSynth")
- private lazy val stripTriggerCPSAnns = List(MarkerCPSSynth, MarkerCPSAdaptMinus, MarkerCPSAdaptPlus)
- private lazy val MarkerCPSTypes = rootMirror.getClassIfDefined("scala.util.continuations.cpsParam")
- private lazy val strippedCPSAnns = MarkerCPSTypes :: stripTriggerCPSAnns
- private def removeCPSAdaptAnnotations(tp: Type) = tp filterAnnotations (ann => !(strippedCPSAnns exists (ann matches _)))
-
- class MatchTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
- override def transform(tree: Tree): Tree = tree match {
- case Match(sel, cases) =>
- val origTp = tree.tpe
- // setType origTp intended for CPS -- TODO: is it necessary?
- val translated = translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]]))
- try {
- localTyper.typed(translated) setType origTp
- } catch {
- case x: (Types#TypeError) =>
- // TODO: this should never happen; error should've been reported during type checking
- unit.error(tree.pos, "error during expansion of this match (this is a scalac bug).\nThe underlying error was: "+ x.msg)
- translated
- }
- case Try(block, catches, finalizer) =>
- treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer))
- case _ => super.transform(tree)
- }
-
- def translator: MatchTranslation with CodegenCore = {
- new OptimizingMatchTranslator(localTyper)
- }
- }
-
- import definitions._
- import analyzer._ //Typer
-
- object vpmName {
- val one = newTermName("one")
- val drop = newTermName("drop")
- val flatMap = newTermName("flatMap")
- val get = newTermName("get")
- val guard = newTermName("guard")
- val isEmpty = newTermName("isEmpty")
- val orElse = newTermName("orElse")
- val outer = newTermName("<outer>")
- val runOrElse = newTermName("runOrElse")
- val zero = newTermName("zero")
- val _match = newTermName("__match") // don't call the val __match, since that will trigger virtual pattern matching...
-
- def counted(str: String, i: Int) = newTermName(str+i)
- }
-
- class PureMatchTranslator(val typer: Typer, val matchStrategy: Tree) extends MatchTranslation with TreeMakers with PureCodegen
- class OptimizingMatchTranslator(val typer: Typer) extends MatchTranslation with TreeMakers with MatchOptimizations
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// talking to userland
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- /** Interface with user-defined match monad?
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below:
-
- type Matcher[P[_], M[+_], A] = {
- def flatMap[B](f: P[A] => M[B]): M[B]
- def orElse[B >: A](alternative: => M[B]): M[B]
- }
-
- abstract class MatchStrategy[P[_], M[+_]] {
- // runs the matcher on the given input
- def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]
-
- def zero: M[Nothing]
- def one[T](x: P[T]): M[T]
- def guard[T](cond: P[Boolean], then: => P[T]): M[T]
- }
-
- * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`)
-
-
- * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly)
-
- object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
- def zero = None
- def one[T](x: T) = Some(x)
- // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted
- def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None
- def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
- }
-
- */
- trait MatchMonadInterface {
- val typer: Typer
- val matchOwner = typer.context.owner
-
- def reportUnreachable(pos: Position) = typer.context.unit.warning(pos, "unreachable code")
- def reportMissingCases(pos: Position, counterExamples: List[String]) = {
- val ceString =
- if (counterExamples.tail.isEmpty) "input: " + counterExamples.head
- else "inputs: " + counterExamples.mkString(", ")
-
- typer.context.unit.warning(pos, "match may not be exhaustive.\nIt would fail on the following "+ ceString)
- }
-
- def inMatchMonad(tp: Type): Type
- def pureType(tp: Type): Type
- final def matchMonadResult(tp: Type): Type =
- tp.baseType(matchMonadSym).typeArgs match {
- case arg :: Nil => arg
- case _ => ErrorType
- }
-
- protected def matchMonadSym: Symbol
- }
-
- trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
- import typer.{typed, context, silent, reallyExists}
- // import typer.infer.containsUnchecked
-
- // Why is it so difficult to say "here's a name and a context, give me any
- // matching symbol in scope" ? I am sure this code is wrong, but attempts to
- // use the scopes of the contexts in the enclosing context chain discover
- // nothing. How to associate a name with a symbol would would be a wonderful
- // linkage for which to establish a canonical acquisition mechanism.
- def matchingSymbolInScope(pat: Tree): Symbol = {
- def declarationOfName(tpe: Type, name: Name): Symbol = tpe match {
- case PolyType(tparams, restpe) => tparams find (_.name == name) getOrElse declarationOfName(restpe, name)
- case MethodType(params, restpe) => params find (_.name == name) getOrElse declarationOfName(restpe, name)
- case ClassInfoType(_, _, clazz) => clazz.rawInfo member name
- case _ => NoSymbol
- }
- pat match {
- case Bind(name, _) =>
- context.enclosingContextChain.foldLeft(NoSymbol: Symbol)((res, ctx) =>
- res orElse declarationOfName(ctx.owner.rawInfo, name))
- case _ => NoSymbol
- }
- }
-
- // Issue better warnings than "unreachable code" when people mis-use
- // variable patterns thinking they bind to existing identifiers.
- //
- // Possible TODO: more deeply nested variable patterns, like
- // case (a, b) => 1 ; case (c, d) => 2
- // However this is a pain (at least the way I'm going about it)
- // and I have to think these detailed errors are primarily useful
- // for beginners, not people writing nested pattern matches.
- def checkMatchVariablePatterns(cases: List[CaseDef]) {
- // A string describing the first variable pattern
- var vpat: String = null
- // Using an iterator so we can recognize the last case
- val it = cases.iterator
-
- def addendum(pat: Tree) = {
- matchingSymbolInScope(pat) match {
- case NoSymbol => ""
- case sym =>
- val desc = if (sym.isParameter) s"parameter ${sym.nameString} of" else sym + " in"
- s"\nIf you intended to match against $desc ${sym.owner}, you must use backticks, like: case `${sym.nameString}` =>"
- }
- }
-
- while (it.hasNext) {
- val cdef = it.next
- // If a default case has been seen, then every succeeding case is unreachable.
- if (vpat != null)
- context.unit./*error*/warning(cdef.body.pos, "unreachable code due to " + vpat + addendum(cdef.pat))
- // If this is a default case and more cases follow, warn about this one so
- // we have a reason to mention its pattern variable name and any corresponding
- // symbol in scope. Errors will follow from the remaining cases, at least
- // once we make the above warning an error.
- else if (it.hasNext && (treeInfo isDefaultCase cdef)) {
- val vpatName = cdef.pat match {
- case Bind(name, _) => s" '$name'"
- case _ => ""
- }
- vpat = s"variable pattern$vpatName on line ${cdef.pat.pos.line}"
- context.unit.warning(cdef.pos, s"patterns after a variable pattern cannot match (SLS 8.1.1)" + addendum(cdef.pat))
- }
- }
- }
-
- /** Implement a pattern match by turning its cases (including the implicit failure case)
- * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
- *
- * For `scrutinee match { case1 ... caseN }`, the resulting tree has the shape
- * `runOrElse(scrutinee)(x => translateCase1(x).orElse(translateCase2(x)).....orElse(zero))`
- *
- * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
- * thus, you must typecheck the result (and that will in turn translate nested matches)
- * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
- */
- def translateMatch(match_ : Match): Tree = {
- val Match(selector, cases) = match_
-
- val (nonSyntheticCases, defaultOverride) = cases match {
- case init :+ last if treeInfo isSyntheticDefaultCase last =>
- (init, Some(((scrut: Tree) => last.body)))
- case _ =>
- (cases, None)
- }
-
- checkMatchVariablePatterns(nonSyntheticCases)
-
- // we don't transform after uncurry
- // (that would require more sophistication when generating trees,
- // and the only place that emits Matches after typers is for exception handling anyway)
- if(phase.id >= currentRun.uncurryPhase.id) debugwarn("running translateMatch at "+ phase +" on "+ selector +" match "+ cases)
- patmatDebug("translating "+ cases.mkString("{", "\n", "}"))
-
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null
-
- val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations))
-
- val origPt = match_.tpe
- // when one of the internal cps-type-state annotations is present, strip all CPS annotations
- // a cps-type-state-annotated type makes no sense as an expected type (matchX.tpe is used as pt in translateMatch)
- // (only test availability of MarkerCPSAdaptPlus assuming they are either all available or none of them are)
- val ptUnCPS =
- if (MarkerCPSAdaptPlus != NoSymbol && (stripTriggerCPSAnns exists origPt.hasAnnotation))
- removeCPSAdaptAnnotations(origPt)
- else origPt
-
- // relevant test cases: pos/existentials-harmful.scala, pos/gadt-gilles.scala, pos/t2683.scala, pos/virtpatmat_exist4.scala
- // pt is the skolemized version
- val pt = repeatedToSeq(ptUnCPS)
-
- // val packedPt = repeatedToSeq(typer.packedType(match_, context.owner))
-
- val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS
-
- // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride)
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start)
- combined
- }
-
- // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
- // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
- // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
- // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
- // unlike translateMatch, we type our result before returning it
- def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
- // if they're already simple enough to be handled by the back-end, we're done
- if (caseDefs forall treeInfo.isCatchCase) caseDefs
- else {
- val swatches = { // switch-catches
- val bindersAndCases = caseDefs map { caseDef =>
- // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
- // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
- }
-
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList;
- if cases forall treeInfo.isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
- cse <- cases) yield fixerUpper(matchOwner, pos)(cse).asInstanceOf[CaseDef]
- }
-
- val catches = if (swatches.nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
-
- val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
-
- List(
- atPos(pos) {
- CaseDef(
- Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
- EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
- )
- })
- }
-
- typer.typedCases(catches, ThrowableClass.tpe, WildcardType)
- }
-
-
-
- /** The translation of `pat if guard => body` has two aspects:
- * 1) the substitution due to the variables bound by patterns
- * 2) the combination of the extractor calls using `flatMap`.
- *
- * 2) is easy -- it looks like: `translatePattern_1.flatMap(translatePattern_2....flatMap(translatePattern_N.flatMap(translateGuard.flatMap((x_i) => success(Xbody(x_i)))))...)`
- * this must be right-leaning tree, as can be seen intuitively by considering the scope of bound variables:
- * variables bound by pat_1 must be visible from the function inside the left-most flatMap right up to Xbody all the way on the right
- * 1) is tricky because translatePattern_i determines the shape of translatePattern_i+1:
- * zoom in on `translatePattern_1.flatMap(translatePattern_2)` for example -- it actually looks more like:
- * `translatePattern_1(x_scrut).flatMap((x_1) => {y_i -> x_1._i}translatePattern_2)`
- *
- * `x_1` references the result (inside the monad) of the extractor corresponding to `pat_1`,
- * this result holds the values for the constructor arguments, which translatePattern_1 has extracted
- * from the object pointed to by `x_scrut`. The `y_i` are the symbols bound by `pat_1` (in order)
- * in the scope of the remainder of the pattern, and they must thus be replaced by:
- * - (for 1-ary unapply) x_1
- * - (for n-ary unapply, n > 1) selection of the i'th tuple component of `x_1`
- * - (for unapplySeq) x_1.apply(i)
- *
- * in the treemakers,
- *
- * Thus, the result type of `translatePattern_i`'s extractor must conform to `M[(T_1,..., T_n)]`.
- *
- * Operationally, phase 1) is a foldLeft, since we must consider the depth-first-flattening of
- * the transformed patterns from left to right. For every pattern ast node, it produces a transformed ast and
- * a function that will take care of binding and substitution of the next ast (to the right).
- *
- */
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = caseDef match { case CaseDef(pattern, guard, body) =>
- translatePattern(scrutSym, pattern) ++ translateGuard(guard) :+ translateBody(body, pt)
- }
-
- def translatePattern(patBinder: Symbol, patTree: Tree): List[TreeMaker] = {
- // a list of TreeMakers that encode `patTree`, and a list of arguments for recursive invocations of `translatePattern` to encode its subpatterns
- type TranslationStep = (List[TreeMaker], List[(Symbol, Tree)])
- def withSubPats(treeMakers: List[TreeMaker], subpats: (Symbol, Tree)*): TranslationStep = (treeMakers, subpats.toList)
- def noFurtherSubPats(treeMakers: TreeMaker*): TranslationStep = (treeMakers.toList, Nil)
-
- val pos = patTree.pos
-
- def translateExtractorPattern(extractor: ExtractorCall): TranslationStep = {
- if (!extractor.isTyped) ErrorUtils.issueNormalTypeError(patTree, "Could not typecheck extractor call: "+ extractor)(context)
- // if (extractor.resultInMonad == ErrorType) throw new TypeError(pos, "Unsupported extractor type: "+ extractor.tpe)
-
- patmatDebug("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType))
-
- // must use type `tp`, which is provided by extractor's result, not the type expected by binder,
- // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation
- // (it will later result in a type test when `tp` is not a subtype of `b.info`)
- // TODO: can we simplify this, together with the Bound case?
- (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) =>
- patmatDebug("changing "+ b +" : "+ b.info +" -> "+ tp)
- b setInfo tp
- }
-
- // example check: List[Int] <:< ::[Int]
- // TODO: extractor.paramType may contain unbound type params (run/t2800, run/t3530)
- // `patBinderOrCasted` is assigned the result of casting `patBinder` to `extractor.paramType`
- val (typeTestTreeMaker, patBinderOrCasted, binderKnownNonNull) =
- if (patBinder.info.widen <:< extractor.paramType) {
- // no type test needed, but the tree maker relies on `patBinderOrCasted` having type `extractor.paramType` (and not just some type compatible with it)
- // SI-6624 shows this is necessary because apparently patBinder may have an unfortunate type (.decls don't have the case field accessors)
- // TODO: get to the bottom of this -- I assume it happens when type checking infers a weird type for an unapply call
- // by going back to the parameterType for the extractor call we get a saner type, so let's just do that for now
- /* TODO: uncomment when `settings.developer` and `devWarning` become available
- if (settings.developer.value && !(patBinder.info =:= extractor.paramType))
- devWarning(s"resetting info of $patBinder: ${patBinder.info} to ${extractor.paramType}")
- */
- (Nil, patBinder setInfo extractor.paramType, false)
- } else {
- // chain a type-testing extractor before the actual extractor call
- // it tests the type, checks the outer pointer and casts to the expected type
- // TODO: the outer check is mandated by the spec for case classes, but we do it for user-defined unapplies as well [SPEC]
- // (the prefix of the argument passed to the unapply must equal the prefix of the type of the binder)
- val treeMaker = TypeTestTreeMaker(patBinder, patBinder, extractor.paramType, extractor.paramType)(pos, extractorArgTypeTest = true)
-
- // check whether typetest implies patBinder is not null,
- // even though the eventual null check will be on patBinderOrCasted
- // it'll be equal to patBinder casted to extractor.paramType anyway (and the type test is on patBinder)
- (List(treeMaker), treeMaker.nextBinder, treeMaker.impliesBinderNonNull(patBinder))
- }
-
- withSubPats(typeTestTreeMaker :+ extractor.treeMaker(patBinderOrCasted, binderKnownNonNull, pos), extractor.subBindersAndPatterns: _*)
- }
-
-
- object MaybeBoundTyped {
- /** Decompose the pattern in `tree`, of shape C(p_1, ..., p_N), into a list of N symbols, and a list of its N sub-trees
- * The list of N symbols contains symbols for every bound name as well as the un-named sub-patterns (fresh symbols are generated here for these).
- * The returned type is the one inferred by inferTypedPattern (`owntype`)
- *
- * @arg patBinder symbol used to refer to the result of the previous pattern's extractor (will later be replaced by the outer tree with the correct tree to refer to that patterns result)
- */
- def unapply(tree: Tree): Option[(Symbol, Type)] = tree match {
- // the Ident subpattern can be ignored, subpatBinder or patBinder tell us all we need to know about it
- case Bound(subpatBinder, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((subpatBinder, typed.tpe))
- case Bind(_, typed@Typed(Ident(_), tpt)) if typed.tpe ne null => Some((patBinder, typed.tpe))
- case Typed(Ident(_), tpt) if tree.tpe ne null => Some((patBinder, tree.tpe))
- case _ => None
- }
- }
-
- val (treeMakers, subpats) = patTree match {
- // skip wildcard trees -- no point in checking them
- case WildcardPattern() => noFurtherSubPats()
- case UnApply(unfun, args) =>
- // TODO: check unargs == args
- // patmatDebug("unfun: "+ (unfun.tpe, unfun.symbol.ownerChain, unfun.symbol.info, patBinder.info))
- translateExtractorPattern(ExtractorCall(unfun, args))
-
- /** A constructor pattern is of the form c(p1, ..., pn) where n ≥ 0.
- It consists of a stable identifier c, followed by element patterns p1, ..., pn.
- The constructor c is a simple or qualified name which denotes a case class (§5.3.2).
-
- If the case class is monomorphic, then it must conform to the expected type of the pattern,
- and the formal parameter types of x’s primary constructor (§5.3) are taken as the expected types of the element patterns p1, ..., pn.
-
- If the case class is polymorphic, then its type parameters are instantiated so that the instantiation of c conforms to the expected type of the pattern.
- The instantiated formal parameter types of c’s primary constructor are then taken as the expected types of the component patterns p1, ..., pn.
-
- The pattern matches all objects created from constructor invocations c(v1, ..., vn) where each element pattern pi matches the corresponding value vi .
- A special case arises when c’s formal parameter types end in a repeated parameter. This is further discussed in (§8.1.9).
- **/
- case Apply(fun, args) =>
- ExtractorCall.fromCaseClass(fun, args) map translateExtractorPattern getOrElse {
- ErrorUtils.issueNormalTypeError(patTree, "Could not find unapply member for "+ fun +" with args "+ args)(context)
- noFurtherSubPats()
- }
-
- /** A typed pattern x : T consists of a pattern variable x and a type pattern T.
- The type of x is the type pattern T, where each type variable and wildcard is replaced by a fresh, unknown type.
- This pattern matches any value matched by the type pattern T (§8.2); it binds the variable name to that value.
- **/
- // must treat Typed and Bind together -- we need to know the patBinder of the Bind pattern to get at the actual type
- case MaybeBoundTyped(subPatBinder, pt) =>
- val next = glb(List(patBinder.info.widen, pt)).normalize
- // a typed pattern never has any subtrees
- noFurtherSubPats(TypeTestTreeMaker(subPatBinder, patBinder, pt, next)(pos))
-
- /** A pattern binder x@p consists of a pattern variable x and a pattern p.
- The type of the variable x is the static type T of the pattern p.
- This pattern matches any value v matched by the pattern p,
- provided the run-time type of v is also an instance of T, <-- TODO! https://issues.scala-lang.org/browse/SI-1503
- and it binds the variable name to that value.
- **/
- case Bound(subpatBinder, p) =>
- // replace subpatBinder by patBinder (as if the Bind was not there)
- withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)),
- // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast
- (patBinder, p)
- )
-
- /** 8.1.4 Literal Patterns
- A literal pattern L matches any value that is equal (in terms of ==) to the literal L.
- The type of L must conform to the expected type of the pattern.
-
- 8.1.5 Stable Identifier Patterns (a stable identifier r (see §3.1))
- The pattern matches any value v such that r == v (§12.1).
- The type of r must conform to the expected type of the pattern.
- **/
- case Literal(Constant(_)) | Ident(_) | Select(_, _) | This(_) =>
- noFurtherSubPats(EqualityTestTreeMaker(patBinder, patTree, pos))
-
- case Alternative(alts) =>
- noFurtherSubPats(AlternativesTreeMaker(patBinder, alts map (translatePattern(patBinder, _)), alts.head.pos))
-
- /* TODO: Paul says about future version: I think this should work, and always intended to implement if I can get away with it.
- case class Foo(x: Int, y: String)
- case class Bar(z: Int)
-
- def f(x: Any) = x match { case Foo(x, _) | Bar(x) => x } // x is lub of course.
- */
-
- case Bind(n, p) => // this happens in certain ill-formed programs, there'll be an error later
- patmatDebug("WARNING: Bind tree with unbound symbol "+ patTree)
- noFurtherSubPats() // there's no symbol -- something's wrong... don't fail here though (or should we?)
-
- // case Star(_) | ArrayValue => error("stone age pattern relics encountered!")
-
- case _ =>
- typer.context.unit.error(patTree.pos, s"unsupported pattern: $patTree (a ${patTree.getClass}).\n This is a scalac bug. Tree diagnostics: ${asCompactDebugString(patTree)}.")
- noFurtherSubPats()
- }
-
- treeMakers ++ subpats.flatMap { case (binder, pat) =>
- translatePattern(binder, pat) // recurse on subpatterns
- }
- }
-
- def translateGuard(guard: Tree): List[TreeMaker] =
- if (guard == EmptyTree) Nil
- else List(GuardTreeMaker(guard))
-
- // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one),
- // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand?
- // to enable this, probably need to move away from Option to a monad specific to pattern-match,
- // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad
- // 2) body.tpe is the type of the body after applying the substitution that represents the solution of GADT type inference
- // need the explicit cast in case our substitutions in the body change the type to something that doesn't take GADT typing into account
- def translateBody(body: Tree, matchPt: Type): TreeMaker =
- BodyTreeMaker(body, matchPt)
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// helper methods: they analyze types and trees in isolation, but they are not (directly) concerned with the structure of the overall translation
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- object ExtractorCall {
- def apply(unfun: Tree, args: List[Tree]): ExtractorCall = new ExtractorCallRegular(unfun, args)
-
- def fromCaseClass(fun: Tree, args: List[Tree]): Option[ExtractorCall] = Some(new ExtractorCallProd(fun, args))
-
- // THE PRINCIPLED SLOW PATH -- NOT USED
- // generate a call to the (synthetically generated) extractor of a case class
- // NOTE: it's an apply, not a select, since in general an extractor call may have multiple argument lists (including an implicit one)
- // that we need to preserve, so we supply the scrutinee as Ident(nme.SELECTOR_DUMMY),
- // and replace that dummy by a reference to the actual binder in translateExtractorPattern
- def fromCaseClassUnapply(fun: Tree, args: List[Tree]): Option[ExtractorCall] = {
- // TODO: can we rework the typer so we don't have to do all this twice?
- // undo rewrite performed in (5) of adapt
- val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- val origSym = orig.symbol
- val extractor = unapplyMember(origSym.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe)
-
- if((fun.tpe eq null) || fun.tpe.isError || (extractor eq NoSymbol)) {
- None
- } else {
- // this is a tricky balance: pos/t602.scala, pos/sudoku.scala, run/virtpatmat_alts.scala must all be happy
- // bypass typing at own risk: val extractorCall = Select(orig, extractor) setType caseClassApplyToUnapplyTp(fun.tpe)
- // can't always infer type arguments (pos/t602):
- /* case class Span[K <: Ordered[K]](low: Option[K]) {
- override def equals(x: Any): Boolean = x match {
- case Span((low0 @ _)) if low0 equals low => true
- }
- }*/
- // so... leave undetermined type params floating around if we have to
- // (if we don't infer types, uninstantiated type params show up later: pos/sudoku.scala)
- // (see also run/virtpatmat_alts.scala)
- val savedUndets = context.undetparams
- val extractorCall = try {
- context.undetparams = Nil
- silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
- case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError()
- case _ =>
- // this fails to resolve overloading properly...
- // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
-
- // patmatDebug("funtpe after = "+ fun.tpe.finalResultType)
- // patmatDebug("orig: "+(orig, orig.tpe))
- val tgt = typed(orig, EXPRmode | QUALmode | POLYmode, HasMember(extractor.name)) // can't specify fun.tpe.finalResultType as the type for the extractor's arg,
- // as it may have been inferred incorrectly (see t602, where it's com.mosol.sl.Span[Any], instead of com.mosol.sl.Span[?K])
- // patmatDebug("tgt = "+ (tgt, tgt.tpe))
- val oper = typed(Select(tgt, extractor.name), EXPRmode | FUNmode | POLYmode | TAPPmode, WildcardType)
- // patmatDebug("oper: "+ (oper, oper.tpe))
- Apply(oper, List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway
- }
- } finally context.undetparams = savedUndets
-
- Some(this(extractorCall, args)) // TODO: simplify spliceApply?
- }
- }
- }
-
- abstract class ExtractorCall(val args: List[Tree]) {
- val nbSubPats = args.length
-
- // everything okay, captain?
- def isTyped : Boolean
-
- def isSeq: Boolean
- lazy val lastIsStar = (nbSubPats > 0) && treeInfo.isStar(args.last)
-
- // to which type should the previous binder be casted?
- def paramType : Type
-
- /** Create the TreeMaker that embodies this extractor call
- *
- * `binder` has been casted to `paramType` if necessary
- * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
- * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
- */
- def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker
-
- // `subPatBinders` are the variables bound by this pattern in the following patterns
- // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is)
- lazy val subPatBinders = args map {
- case Bound(b, p) => b
- case p => freshSym(p.pos, prefix = "p")
- }
-
- lazy val subBindersAndPatterns: List[(Symbol, Tree)] = (subPatBinders zip args) map {
- case (b, Bound(_, p)) => (b, p)
- case bp => bp
- }
-
- // never store these in local variables (for PreserveSubPatBinders)
- lazy val ignoredSubPatBinders = (subPatBinders zip args).collect{
- case (b, PatternBoundToUnderscore()) => b
- }.toSet
-
- def subPatTypes: List[Type] =
- if(isSeq) {
- val TypeRef(pre, SeqClass, args) = seqTp
- // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns)
- val formalsWithRepeated = rawSubPatTypes.init :+ typeRef(pre, RepeatedParamClass, args)
-
- if (lastIsStar) formalTypes(formalsWithRepeated, nbSubPats - 1) :+ seqTp
- else formalTypes(formalsWithRepeated, nbSubPats)
- } else rawSubPatTypes
-
- protected def rawSubPatTypes: List[Type]
-
- protected def seqTp = rawSubPatTypes.last baseType SeqClass
- protected def seqLenCmp = rawSubPatTypes.last member nme.lengthCompare
- protected lazy val firstIndexingBinder = rawSubPatTypes.length - 1 // rawSubPatTypes.last is the Seq, thus there are `rawSubPatTypes.length - 1` non-seq elements in the tuple
- protected lazy val lastIndexingBinder = if(lastIsStar) nbSubPats-2 else nbSubPats-1
- protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1
- protected lazy val minLenToCheck = if(lastIsStar) 1 else 0
- protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1)
- protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i)
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require isSeq
- protected def subPatRefsSeq(binder: Symbol): List[Tree] = {
- val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder))
- val nbIndexingIndices = indexingIndices.length
-
- // this error-condition has already been checked by checkStarPatOK:
- // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats))
- // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq
- (((1 to firstIndexingBinder) map tupleSel(binder)) ++
- // then we have to index the binder that represents the sequence for the remaining subpatterns, except for...
- (indexingIndices map codegen.index(seqTree(binder))) ++
- // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder
- (if(!lastIsStar) Nil else List(
- if(nbIndexingIndices == 0) seqTree(binder)
- else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList
- }
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- protected def subPatRefs(binder: Symbol): List[Tree] =
- if (nbSubPats == 0) Nil
- else if (isSeq) subPatRefsSeq(binder)
- else ((1 to nbSubPats) map tupleSel(binder)).toList
-
- protected def lengthGuard(binder: Symbol): Option[Tree] =
- // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- checkedLength map { expectedLength => import CODE._
- // `binder.lengthCompare(expectedLength)`
- def checkExpectedLength = (seqTree(binder) DOT seqLenCmp)(LIT(expectedLength))
-
- // the comparison to perform
- // when the last subpattern is a wildcard-star the expectedLength is but a lower bound
- // (otherwise equality is required)
- def compareOp: (Tree, Tree) => Tree =
- if (lastIsStar) _ INT_>= _
- else _ INT_== _
-
- // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
- (seqTree(binder) ANY_!= NULL) AND compareOp(checkExpectedLength, ZERO)
- }
-
- def checkedLength: Option[Int] =
- // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied
- if (!isSeq || (expectedLength < minLenToCheck)) None
- else Some(expectedLength)
-
- }
-
- // TODO: to be called when there's a def unapplyProd(x: T): U
- // U must have N members _1,..., _N -- the _i are type checked, call their type Ti,
- //
- // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it)
- class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) {
- // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here:
- /*override def equals(x$1: Any): Boolean = ...
- val o5: Option[com.mosol.sl.Span[Any]] = // Span[Any] --> Any is not a legal type argument for Span!
- */
- // private val orig = fun match {case tpt: TypeTree => tpt.original case _ => fun}
- // private val origExtractorTp = unapplyMember(orig.symbol.filter(sym => reallyExists(unapplyMember(sym.tpe))).tpe).tpe
- // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)
- // patmatDebug("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType)))
- // patmatDebug("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe)))
- private def constructorTp = fun.tpe
-
- def isTyped = fun.isTyped
-
- // to which type should the previous binder be casted?
- def paramType = constructorTp.finalResultType
-
- def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last)
- protected def rawSubPatTypes = constructorTp.paramTypes
-
- /** Create the TreeMaker that embodies this extractor call
- *
- * `binder` has been casted to `paramType` if necessary
- * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null
- * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder
- */
- def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- val paramAccessors = binder.constrParamAccessors
- // binders corresponding to mutable fields should be stored (SI-5158, SI-6070)
- // make an exception for classes under the scala package as they should be well-behaved,
- // to optimize matching on List
- val mutableBinders =
- if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) &&
- (paramAccessors exists (_.isMutable)))
- subPatBinders.zipWithIndex.collect{ case (binder, idx) if paramAccessors(idx).isMutable => binder }
- else Nil
-
- // checks binder ne null before chaining to the next extractor
- ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders)
- }
-
- // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component
- override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._
- val accessors = binder.caseFieldAccessors
- if (accessors isDefinedAt (i-1)) REF(binder) DOT accessors(i-1)
- else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
- }
-
- override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args
- }
-
- class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) {
- private lazy val Some(Apply(extractorCall, _)) = extractorCallIncludingDummy.find{ case Apply(_, List(Ident(nme.SELECTOR_DUMMY))) => true case _ => false }
-
- def tpe = extractorCall.tpe
- def isTyped = (tpe ne NoType) && extractorCall.isTyped && (resultInMonad ne ErrorType)
- def paramType = tpe.paramTypes.head
- def resultType = tpe.finalResultType
- def isSeq = extractorCall.symbol.name == nme.unapplySeq
-
- /** Create the TreeMaker that embodies this extractor call
- *
- * `binder` has been casted to `paramType` if necessary
- * `binderKnownNonNull` is not used in this subclass
- *
- * TODO: implement review feedback by @retronym:
- * Passing the pair of values around suggests:
- * case class Binder(sym: Symbol, knownNotNull: Boolean).
- * Perhaps it hasn't reached critical mass, but it would already clean things up a touch.
- */
- def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = {
- // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern)
- val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted))
- val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type
- ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(subPatBinders, subPatRefs(binder), resultType.typeSymbol == BooleanClass, checkedLength, patBinderOrCasted, ignoredSubPatBinders)
- }
-
- override protected def seqTree(binder: Symbol): Tree =
- if (firstIndexingBinder == 0) CODE.REF(binder)
- else super.seqTree(binder)
-
- // the trees that select the subpatterns on the extractor's result, referenced by `binder`
- // require (nbSubPats > 0 && (!lastIsStar || isSeq))
- override protected def subPatRefs(binder: Symbol): List[Tree] =
- if (!isSeq && nbSubPats == 1) List(CODE.REF(binder)) // special case for extractors
- else super.subPatRefs(binder)
-
- protected def spliceApply(binder: Symbol): Tree = {
- object splice extends Transformer {
- override def transform(t: Tree) = t match {
- case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) =>
- treeCopy.Apply(t, x, List(CODE.REF(binder).setPos(i.pos)))
- case _ => super.transform(t)
- }
- }
- splice.transform(extractorCallIncludingDummy)
- }
-
- // what's the extractor's result type in the monad?
- // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands
- protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else {
- if (resultType.typeSymbol == BooleanClass) UnitClass.tpe
- else matchMonadResult(resultType)
- }
-
- protected lazy val rawSubPatTypes =
- if (resultInMonad.typeSymbol eq UnitClass) Nil
- else if(!isSeq && nbSubPats == 1) List(resultInMonad)
- else getProductArgs(resultInMonad) match {
- case Nil => List(resultInMonad)
- case x => x
- }
-
- override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")."
- }
-
- /** A conservative approximation of which patterns do not discern anything.
- * They are discarded during the translation.
- */
- object WildcardPattern {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Star(WildcardPattern()) => true
- case x: Ident => treeInfo.isVarPattern(x)
- case Alternative(ps) => ps forall (WildcardPattern.unapply(_))
- case EmptyTree => true
- case _ => false
- }
- }
-
- object PatternBoundToUnderscore {
- def unapply(pat: Tree): Boolean = pat match {
- case Bind(nme.WILDCARD, _) => true // don't skip when binding an interesting symbol!
- case Ident(nme.WILDCARD) => true
- case Alternative(ps) => ps forall (PatternBoundToUnderscore.unapply(_))
- case Typed(PatternBoundToUnderscore(), _) => true
- case _ => false
- }
- }
-
- object Bound {
- def unapply(t: Tree): Option[(Symbol, Tree)] = t match {
- case t@Bind(n, p) if (t.symbol ne null) && (t.symbol ne NoSymbol) => // pos/t2429 does not satisfy these conditions
- Some((t.symbol, p))
- case _ => None
- }
- }
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// substitution
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TypedSubstitution extends MatchMonadInterface {
- object Substitution {
- def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to))
- // requires sameLength(from, to)
- def apply(from: List[Symbol], to: List[Tree]) =
- if (from nonEmpty) new Substitution(from, to) else EmptySubstitution
- }
-
- class Substitution(val from: List[Symbol], val to: List[Tree]) {
- // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed,
- // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees.
- def apply(tree: Tree): Tree = {
- // according to -Ystatistics 10% of translateMatch's time is spent in this method...
- // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst
- if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree
- else (new Transformer {
- private def typedIfOrigTyped(to: Tree, origTp: Type): Tree =
- if (origTp == null || origTp == NoType) to
- // important: only type when actually substing and when original tree was typed
- // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors)
- else typer.typed(to, EXPRmode, WildcardType)
-
- override def transform(tree: Tree): Tree = {
- def subst(from: List[Symbol], to: List[Tree]): Tree =
- if (from.isEmpty) tree
- else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate.setPos(tree.pos), tree.tpe)
- else subst(from.tail, to.tail)
-
- tree match {
- case Ident(_) => subst(from, to)
- case _ => super.transform(tree)
- }
- }
- }).transform(tree)
- }
-
-
- // the substitution that chains `other` before `this` substitution
- // forall t: Tree. this(other(t)) == (this >> other)(t)
- def >>(other: Substitution): Substitution = {
- val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) }
- new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly
- }
- override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")")
- }
-
- object EmptySubstitution extends Substitution(Nil, Nil) {
- override def apply(tree: Tree): Tree = tree
- override def >>(other: Substitution): Substitution = other
- }
- }
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// the making of the trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait TreeMakers extends TypedSubstitution { self: CodegenCore =>
- def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, supression: Suppression): (List[List[TreeMaker]], List[Tree]) =
- (cases, Nil)
-
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] =
- None
-
- // for catch (no need to customize match failure)
- def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
- None
-
- abstract class TreeMaker {
- def pos: Position
-
- /** captures the scope and the value of the bindings in patterns
- * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed)
- */
- def substitution: Substitution =
- if (currSub eq null) localSubstitution
- else currSub
-
- protected def localSubstitution: Substitution
-
- private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
- if (currSub ne null) {
- patmatDebug("BUG: incorporateOuterSubstitution called more than once for "+ (this, currSub, outerSubst))
- Thread.dumpStack()
- }
- else currSub = outerSubst >> substitution
- }
- private[this] var currSub: Substitution = null
-
- /** The substitution that specifies the trees that compute the values of the subpattern binders.
- *
- * Should not be used to perform actual substitution!
- * Only used to reason symbolically about the values the subpattern binders are bound to.
- * See TreeMakerToCond#updateSubstitution.
- *
- * Overridden in PreserveSubPatBinders to pretend it replaces the subpattern binders by subpattern refs
- * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
- *
- * TODO: clean this up, would be nicer to have some higher-level way to compute
- * the binders bound by this tree maker and the symbolic values that correspond to them
- */
- def subPatternsAsSubstitution: Substitution = substitution
-
- // build Tree that chains `next` after the current extractor
- def chainBefore(next: Tree)(casegen: Casegen): Tree
- }
-
- trait NoNewBinders extends TreeMaker {
- protected val localSubstitution: Substitution = EmptySubstitution
- }
-
- case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
- def pos = tree.pos
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
- }
-
- case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
- def pos = body.pos
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
- atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
- override def toString = "B"+(body, matchPt)
- }
-
- case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
- val pos = NoPosition
-
- val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
- def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
- override def toString = "S"+ localSubstitution
- }
-
- abstract class FunTreeMaker extends TreeMaker {
- val nextBinder: Symbol
- def pos = nextBinder.pos
- }
-
- abstract class CondTreeMaker extends FunTreeMaker {
- val prevBinder: Symbol
- val nextBinderTp: Type
- val cond: Tree
- val res: Tree
-
- lazy val nextBinder = freshSym(pos, nextBinderTp)
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree =
- atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
- }
-
- // unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
- protected val debugInfoEmitVars = !settings.optimise.value
-
- trait PreserveSubPatBinders extends TreeMaker {
- val subPatBinders: List[Symbol]
- val subPatRefs: List[Tree]
- val ignoredSubPatBinders: Set[Symbol]
-
- // unless `debugInfoEmitVars`, this set should contain the bare minimum for correctness
- // mutable case class fields need to be stored regardless (SI-5158, SI-6070) -- see override in ProductExtractorTreeMaker
- // sub patterns bound to wildcard (_) are never stored as they can't be referenced
- // dirty debuggers will have to get dirty to see the wildcards
- lazy val storedBinders: Set[Symbol] =
- (if (debugInfoEmitVars) subPatBinders.toSet else Set.empty) ++ extraStoredBinders -- ignoredSubPatBinders
-
- // e.g., mutable fields of a case class in ProductExtractorTreeMaker
- def extraStoredBinders: Set[Symbol]
-
- def emitVars = storedBinders.nonEmpty
-
- private lazy val (stored, substed) = (subPatBinders, subPatRefs).zipped.partition{ case (sym, _) => storedBinders(sym) }
-
- protected lazy val localSubstitution: Substitution = if (!emitVars) Substitution(subPatBinders, subPatRefs)
- else {
- val (subPatBindersSubstituted, subPatRefsSubstituted) = substed.unzip
- Substitution(subPatBindersSubstituted.toList, subPatRefsSubstituted.toList)
- }
-
- /** The substitution that specifies the trees that compute the values of the subpattern binders.
- *
- * We pretend to replace the subpattern binders by subpattern refs
- * (Even though we don't do so anymore -- see SI-5158, SI-5739 and SI-6070.)
- */
- override def subPatternsAsSubstitution =
- Substitution(subPatBinders, subPatRefs) >> super.subPatternsAsSubstitution
-
- import CODE._
- def bindSubPats(in: Tree): Tree =
- if (!emitVars) in
- else {
- // binders in `subPatBindersStored` that are referenced by tree `in`
- val usedBinders = new collection.mutable.HashSet[Symbol]()
- // all potentially stored subpat binders
- val potentiallyStoredBinders = stored.unzip._1.toSet
- // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders
- in.foreach(t => if (potentiallyStoredBinders(t.symbol)) usedBinders += t.symbol)
-
- if (usedBinders.isEmpty) in
- else {
- // only store binders actually used
- val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip
- Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(VAL(_) === _), in)
- }
- }
- }
-
- /**
- * Make a TreeMaker that will result in an extractor call specified by `extractor`
- * the next TreeMaker (here, we don't know which it'll be) is chained after this one by flatMap'ing
- * a function with binder `nextBinder` over our extractor's result
- * the function's body is determined by the next TreeMaker
- * (furthermore, the interpretation of `flatMap` depends on the codegen instance we're using).
- *
- * The values for the subpatterns, as computed by the extractor call in `extractor`,
- * are stored in local variables that re-use the symbols in `subPatBinders`.
- * This makes extractor patterns more debuggable (SI-5739).
- */
- case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol)(
- val subPatBinders: List[Symbol],
- val subPatRefs: List[Tree],
- extractorReturnsBoolean: Boolean,
- val checkedLength: Option[Int],
- val prevBinder: Symbol,
- val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
-
- def extraStoredBinders: Set[Symbol] = Set()
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val condAndNext = extraCond match {
- case Some(cond) =>
- casegen.ifThenElseZero(substitution(cond), bindSubPats(substitution(next)))
- case _ =>
- bindSubPats(substitution(next))
- }
- atPos(extractor.pos)(
- if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, condAndNext)
- else casegen.flatMap(extractor, nextBinder, condAndNext)
- )
- }
-
- override def toString = "X"+(extractor, nextBinder.name)
- }
-
- /**
- * An optimized version of ExtractorTreeMaker for Products.
- * For now, this is hard-coded to case classes, and we simply extract the case class fields.
- *
- * The values for the subpatterns, as specified by the case class fields at the time of extraction,
- * are stored in local variables that re-use the symbols in `subPatBinders`.
- * This makes extractor patterns more debuggable (SI-5739) as well as
- * avoiding mutation after the pattern has been matched (SI-5158, SI-6070)
- *
- * TODO: make this user-definable as follows
- * When a companion object defines a method `def unapply_1(x: T): U_1`, but no `def unapply` or `def unapplySeq`,
- * the extractor is considered to match any non-null value of type T
- * the pattern is expected to have as many sub-patterns as there are `def unapply_I(x: T): U_I` methods,
- * and the type of the I'th sub-pattern is `U_I`.
- * The same exception for Seq patterns applies: if the last extractor is of type `Seq[U_N]`,
- * the pattern must have at least N arguments (exactly N if the last argument is annotated with `: _*`).
- * The arguments starting at N (and beyond) are taken from the sequence returned by apply_N,
- * and it is checked that that sequence has enough elements to provide values for all expected sub-patterns.
- *
- * For a case class C, the implementation is assumed to be `def unapply_I(x: C) = x._I`,
- * and the extractor call is inlined under that assumption.
- */
- case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree])(
- val subPatBinders: List[Symbol],
- val subPatRefs: List[Tree],
- val mutableBinders: List[Symbol],
- binderKnownNonNull: Boolean,
- val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
-
- import CODE._
- val nextBinder = prevBinder // just passing through
-
- // mutable binders must be stored to avoid unsoundness or seeing mutation of fields after matching (SI-5158, SI-6070)
- def extraStoredBinders: Set[Symbol] = mutableBinders.toSet
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- val nullCheck = REF(prevBinder) OBJ_NE NULL
- val cond =
- if (binderKnownNonNull) extraCond
- else (extraCond map (nullCheck AND _)
- orElse Some(nullCheck))
-
- cond match {
- case Some(cond) =>
- casegen.ifThenElseZero(cond, bindSubPats(substitution(next)))
- case _ =>
- bindSubPats(substitution(next))
- }
- }
-
- override def toString = "P"+(prevBinder.name, extraCond getOrElse "", localSubstitution)
- }
-
- object TypeTestTreeMaker {
- // factored out so that we can consistently generate other representations of the tree that implements the test
- // (e.g. propositions for exhaustivity and friends, boolean for isPureTypeTest)
- trait TypeTestCondStrategy {
- type Result
-
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result
- // TODO: can probably always widen
- def typeTest(testedBinder: Symbol, expectedTp: Type): Result
- def nonNullTest(testedBinder: Symbol): Result
- def equalsTest(pat: Tree, testedBinder: Symbol): Result
- def eqTest(pat: Tree, testedBinder: Symbol): Result
- def and(a: Result, b: Result): Result
- def tru: Result
- }
-
- object treeCondStrategy extends TypeTestCondStrategy { import CODE._
- type Result = Tree
-
- def and(a: Result, b: Result): Result = a AND b
- def tru = TRUE_typed
- def typeTest(testedBinder: Symbol, expectedTp: Type) = codegen._isInstanceOf(testedBinder, expectedTp)
- def nonNullTest(testedBinder: Symbol) = REF(testedBinder) OBJ_NE NULL
- def equalsTest(pat: Tree, testedBinder: Symbol) = codegen._equals(pat, testedBinder)
- def eqTest(pat: Tree, testedBinder: Symbol) = REF(testedBinder) OBJ_EQ pat
-
- def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
- val expectedOuter = expectedTp.prefix match {
- case ThisType(clazz) => THIS(clazz)
- case pre if pre != NoType => REF(pre.prefix, pre.termSymbol)
- case _ => TRUE_typed // fallback for SI-6183
- }
-
- // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
- // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
- val outer = expectedTp.typeSymbol.newMethod(vpmName.outer, newFlags = SYNTHETIC | ARTIFACT) setInfo expectedTp.prefix
-
- (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
- }
- }
-
- object pureTypeTestChecker extends TypeTestCondStrategy {
- type Result = Boolean
-
- def typeTest(testedBinder: Symbol, expectedTp: Type): Result = true
-
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
- def nonNullTest(testedBinder: Symbol): Result = false
- def equalsTest(pat: Tree, testedBinder: Symbol): Result = false
- def eqTest(pat: Tree, testedBinder: Symbol): Result = false
- def and(a: Result, b: Result): Result = false // we don't and type tests, so the conjunction must include at least one false
- def tru = true
- }
-
- def nonNullImpliedByTestChecker(binder: Symbol) = new TypeTestCondStrategy {
- type Result = Boolean
-
- def typeTest(testedBinder: Symbol, expectedTp: Type): Result = testedBinder eq binder
- def outerTest(testedBinder: Symbol, expectedTp: Type): Result = false
- def nonNullTest(testedBinder: Symbol): Result = testedBinder eq binder
- def equalsTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
- def eqTest(pat: Tree, testedBinder: Symbol): Result = false // could in principle analyse pat and see if it's statically known to be non-null
- def and(a: Result, b: Result): Result = a || b
- def tru = false
- }
- }
-
- /** implements the run-time aspects of (§8.2) (typedPattern has already done the necessary type transformations)
- *
- * Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms:
- - A reference to a class C, p.C, or T#C.
- This type pattern matches any non-null instance of the given class.
- Note that the prefix of the class, if it is given, is relevant for determining class instances.
- For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix.
- The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case.
-
- - A singleton type p.type.
- This type pattern matches only the value denoted by the path p
- (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now
- // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time"
-
- - A compound type pattern T1 with ... with Tn where each Ti is a type pat- tern.
- This type pattern matches all values that are matched by each of the type patterns Ti.
-
- - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _.
- This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards.
- The bounds or alias type of these type variable are determined as described in (§8.3).
-
- - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO
- This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1.
- **/
- case class TypeTestTreeMaker(prevBinder: Symbol, testedBinder: Symbol, expectedTp: Type, nextBinderTp: Type)(override val pos: Position, extractorArgTypeTest: Boolean = false) extends CondTreeMaker {
- import TypeTestTreeMaker._
- patmatDebug("TTTM"+(prevBinder, extractorArgTypeTest, testedBinder, expectedTp, nextBinderTp))
-
- lazy val outerTestNeeded = (
- !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass)
- && needsOuterTest(expectedTp, testedBinder.info, matchOwner))
-
- // the logic to generate the run-time test that follows from the fact that
- // a `prevBinder` is expected to have type `expectedTp`
- // the actual tree-generation logic is factored out, since the analyses generate Cond(ition)s rather than Trees
- // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null`
- // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false")
- def renderCondition(cs: TypeTestCondStrategy): cs.Result = {
- import cs._
-
- def default =
- // do type test first to ensure we won't select outer on null
- if (outerTestNeeded) and(typeTest(testedBinder, expectedTp), outerTest(testedBinder, expectedTp))
- else typeTest(testedBinder, expectedTp)
-
- // propagate expected type
- def expTp(t: Tree): t.type = t setType expectedTp
-
- // true when called to type-test the argument to an extractor
- // don't do any fancy equality checking, just test the type
- if (extractorArgTypeTest) default
- else expectedTp match {
- // TODO: [SPEC] the spec requires `eq` instead of `==` for singleton types
- // this implies sym.isStable
- case SingleType(_, sym) => and(equalsTest(gen.mkAttributedQualifier(expectedTp), testedBinder), typeTest(testedBinder, expectedTp.widen))
- // must use == to support e.g. List() == Nil
- case ThisType(sym) if sym.isModule => and(equalsTest(CODE.REF(sym), testedBinder), typeTest(testedBinder, expectedTp.widen))
- case ConstantType(Constant(null)) if testedBinder.info.widen <:< AnyRefClass.tpe
- => eqTest(expTp(CODE.NULL), testedBinder)
- case ConstantType(const) => equalsTest(expTp(Literal(const)), testedBinder)
- case ThisType(sym) => eqTest(expTp(This(sym)), testedBinder)
-
- // TODO: verify that we don't need to special-case Array
- // I think it's okay:
- // - the isInstanceOf test includes a test for the element type
- // - Scala's arrays are invariant (so we don't drop type tests unsoundly)
- case _ if testedBinder.info.widen <:< expectedTp =>
- // if the expected type is a primitive value type, it cannot be null and it cannot have an outer pointer
- // since the types conform, no further checking is required
- if (expectedTp.typeSymbol.isPrimitiveValueClass) tru
- // have to test outer and non-null only when it's a reference type
- else if (expectedTp <:< AnyRefClass.tpe) {
- // do non-null check first to ensure we won't select outer on null
- if (outerTestNeeded) and(nonNullTest(testedBinder), outerTest(testedBinder, expectedTp))
- else nonNullTest(testedBinder)
- } else default
-
- case _ => default
- }
- }
-
- val cond = renderCondition(treeCondStrategy)
- val res = codegen._asInstanceOf(testedBinder, nextBinderTp)
-
- // is this purely a type test, e.g. no outer check, no equality tests (used in switch emission)
- def isPureTypeTest = renderCondition(pureTypeTestChecker)
-
- def impliesBinderNonNull(binder: Symbol) = renderCondition(nonNullImpliedByTestChecker(binder))
-
- override def toString = "TT"+(expectedTp, testedBinder.name, nextBinderTp)
- }
-
- // need to substitute to deal with existential types -- TODO: deal with existentials better, don't substitute (see RichClass during quick.comp)
- case class EqualityTestTreeMaker(prevBinder: Symbol, patTree: Tree, override val pos: Position) extends CondTreeMaker {
- val nextBinderTp = prevBinder.info.widen
-
- // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be null)
- // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required)
- val cond = codegen._equals(patTree, prevBinder)
- val res = CODE.REF(prevBinder)
- override def toString = "ET"+(prevBinder.name, patTree)
- }
-
- case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
- // don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
-
- override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
- super.incorporateOuterSubstitution(outerSubst)
- altss = altss map (alts => propagateSubstitution(alts, substitution))
- }
-
- def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
- atPos(pos){
- // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
- // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
- val combinedAlts = altss map (altTreeMakers =>
- ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE_typed)))(casegen))
- )
-
- val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE_typed))
- codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
- }
- }
- }
-
- case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
- val pos = guardTree.pos
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
- override def toString = "G("+ guardTree +")"
- }
-
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- // requires propagateSubstitution(treeMakers) has been called
- def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
- treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
-
-
- def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
-
- // a foldLeft to accumulate the localSubstitution left-to-right
- // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution
- def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = {
- var accumSubst: Substitution = initial
- treeMakers foreach { maker =>
- maker incorporateOuterSubstitution accumSubst
- accumSubst = maker.substitution
- }
- removeSubstOnly(treeMakers)
- }
-
- // calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
- // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
- val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
- combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
- }
-
- // pt is the fully defined type of the cases (either pt or the lub of the types of the cases)
- def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
- fixerUpper(owner, scrut.pos){
- def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
- patmatDebug("combining cases: "+ (casesNoSubstOnly.map(_.mkString(" >> ")).mkString("{", "\n", "}")))
-
- val (suppression, requireSwitch): (Suppression, Boolean) =
- if (settings.XnoPatmatAnalysis.value) (Suppression.NoSuppresion, false)
- else scrut match {
- case Typed(tree, tpt) =>
- val suppressExhaustive = tpt.tpe hasAnnotation UncheckedClass
- val supressUnreachable = tree match {
- case Ident(name) if name startsWith nme.CHECK_IF_REFUTABLE_STRING => true // SI-7183 don't warn for withFilter's that turn out to be irrefutable.
- case _ => false
- }
- val suppression = Suppression(suppressExhaustive, supressUnreachable)
- // matches with two or fewer cases need not apply for switchiness (if-then-else will do)
- val requireSwitch = treeInfo.isSwitchAnnotation(tpt.tpe) && casesNoSubstOnly.lengthCompare(2) > 0
- (suppression, requireSwitch)
- case _ =>
- (Suppression.NoSuppresion, false)
- }
-
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride, suppression.exhaustive).getOrElse{
- if (requireSwitch) typer.context.unit.warning(scrut.pos, "could not emit switch for @switch annotated match")
-
- if (casesNoSubstOnly nonEmpty) {
- // before optimizing, check casesNoSubstOnly for presence of a default case,
- // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
- // exhaustivity and reachability must be checked before optimization as well
- // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
- // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
- // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
- val synthCatchAll =
- if (casesNoSubstOnly.nonEmpty && {
- val nonTrivLast = casesNoSubstOnly.last
- nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
- }) None
- else matchFailGen
-
- val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, suppression)
-
- val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
-
- if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
- } else {
- codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
- }
- }
- }
-
- // TODO: do this during tree construction, but that will require tracking the current owner in treemakers
- // TODO: assign more fine-grained positions
- // fixes symbol nesting, assigns positions
- protected def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser {
- currentOwner = origOwner
-
- override def traverse(t: Tree) {
- if (t != EmptyTree && t.pos == NoPosition) {
- t.setPos(pos)
- }
- t match {
- case Function(_, _) if t.symbol == NoSymbol =>
- t.symbol = currentOwner.newAnonymousFunctionValue(t.pos)
- patmatDebug("new symbol for "+ (t, t.symbol.ownerChain))
- case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) =>
- patmatDebug("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain))
- t.symbol.owner = currentOwner
- case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2)
- patmatDebug("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain))
- if(d.symbol.moduleClass ne NoSymbol)
- d.symbol.moduleClass.owner = currentOwner
-
- d.symbol.owner = currentOwner
- // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) =>
- patmatDebug("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain))
- case _ =>
- }
- super.traverse(t)
- }
-
- // override def apply
- // patmatDebug("before fixerupper: "+ xTree)
- // currentRun.trackerFactory.snapshot()
- // patmatDebug("after fixerupper")
- // currentRun.trackerFactory.snapshot()
- }
- }
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// generate actual trees
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
- trait CodegenCore extends MatchMonadInterface {
- private var ctr = 0
- def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
-
- // assert(owner ne null); assert(owner ne NoSymbol)
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
- NoSymbol.newTermSymbol(freshName(prefix), pos, newFlags = SYNTHETIC) setInfo tp
-
- def newSynthCaseLabel(name: String) =
- NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
-
- // codegen relevant to the structure of the translation (how extractors are combined)
- trait AbsCodegen {
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
-
- // local / context-free
- def _asInstanceOf(b: Symbol, tp: Type): Tree
- def _asInstanceOf(t: Tree, tp: Type): Tree
- def _equals(checker: Tree, binder: Symbol): Tree
- def _isInstanceOf(b: Symbol, tp: Type): Tree
- def and(a: Tree, b: Tree): Tree
- def drop(tgt: Tree)(n: Int): Tree
- def index(tgt: Tree)(i: Int): Tree
- def mkZero(tp: Type): Tree
- def tupleSel(binder: Symbol)(i: Int): Tree
- }
-
- // structure
- trait Casegen extends AbsCodegen { import CODE._
- def one(res: Tree): Tree
-
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
- def flatMapGuard(cond: Tree, next: Tree): Tree
- def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
- protected def zero: Tree
- }
-
- def codegen: AbsCodegen
-
- def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt))
-
- // we use subtyping as a model for implication between instanceof tests
- // i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
- // unfortunately this is not true in general:
- // SI-6022 expects instanceOfTpImplies(ProductClass.tpe, AnyRefClass.tpe)
- def instanceOfTpImplies(tp: Type, tpImplied: Type) = {
- val tpValue = tp.typeSymbol.isPrimitiveValueClass
-
- // pretend we're comparing to Any when we're actually comparing to AnyVal or AnyRef
- // (and the subtype is respectively a value type or not a value type)
- // this allows us to reuse subtyping as a model for implication between instanceOf tests
- // the latter don't see a difference between AnyRef, Object or Any when comparing non-value types -- SI-6022
- val tpImpliedNormalizedToAny =
- if (tpImplied =:= (if (tpValue) AnyValClass.tpe else AnyRefClass.tpe)) AnyClass.tpe
- else tpImplied
-
- tp <:< tpImpliedNormalizedToAny
- }
-
- abstract class CommonCodegen extends AbsCodegen { import CODE._
- def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body)
- def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree)
- def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder
- def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i))
- def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
- def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
- def and(a: Tree, b: Tree): Tree = a AND b
-
- // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type): Tree = if (t.tpe != NoType && t.isTyped && typesConform(t.tpe, tp)) t else gen.mkCastPreservingAnnotations(t, tp)
- def _asInstanceOf(b: Symbol, tp: Type): Tree = if (typesConform(b.info, tp)) REF(b) else gen.mkCastPreservingAnnotations(REF(b), tp)
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), tp.withoutAnnotations, true, false)
- // if (typesConform(b.info, tpX)) { patmatDebug("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
-
- // duplicated out of frustration with cast generation
- def mkZero(tp: Type): Tree = {
- tp.typeSymbol match {
- case UnitClass => Literal(Constant())
- case BooleanClass => Literal(Constant(false))
- case FloatClass => Literal(Constant(0.0f))
- case DoubleClass => Literal(Constant(0.0d))
- case ByteClass => Literal(Constant(0.toByte))
- case ShortClass => Literal(Constant(0.toShort))
- case IntClass => Literal(Constant(0))
- case LongClass => Literal(Constant(0L))
- case CharClass => Literal(Constant(0.toChar))
- case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here
- }
- }
- }
- }
-
- trait PureMatchMonadInterface extends MatchMonadInterface {
- val matchStrategy: Tree
-
- def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType
- def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.headOption getOrElse NoType // fail gracefully (otherwise we get crashes)
- protected def matchMonadSym = oneSig.finalResultType.typeSymbol
-
- import CODE._
- def _match(n: Name): SelectStart = matchStrategy DOT n
-
- private lazy val oneSig: Type =
- typer.typed(_match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message
- }
-
- trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
- def codegen: AbsCodegen = pureCodegen
-
- object pureCodegen extends CommonCodegen with Casegen { import CODE._
- //// methods in MatchingStrategy (the monad companion) -- used directly in translation
- // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
- // TODO: consider catchAll, or virtualized matching will break in exception handlers
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
- _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
-
- // __match.one(`res`)
- def one(res: Tree): Tree = (_match(vpmName.one)) (res)
- // __match.zero
- protected def zero: Tree = _match(vpmName.zero)
- // __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then)
-
- //// methods in the monad instance -- used directly in translation
- // `prev`.flatMap(`b` => `next`)
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
- // `thisCase`.orElse(`elseCase`)
- def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
- // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
- // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
- }
- }
-
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// OPTIMIZATIONS
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-// decisions, decisions
-///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
-
- trait TreeMakerApproximation extends TreeMakers with Prettification{ self: CodegenCore =>
- object Test {
- var currId = 0
- }
- case class Test(cond: Cond, treeMaker: TreeMaker) {
- // private val reusedBy = new scala.collection.mutable.HashSet[Test]
- var reuses: Option[Test] = None
- def registerReuseBy(later: Test): Unit = {
- assert(later.reuses.isEmpty, later.reuses)
- // reusedBy += later
- later.reuses = Some(this)
- }
-
- val id = { Test.currId += 1; Test.currId}
- override def toString =
- "T"+ id + "C("+ cond +")" //+ (reuses map ("== T"+_.id) getOrElse (if(reusedBy.isEmpty) treeMaker else reusedBy mkString (treeMaker+ " -->(", ", ",")")))
- }
-
- // TODO: remove Cond, replace by Prop from Logic
- object Cond {
- var currId = 0
- }
-
- abstract class Cond {
- val id = { Cond.currId += 1; Cond.currId}
- }
-
- case object TrueCond extends Cond {override def toString = "T"}
- case object FalseCond extends Cond {override def toString = "F"}
-
- case class AndCond(a: Cond, b: Cond) extends Cond {override def toString = a +"/\\"+ b}
- case class OrCond(a: Cond, b: Cond) extends Cond {override def toString = "("+a+") \\/ ("+ b +")"}
-
- object EqualityCond {
- private val uniques = new scala.collection.mutable.HashMap[(Tree, Tree), EqualityCond]
- def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs))
- def unapply(c: EqualityCond) = Some((c.testedPath, c.rhs))
- }
- class EqualityCond(val testedPath: Tree, val rhs: Tree) extends Cond {
- override def toString = testedPath +" == "+ rhs +"#"+ id
- }
-
- object NonNullCond {
- private val uniques = new scala.collection.mutable.HashMap[Tree, NonNullCond]
- def apply(testedPath: Tree): NonNullCond = uniques getOrElseUpdate(testedPath, new NonNullCond(testedPath))
- def unapply(c: NonNullCond) = Some(c.testedPath)
- }
- class NonNullCond(val testedPath: Tree) extends Cond {
- override def toString = testedPath +" ne null " +"#"+ id
- }
-
- object TypeCond {
- private val uniques = new scala.collection.mutable.HashMap[(Tree, Type), TypeCond]
- def apply(testedPath: Tree, pt: Type): TypeCond = uniques getOrElseUpdate((testedPath, pt), new TypeCond(testedPath, pt))
- def unapply(c: TypeCond) = Some((c.testedPath, c.pt))
- }
- class TypeCond(val testedPath: Tree, val pt: Type) extends Cond {
- override def toString = testedPath +" : "+ pt +"#"+ id
- }
-
-// class OuterEqCond(val testedPath: Tree, val expectedType: Type) extends Cond {
-// val expectedOuter = expectedTp.prefix match {
-// case ThisType(clazz) => THIS(clazz)
-// case pre => REF(pre.prefix, pre.termSymbol)
-// }
-//
-// // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
-// // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor?
-// val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC
-//
-// (Select(codegen._asInstanceOf(testedBinder, expectedTp), outer)) OBJ_EQ expectedOuter
-// }
-
- // TODO: improve, e.g., for constants
- def sameValue(a: Tree, b: Tree): Boolean = (a eq b) || ((a, b) match {
- case (_ : Ident, _ : Ident) => a.symbol eq b.symbol
- case _ => false
- })
-
- object IrrefutableExtractorTreeMaker {
- // will an extractor with unapply method of methodtype `tp` always succeed?
- // note: this assumes the other side-conditions implied by the extractor are met
- // (argument of the right type, length check succeeds for unapplySeq,...)
- def irrefutableExtractorType(tp: Type): Boolean = tp.resultType.dealias match {
- case TypeRef(_, SomeClass, _) => true
- // probably not useful since this type won't be inferred nor can it be written down (yet)
- case ConstantType(Constant(true)) => true
- case _ => false
- }
-
- def unapply(xtm: ExtractorTreeMaker): Option[(Tree, Symbol)] = xtm match {
- case ExtractorTreeMaker(extractor, None, nextBinder) if irrefutableExtractorType(extractor.tpe) =>
- Some((extractor, nextBinder))
- case _ =>
- None
- }
- }
-
- // returns (tree, tests), where `tree` will be used to refer to `root` in `tests`
- class TreeMakersToConds(val root: Symbol) {
- // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively)
- private val pointsToBound = scala.collection.mutable.HashSet(root)
- private val trees = scala.collection.mutable.HashSet.empty[Tree]
-
- // the substitution that renames variables to variables in pointsToBound
- private var normalize: Substitution = EmptySubstitution
- private var substitutionComputed = false
-
- // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound
- // in the end, instead of having x1, x1.hd, x2, x2.hd, ... flying around,
- // we want something like x1, x1.hd, x1.hd.tl, x1.hd.tl.hd, so that we can easily recognize when
- // we're testing the same variable
- // TODO check:
- // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty
- private var accumSubst: Substitution = EmptySubstitution
-
- // hashconsing trees (modulo value-equality)
- def unique(t: Tree, tpOverride: Type = NoType): Tree =
- trees find (a => a.correspondsStructure(t)(sameValue)) match {
- case Some(orig) =>
- // patmatDebug("unique: "+ (t eq orig, orig))
- orig
- case _ =>
- trees += t
- if (tpOverride != NoType) t setType tpOverride
- else t
- }
-
- def uniqueTp(tp: Type): Type = tp match {
- // typerefs etc are already hashconsed
- case _ : UniqueType => tp
- case tp@RefinedType(parents, EmptyScope) => tp.memo(tp: Type)(identity) // TODO: does this help?
- case _ => tp
- }
-
- // produce the unique tree used to refer to this binder
- // the type of the binder passed to the first invocation
- // determines the type of the tree that'll be returned for that binder as of then
- final def binderToUniqueTree(b: Symbol) =
- unique(accumSubst(normalize(CODE.REF(b))), b.tpe)
-
- def /\(conds: Iterable[Cond]) = if (conds.isEmpty) TrueCond else conds.reduceLeft(AndCond(_, _))
- def \/(conds: Iterable[Cond]) = if (conds.isEmpty) FalseCond else conds.reduceLeft(OrCond(_, _))
-
- // note that the sequencing of operations is important: must visit in same order as match execution
- // binderToUniqueTree uses the type of the first symbol that was encountered as the type for all future binders
- abstract class TreeMakerToCond extends (TreeMaker => Cond) {
- // requires(if (!substitutionComputed))
- def updateSubstitution(subst: Substitution): Unit = {
- // find part of substitution that replaces bound symbols by new symbols, and reverse that part
- // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal
- val (boundSubst, unboundSubst) = (subst.from zip subst.to) partition {
- case (f, t) =>
- t.isInstanceOf[Ident] && (t.symbol ne NoSymbol) && pointsToBound(f)
- }
- val (boundFrom, boundTo) = boundSubst.unzip
- val (unboundFrom, unboundTo) = unboundSubst.unzip
-
- // reverse substitution that would otherwise replace a variable we already encountered by a new variable
- // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay
- normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_)))
- // patmatDebug ("normalize subst: "+ normalize)
-
- val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway
- pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1
- // patmatDebug("pointsToBound: "+ pointsToBound)
-
- accumSubst >>= okSubst
- // patmatDebug("accumSubst: "+ accumSubst)
- }
-
- def handleUnknown(tm: TreeMaker): Cond
-
- /** apply itself must render a faithful representation of the TreeMaker
- *
- * Concretely, TrueCond must only be used to represent a TreeMaker that is sure to match and that does not do any computation at all
- * e.g., doCSE relies on apply itself being sound in this sense (since it drops TreeMakers that are approximated to TrueCond -- SI-6077)
- *
- * handleUnknown may be customized by the caller to approximate further
- *
- * TODO: don't ignore outer-checks
- */
- def apply(tm: TreeMaker): Cond = {
- if (!substitutionComputed) updateSubstitution(tm.subPatternsAsSubstitution)
-
- tm match {
- case ttm@TypeTestTreeMaker(prevBinder, testedBinder, pt, _) =>
- object condStrategy extends TypeTestTreeMaker.TypeTestCondStrategy {
- type Result = Cond
- def and(a: Result, b: Result) = AndCond(a, b)
- def outerTest(testedBinder: Symbol, expectedTp: Type) = TrueCond // TODO OuterEqCond(testedBinder, expectedType)
- def typeTest(b: Symbol, pt: Type) = { // a type test implies the tested path is non-null (null.isInstanceOf[T] is false for all T)
- val p = binderToUniqueTree(b); AndCond(NonNullCond(p), TypeCond(p, uniqueTp(pt)))
- }
- def nonNullTest(testedBinder: Symbol) = NonNullCond(binderToUniqueTree(testedBinder))
- def equalsTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat))
- def eqTest(pat: Tree, testedBinder: Symbol) = EqualityCond(binderToUniqueTree(testedBinder), unique(pat)) // TODO: eq, not ==
- def tru = TrueCond
- }
- ttm.renderCondition(condStrategy)
- case EqualityTestTreeMaker(prevBinder, patTree, _) => EqualityCond(binderToUniqueTree(prevBinder), unique(patTree))
- case AlternativesTreeMaker(_, altss, _) => \/(altss map (alts => /\(alts map this)))
- case ProductExtractorTreeMaker(testedBinder, None) => NonNullCond(binderToUniqueTree(testedBinder))
- case SubstOnlyTreeMaker(_, _) => TrueCond
- case GuardTreeMaker(guard) =>
- guard.tpe match {
- case ConstantType(Constant(true)) => TrueCond
- case ConstantType(Constant(false)) => FalseCond
- case _ => handleUnknown(tm)
- }
- case ExtractorTreeMaker(_, _, _) |
- ProductExtractorTreeMaker(_, _) |
- BodyTreeMaker(_, _) => handleUnknown(tm)
- }
- }
- }
-
-
- private val irrefutableExtractor: PartialFunction[TreeMaker, Cond] = {
- // the extra condition is None, the extractor's result indicates it always succeeds,
- // (the potential type-test for the argument is represented by a separate TypeTestTreeMaker)
- case IrrefutableExtractorTreeMaker(_, _) => TrueCond
- }
-
- // special-case: interpret pattern `List()` as `Nil`
- // TODO: make it more general List(1, 2) => 1 :: 2 :: Nil -- not sure this is a good idea...
- private val rewriteListPattern: PartialFunction[TreeMaker, Cond] = {
- case p @ ExtractorTreeMaker(_, _, testedBinder)
- if testedBinder.tpe.typeSymbol == ListClass && p.checkedLength == Some(0) =>
- EqualityCond(binderToUniqueTree(p.prevBinder), unique(Ident(NilModule) setType NilModule.tpe))
- }
- val fullRewrite = (irrefutableExtractor orElse rewriteListPattern)
- val refutableRewrite = irrefutableExtractor
-
- @inline def onUnknown(handler: TreeMaker => Cond) = new TreeMakerToCond {
- def handleUnknown(tm: TreeMaker) = handler(tm)
- }
-
- // used for CSE -- rewrite all unknowns to False (the most conserative option)
- object conservative extends TreeMakerToCond {
- def handleUnknown(tm: TreeMaker) = FalseCond
- }
-
- final def approximateMatch(cases: List[List[TreeMaker]], treeMakerToCond: TreeMakerToCond = conservative) ={
- val testss = cases.map { _ map (tm => Test(treeMakerToCond(tm), tm)) }
- substitutionComputed = true // a second call to approximateMatch should not re-compute the substitution (would be wrong)
- testss
- }
- }
-
- def approximateMatchConservative(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] =
- (new TreeMakersToConds(root)).approximateMatch(cases)
-
- def showTreeMakers(cases: List[List[TreeMaker]]) = {
- patmatDebug("treeMakers:")
- patmatDebug(alignAcrossRows(cases, ">>"))
- }
-
- def showTests(testss: List[List[Test]]) = {
- patmatDebug("tests: ")
- patmatDebug(alignAcrossRows(testss, "&"))
- }
- }
-
- trait Prettification {
- private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max
-
- def alignedColumns(cols: Seq[AnyRef]): Seq[String] = {
- def toString(x: AnyRef) = if (x eq null) "" else x.toString
- if (cols.isEmpty || cols.tails.isEmpty) cols map toString
- else {
- val (colStrs, colLens) = cols map {c => val s = toString(c); (s, s.length)} unzip
- val maxLen = max(colLens)
- val avgLen = colLens.sum/colLens.length
- val goalLen = maxLen min avgLen*2
- def pad(s: String) = {
- val toAdd = ((goalLen - s.length) max 0) + 2
- (" " * (toAdd/2)) + s + (" " * (toAdd/2 + (toAdd%2)))
- }
- cols map (x => pad(toString(x)))
- }
- }
- def alignAcrossRows(xss: List[List[AnyRef]], sep: String, lineSep: String = "\n"): String = {
- val maxLen = max(xss map (_.length))
- val padded = xss map (xs => xs ++ List.fill(maxLen - xs.length)(null))
- padded.transpose.map(alignedColumns).transpose map (_.mkString(sep)) mkString(lineSep)
- }
- }
-
- // http://www.cis.upenn.edu/~cis510/tcl/chap3.pdf
- // http://users.encs.concordia.ca/~ta_ahmed/ms_thesis.pdf
- trait Logic extends Prettification {
- class Prop
- case class Eq(p: Var, q: Const) extends Prop
-
- type Const
-
- type TypeConst <: Const
- def TypeConst: TypeConstExtractor
- trait TypeConstExtractor {
- def apply(tp: Type): Const
- }
-
- def NullConst: Const
-
- type Var <: AbsVar
-
- trait AbsVar {
- // indicate we may later require a prop for V = C
- def registerEquality(c: Const): Unit
-
- // call this to indicate null is part of the domain
- def registerNull(): Unit
-
- // can this variable be null?
- def mayBeNull: Boolean
-
- // compute the domain and return it (call registerNull first!)
- def domainSyms: Option[Set[Sym]]
-
- // the symbol for this variable being equal to its statically known type
- // (only available if registerEquality has been called for that type before)
- def symForStaticTp: Option[Sym]
-
- // for this var, call it V, turn V = C into the equivalent proposition in boolean logic
- // registerEquality(c) must have been called prior to this call
- // in fact, all equalities relevant to this variable must have been registered
- def propForEqualsTo(c: Const): Prop
-
- // populated by registerEquality
- // once implications has been called, must not call registerEquality anymore
- def implications: List[(Sym, List[Sym], List[Sym])]
- }
-
- // would be nice to statically check whether a prop is equational or pure,
- // but that requires typing relations like And(x: Tx, y: Ty) : (if(Tx == PureProp && Ty == PureProp) PureProp else Prop)
- case class And(a: Prop, b: Prop) extends Prop
- case class Or(a: Prop, b: Prop) extends Prop
- case class Not(a: Prop) extends Prop
-
- case object True extends Prop
- case object False extends Prop
-
- // symbols are propositions
- abstract case class Sym(val variable: Var, val const: Const) extends Prop {
- private[this] val id = Sym.nextSymId
-
- override def toString = variable +"="+ const +"#"+ id
- }
- class UniqueSym(variable: Var, const: Const) extends Sym(variable, const)
- object Sym {
- private val uniques: util.HashSet[Sym] = new util.HashSet("uniques", 512)
- def apply(variable: Var, const: Const): Sym = {
- val newSym = new UniqueSym(variable, const)
- (uniques findEntryOrUpdate newSym)
- }
- private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
- }
-
- def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
- def \/(props: Iterable[Prop]) = if (props.isEmpty) False else props.reduceLeft(Or(_, _))
-
- trait PropTraverser {
- def apply(x: Prop): Unit = x match {
- case And(a, b) => apply(a); apply(b)
- case Or(a, b) => apply(a); apply(b)
- case Not(a) => apply(a)
- case Eq(a, b) => applyVar(a); applyConst(b)
- case _ =>
- }
- def applyVar(x: Var): Unit = {}
- def applyConst(x: Const): Unit = {}
- }
-
- def gatherVariables(p: Prop): Set[Var] = {
- val vars = new HashSet[Var]()
- (new PropTraverser {
- override def applyVar(v: Var) = vars += v
- })(p)
- vars.toSet
- }
-
- trait PropMap {
- def apply(x: Prop): Prop = x match { // TODO: mapConserve
- case And(a, b) => And(apply(a), apply(b))
- case Or(a, b) => Or(apply(a), apply(b))
- case Not(a) => Not(apply(a))
- case p => p
- }
- }
-
- // convert finite domain propositional logic with subtyping to pure boolean propositional logic
- // a type test or a value equality test are modelled as a variable being equal to some constant
- // a variable V may be assigned multiple constants, as long as they do not contradict each other
- // according to subtyping, e.g., V = ConstantType(1) and V = Int are valid assignments
- // we rewrite V = C to a fresh boolean symbol, and model what we know about the variable's domain
- // in a prelude (the equality axioms)
- // 1. a variable with a closed domain (of a sealed type) must be assigned one of the instantiatable types in its domain
- // 2. for each variable V in props, and each constant C it is compared to,
- // compute which assignments imply each other (as in the example above: V = 1 implies V = Int)
- // and which assignments are mutually exclusive (V = String implies -(V = Int))
- //
- // note that this is a conservative approximation: V = Constant(A) and V = Constant(B)
- // are considered mutually exclusive (and thus both cases are considered reachable in {case A => case B =>}),
- // even though A may be equal to B (and thus the second case is not "dynamically reachable")
- //
- // TODO: for V1 representing x1 and V2 standing for x1.head, encode that
- // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable)
- // may throw an AnalysisBudget.Exception
- def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Formula, List[Formula]) = {
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null
-
- val vars = new scala.collection.mutable.HashSet[Var]
-
- object gatherEqualities extends PropTraverser {
- override def apply(p: Prop) = p match {
- case Eq(v, c) =>
- vars += v
- v.registerEquality(c)
- case _ => super.apply(p)
- }
- }
-
- object rewriteEqualsToProp extends PropMap {
- override def apply(p: Prop) = p match {
- case Eq(v, c) => v.propForEqualsTo(c)
- case _ => super.apply(p)
- }
- }
-
- props foreach gatherEqualities.apply
- if (modelNull) vars foreach (_.registerNull)
-
- val pure = props map (p => eqFreePropToSolvable(rewriteEqualsToProp(p)))
-
- val eqAxioms = formulaBuilder
- @inline def addAxiom(p: Prop) = addFormula(eqAxioms, eqFreePropToSolvable(p))
-
- patmatDebug("removeVarEq vars: "+ vars)
- vars.foreach { v =>
- // if v.domainSyms.isEmpty, we must consider the domain to be infinite
- // otherwise, since the domain fully partitions the type of the value,
- // exactly one of the types (and whatever it implies, imposed separately) must be chosen
- // consider X ::= A | B | C, and A => B
- // coverage is formulated as: A \/ B \/ C and the implications are
- v.domainSyms foreach { dsyms => addAxiom(\/(dsyms)) }
-
- // when this variable cannot be null the equality corresponding to the type test `(x: T)`, where T is x's static type,
- // is always true; when the variable may be null we use the implication `(x != null) => (x: T)` for the axiom
- v.symForStaticTp foreach { symForStaticTp =>
- if (v.mayBeNull) addAxiom(Or(v.propForEqualsTo(NullConst), symForStaticTp))
- else addAxiom(symForStaticTp)
- }
-
- v.implications foreach { case (sym, implied, excluded) =>
- // when sym is true, what must hold...
- implied foreach (impliedSym => addAxiom(Or(Not(sym), impliedSym)))
- // ... and what must not?
- excluded foreach (excludedSym => addAxiom(Or(Not(sym), Not(excludedSym))))
- }
- }
-
- patmatDebug("eqAxioms:\n"+ cnfString(toFormula(eqAxioms)))
- patmatDebug("pure:"+ pure.map(p => cnfString(p)).mkString("\n"))
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start)
-
- (toFormula(eqAxioms), pure)
- }
-
-
- // an interface that should be suitable for feeding a SAT solver when the time comes
- type Formula
- type FormulaBuilder
-
- // creates an empty formula builder to which more formulae can be added
- def formulaBuilder: FormulaBuilder
-
- // val f = formulaBuilder; addFormula(f, f1); ... addFormula(f, fN)
- // toFormula(f) == andFormula(f1, andFormula(..., fN))
- def addFormula(buff: FormulaBuilder, f: Formula): Unit
- def toFormula(buff: FormulaBuilder): Formula
-
- // the conjunction of formulae `a` and `b`
- def andFormula(a: Formula, b: Formula): Formula
-
- // equivalent formula to `a`, but simplified in a lightweight way (drop duplicate clauses)
- def simplifyFormula(a: Formula): Formula
-
- // may throw an AnalysisBudget.Exception
- def propToSolvable(p: Prop): Formula = {
- val (eqAxioms, pure :: Nil) = removeVarEq(List(p), modelNull = false)
- andFormula(eqAxioms, pure)
- }
-
- // may throw an AnalysisBudget.Exception
- def eqFreePropToSolvable(p: Prop): Formula
- def cnfString(f: Formula): String
-
- type Model = Map[Sym, Boolean]
- val EmptyModel: Model
- val NoModel: Model
-
- def findModelFor(f: Formula): Model
- def findAllModelsFor(f: Formula): List[Model]
- }
-
- trait CNF extends Logic {
- /** Override Array creation for efficiency (to not go through reflection). */
- private implicit val clauseTag: scala.reflect.ClassTag[Clause] = new scala.reflect.ClassTag[Clause] {
- def runtimeClass: java.lang.Class[Clause] = classOf[Clause]
- final override def newArray(len: Int): Array[Clause] = new Array[Clause](len)
- }
-
- import scala.collection.mutable.ArrayBuffer
- type FormulaBuilder = ArrayBuffer[Clause]
- def formulaBuilder = ArrayBuffer[Clause]()
- def formulaBuilderSized(init: Int) = new ArrayBuffer[Clause](init)
- def addFormula(buff: FormulaBuilder, f: Formula): Unit = buff ++= f
- def toFormula(buff: FormulaBuilder): Formula = buff
-
- // CNF: a formula is a conjunction of clauses
- type Formula = FormulaBuilder
- def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
-
- type Clause = Set[Lit]
- // a clause is a disjunction of distinct literals
- def clause(l: Lit*): Clause = l.toSet
-
- type Lit
- def Lit(sym: Sym, pos: Boolean = true): Lit
-
- def andFormula(a: Formula, b: Formula): Formula = a ++ b
- def simplifyFormula(a: Formula): Formula = a.distinct
-
- private def merge(a: Clause, b: Clause) = a ++ b
-
- // throws an AnalysisBudget.Exception when the prop results in a CNF that's too big
- // TODO: be smarter/more efficient about this (http://lara.epfl.ch/w/sav09:tseitin_s_encoding)
- def eqFreePropToSolvable(p: Prop): Formula = {
- def negationNormalFormNot(p: Prop, budget: Int = AnalysisBudget.max): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => Or(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Or(a, b) => And(negationNormalFormNot(a, budget - 1), negationNormalFormNot(b, budget - 1))
- case Not(p) => negationNormalForm(p, budget - 1)
- case True => False
- case False => True
- case s: Sym => Not(s)
- }
-
- def negationNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Prop =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else p match {
- case And(a, b) => And(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Or(a, b) => Or(negationNormalForm(a, budget - 1), negationNormalForm(b, budget - 1))
- case Not(negated) => negationNormalFormNot(negated, budget - 1)
- case True
- | False
- | (_ : Sym) => p
- }
-
- val TrueF = formula()
- val FalseF = formula(clause())
- def lit(s: Sym) = formula(clause(Lit(s)))
- def negLit(s: Sym) = formula(clause(Lit(s, false)))
-
- def conjunctiveNormalForm(p: Prop, budget: Int = AnalysisBudget.max): Formula = {
- def distribute(a: Formula, b: Formula, budget: Int): Formula =
- if (budget <= 0) throw AnalysisBudget.exceeded
- else
- (a, b) match {
- // true \/ _ = true
- // _ \/ true = true
- case (trueA, trueB) if trueA.size == 0 || trueB.size == 0 => TrueF
- // lit \/ lit
- case (a, b) if a.size == 1 && b.size == 1 => formula(merge(a(0), b(0)))
- // (c1 /\ ... /\ cn) \/ d = ((c1 \/ d) /\ ... /\ (cn \/ d))
- // d \/ (c1 /\ ... /\ cn) = ((d \/ c1) /\ ... /\ (d \/ cn))
- case (cs, ds) =>
- val (big, small) = if (cs.size > ds.size) (cs, ds) else (ds, cs)
- big flatMap (c => distribute(formula(c), small, budget - (big.size*small.size)))
- }
-
- if (budget <= 0) throw AnalysisBudget.exceeded
-
- p match {
- case True => TrueF
- case False => FalseF
- case s: Sym => lit(s)
- case Not(s: Sym) => negLit(s)
- case And(a, b) =>
- val cnfA = conjunctiveNormalForm(a, budget - 1)
- val cnfB = conjunctiveNormalForm(b, budget - cnfA.size)
- cnfA ++ cnfB
- case Or(a, b) =>
- val cnfA = conjunctiveNormalForm(a)
- val cnfB = conjunctiveNormalForm(b)
- distribute(cnfA, cnfB, budget - (cnfA.size + cnfB.size))
- }
- }
-
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatCNF) else null
- val res = conjunctiveNormalForm(negationNormalForm(p))
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatCNF, start)
-
- //
- if (Statistics.canEnable) patmatCNFSizes(res.size).value += 1
-
-// patmatDebug("cnf for\n"+ p +"\nis:\n"+cnfString(res))
- res
- }
-
- }
-
- trait DPLLSolver extends CNF {
- // a literal is a (possibly negated) variable
- def Lit(sym: Sym, pos: Boolean = true) = new Lit(sym, pos)
- class Lit(val sym: Sym, val pos: Boolean) {
- override def toString = if (!pos) "-"+ sym.toString else sym.toString
- override def equals(o: Any) = o match {
- case o: Lit => (o.sym eq sym) && (o.pos == pos)
- case _ => false
- }
- override def hashCode = sym.hashCode + pos.hashCode
-
- def unary_- = Lit(sym, !pos)
- }
-
- def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
-
- // adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
- val EmptyModel = Map.empty[Sym, Boolean]
- val NoModel: Model = null
-
- // returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
- def findAllModelsFor(f: Formula): List[Model] = {
- val vars: Set[Sym] = f.flatMap(_ collect {case l: Lit => l.sym}).toSet
- // patmatDebug("vars "+ vars)
- // the negation of a model -(S1=True/False /\ ... /\ SN=True/False) = clause(S1=False/True, ...., SN=False/True)
- def negateModel(m: Model) = clause(m.toSeq.map{ case (sym, pos) => Lit(sym, !pos) } : _*)
-
- def findAllModels(f: Formula, models: List[Model], recursionDepthAllowed: Int = 10): List[Model]=
- if (recursionDepthAllowed == 0) models
- else {
- patmatDebug("find all models for\n"+ cnfString(f))
- val model = findModelFor(f)
- // if we found a solution, conjunct the formula with the model's negation and recurse
- if (model ne NoModel) {
- val unassigned = (vars -- model.keySet).toList
- patmatDebug("unassigned "+ unassigned +" in "+ model)
- def force(lit: Lit) = {
- val model = withLit(findModelFor(dropUnit(f, lit)), lit)
- if (model ne NoModel) List(model)
- else Nil
- }
- val forced = unassigned flatMap { s =>
- force(Lit(s, true)) ++ force(Lit(s, false))
- }
- patmatDebug("forced "+ forced)
- val negated = negateModel(model)
- findAllModels(f :+ negated, model :: (forced ++ models), recursionDepthAllowed - 1)
- }
- else models
- }
-
- findAllModels(f, Nil)
- }
-
- private def withLit(res: Model, l: Lit): Model = if (res eq NoModel) NoModel else res + (l.sym -> l.pos)
- private def dropUnit(f: Formula, unitLit: Lit): Formula = {
- val negated = -unitLit
- // drop entire clauses that are trivially true
- // (i.e., disjunctions that contain the literal we're making true in the returned model),
- // and simplify clauses by dropping the negation of the literal we're making true
- // (since False \/ X == X)
- val dropped = formulaBuilderSized(f.size)
- for {
- clause <- f
- if !(clause contains unitLit)
- } dropped += (clause - negated)
- dropped
- }
-
- def findModelFor(f: Formula): Model = {
- @inline def orElse(a: Model, b: => Model) = if (a ne NoModel) a else b
-
- patmatDebug("DPLL\n"+ cnfString(f))
-
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null
-
- val satisfiableWithModel: Model =
- if (f isEmpty) EmptyModel
- else if(f exists (_.isEmpty)) NoModel
- else f.find(_.size == 1) match {
- case Some(unitClause) =>
- val unitLit = unitClause.head
- // patmatDebug("unit: "+ unitLit)
- withLit(findModelFor(dropUnit(f, unitLit)), unitLit)
- case _ =>
- // partition symbols according to whether they appear in positive and/or negative literals
- val pos = new HashSet[Sym]()
- val neg = new HashSet[Sym]()
- f.foreach{_.foreach{ lit =>
- if (lit.pos) pos += lit.sym else neg += lit.sym
- }}
- // appearing in both positive and negative
- val impures = pos intersect neg
- // appearing only in either positive/negative positions
- val pures = (pos ++ neg) -- impures
-
- if (pures nonEmpty) {
- val pureSym = pures.head
- // turn it back into a literal
- // (since equality on literals is in terms of equality
- // of the underlying symbol and its positivity, simply construct a new Lit)
- val pureLit = Lit(pureSym, pos(pureSym))
- // patmatDebug("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures)
- val simplified = f.filterNot(_.contains(pureLit))
- withLit(findModelFor(simplified), pureLit)
- } else {
- val split = f.head.head
- // patmatDebug("split: "+ split)
- orElse(findModelFor(f :+ clause(split)), findModelFor(f :+ clause(-split)))
- }
- }
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
-
- satisfiableWithModel
- }
- }
-
-
- /**
- * Represent a match as a formula in propositional logic that encodes whether the match matches (abstractly: we only consider types)
- *
- */
- trait SymbolicMatchAnalysis extends TreeMakerApproximation with Logic { self: CodegenCore =>
- def prepareNewAnalysis() = { Var.resetUniques(); Const.resetUniques() }
-
- object Var {
- private var _nextId = 0
- def nextId = {_nextId += 1; _nextId}
-
- def resetUniques() = {_nextId = 0; uniques.clear()}
- private val uniques = new scala.collection.mutable.HashMap[Tree, Var]
- def apply(x: Tree): Var = uniques getOrElseUpdate(x, new Var(x, x.tpe))
- }
- class Var(val path: Tree, staticTp: Type) extends AbsVar {
- private[this] val id: Int = Var.nextId
-
- // private[this] var canModify: Option[Array[StackTraceElement]] = None
- private[this] def ensureCanModify = {} //if (canModify.nonEmpty) patmatDebug("BUG!"+ this +" modified after having been observed: "+ canModify.get.mkString("\n"))
-
- private[this] def observed = {} //canModify = Some(Thread.currentThread.getStackTrace)
-
- // don't access until all potential equalities have been registered using registerEquality
- private[this] val symForEqualsTo = new scala.collection.mutable.HashMap[Const, Sym]
-
- // when looking at the domain, we only care about types we can check at run time
- val staticTpCheckable: Type = checkableType(staticTp)
-
- private[this] var _mayBeNull = false
- def registerNull(): Unit = { ensureCanModify; if (NullTp <:< staticTpCheckable) _mayBeNull = true }
- def mayBeNull: Boolean = _mayBeNull
-
- // case None => domain is unknown,
- // case Some(List(tps: _*)) => domain is exactly tps
- // we enumerate the subtypes of the full type, as that allows us to filter out more types statically,
- // once we go to run-time checks (on Const's), convert them to checkable types
- // TODO: there seems to be bug for singleton domains (variable does not show up in model)
- lazy val domain: Option[Set[Const]] = {
- val subConsts = enumerateSubtypes(staticTp).map{ tps =>
- tps.toSet[Type].map{ tp =>
- val domainC = TypeConst(tp)
- registerEquality(domainC)
- domainC
- }
- }
-
- val allConsts =
- if (mayBeNull) {
- registerEquality(NullConst)
- subConsts map (_ + NullConst)
- } else
- subConsts
-
- observed; allConsts
- }
-
- // populate equalitySyms
- // don't care about the result, but want only one fresh symbol per distinct constant c
- def registerEquality(c: Const): Unit = {ensureCanModify; symForEqualsTo getOrElseUpdate(c, Sym(this, c))}
-
- // return the symbol that represents this variable being equal to the constant `c`, if it exists, otherwise False (for robustness)
- // (registerEquality(c) must have been called prior, either when constructing the domain or from outside)
- def propForEqualsTo(c: Const): Prop = {observed; symForEqualsTo.getOrElse(c, False)}
-
- // [implementation NOTE: don't access until all potential equalities have been registered using registerEquality]p
- /** the information needed to construct the boolean proposition that encods the equality proposition (V = C)
- *
- * that models a type test pattern `_: C` or constant pattern `C`, where the type test gives rise to a TypeConst C,
- * and the constant pattern yields a ValueConst C
- *
- * for exhaustivity, we really only need implication (e.g., V = 1 implies that V = 1 /\ V = Int, if both tests occur in the match,
- * and thus in this variable's equality symbols), but reachability also requires us to model things like V = 1 precluding V = "1"
- */
- lazy val implications = {
- /** when we know V = C, which other equalities must hold
- *
- * in general, equality to some type implies equality to its supertypes
- * (this multi-valued kind of equality is necessary for unreachability)
- * note that we use subtyping as a model for implication between instanceof tests
- * i.e., when S <:< T we assume x.isInstanceOf[S] implies x.isInstanceOf[T]
- * unfortunately this is not true in general (see e.g. SI-6022)
- */
- def implies(lower: Const, upper: Const): Boolean =
- // values and null
- lower == upper ||
- // type implication
- (lower != NullConst && !upper.isValue &&
- instanceOfTpImplies(if (lower.isValue) lower.wideTp else lower.tp, upper.tp))
-
- // if(r) patmatDebug("implies : "+(lower, lower.tp, upper, upper.tp))
- // else patmatDebug("NOT implies: "+(lower, upper))
-
-
- /** does V = C preclude V having value `other`?
- (1) V = null is an exclusive assignment,
- (2) V = A and V = B, for A and B value constants, are mutually exclusive unless A == B
- we err on the safe side, for example:
- - assume `val X = 1; val Y = 1`, then
- (2: Int) match { case X => case Y => <falsely considered reachable> }
- - V = 1 does not preclude V = Int, or V = Any, it could be said to preclude V = String, but we don't model that
-
- (3) for types we could try to do something fancy, but be conservative and just say no
- */
- def excludes(a: Const, b: Const): Boolean =
- a != b && ((a == NullConst || b == NullConst) || (a.isValue && b.isValue))
-
- // if(r) patmatDebug("excludes : "+(a, a.tp, b, b.tp))
- // else patmatDebug("NOT excludes: "+(a, b))
-
-/*
-[ HALF BAKED FANCINESS: //!equalitySyms.exists(common => implies(common.const, a) && implies(common.const, b)))
- when type tests are involved, we reason (conservatively) under a closed world assumption,
- since we are really only trying to counter the effects of the symbols that we introduce to model type tests
- we don't aim to model the whole subtyping hierarchy, simply to encode enough about subtyping to do unreachability properly
-
- consider the following hierarchy:
-
- trait A
- trait B
- trait C
- trait AB extends B with A
-
- // two types are mutually exclusive if there is no equality symbol whose constant implies both
- object Test extends App {
- def foo(x: Any) = x match {
- case _ : C => println("C")
- case _ : AB => println("AB")
- case _ : (A with B) => println("AB'")
- case _ : B => println("B")
- case _ : A => println("A")
- }
-
- of course this kind of reasoning is not true in general,
- but we can safely pretend types are mutually exclusive as long as there are no counter-examples in the match we're analyzing}
-*/
-
- val excludedPair = new scala.collection.mutable.HashSet[ExcludedPair]
-
- case class ExcludedPair(a: Const, b: Const) {
- override def equals(o: Any) = o match {
- case ExcludedPair(aa, bb) => (a == aa && b == bb) || (a == bb && b == aa)
- case _ => false
- }
- // make ExcludedPair(a, b).hashCode == ExcludedPair(b, a).hashCode
- override def hashCode = a.hashCode ^ b.hashCode
- }
-
- equalitySyms map { sym =>
- // if we've already excluded the pair at some point (-A \/ -B), then don't exclude the symmetric one (-B \/ -A)
- // (nor the positive implications -B \/ A, or -A \/ B, which would entail the equality axioms falsifying the whole formula)
- val todo = equalitySyms filterNot (b => (b.const == sym.const) || excludedPair(ExcludedPair(b.const, sym.const)))
- val (excluded, notExcluded) = todo partition (b => excludes(sym.const, b.const))
- val implied = notExcluded filter (b => implies(sym.const, b.const))
-
- patmatDebug("eq axioms for: "+ sym.const)
- patmatDebug("excluded: "+ excluded)
- patmatDebug("implied: "+ implied)
-
- excluded foreach { excludedSym => excludedPair += ExcludedPair(sym.const, excludedSym.const)}
-
- (sym, implied, excluded)
- }
- }
-
- // accessing after calling registerNull will result in inconsistencies
- lazy val domainSyms: Option[Set[Sym]] = domain map { _ map symForEqualsTo }
-
- lazy val symForStaticTp: Option[Sym] = symForEqualsTo.get(TypeConst(staticTpCheckable))
-
- // don't access until all potential equalities have been registered using registerEquality
- private lazy val equalitySyms = {observed; symForEqualsTo.values.toList}
-
- // don't call until all equalities have been registered and registerNull has been called (if needed)
- def describe = toString + ": " + staticTp + domain.map(_.mkString(" ::= ", " | ", "// "+ symForEqualsTo.keys)).getOrElse(symForEqualsTo.keys.mkString(" ::= ", " | ", " | ...")) + " // = " + path
- override def toString = "V"+ id
- }
-
-
- // all our variables range over types
- // a literal constant becomes ConstantType(Constant(v)) when the type allows it (roughly, anyval + string + null)
- // equality between variables: SingleType(x) (note that pattern variables cannot relate to each other -- it's always patternVar == nonPatternVar)
- object Const {
- def resetUniques() = {_nextTypeId = 0; _nextValueId = 0; uniques.clear() ; trees.clear()}
-
- private var _nextTypeId = 0
- def nextTypeId = {_nextTypeId += 1; _nextTypeId}
-
- private var _nextValueId = 0
- def nextValueId = {_nextValueId += 1; _nextValueId}
-
- private val uniques = new scala.collection.mutable.HashMap[Type, Const]
- private[SymbolicMatchAnalysis] def unique(tp: Type, mkFresh: => Const): Const =
- uniques.get(tp).getOrElse(
- uniques.find {case (oldTp, oldC) => oldTp =:= tp} match {
- case Some((_, c)) =>
- patmatDebug("unique const: "+ (tp, c))
- c
- case _ =>
- val fresh = mkFresh
- patmatDebug("uniqued const: "+ (tp, fresh))
- uniques(tp) = fresh
- fresh
- })
-
- private val trees = scala.collection.mutable.HashSet.empty[Tree]
-
- // hashconsing trees (modulo value-equality)
- private[SymbolicMatchAnalysis] def uniqueTpForTree(t: Tree): Type =
- // a new type for every unstable symbol -- only stable value are uniqued
- // technically, an unreachable value may change between cases
- // thus, the failure of a case that matches on a mutable value does not exclude the next case succeeding
- // (and thuuuuus, the latter case must be considered reachable)
- if (!t.symbol.isStable) t.tpe.narrow
- else trees find (a => a.correspondsStructure(t)(sameValue)) match {
- case Some(orig) =>
- patmatDebug("unique tp for tree: "+ (orig, orig.tpe))
- orig.tpe
- case _ =>
- // duplicate, don't mutate old tree (TODO: use a map tree -> type instead?)
- val treeWithNarrowedType = t.duplicate setType t.tpe.narrow
- patmatDebug("uniqued: "+ (t, t.tpe, treeWithNarrowedType.tpe))
- trees += treeWithNarrowedType
- treeWithNarrowedType.tpe
- }
- }
-
- sealed abstract class Const {
- def tp: Type
- def wideTp: Type
-
- def isAny = wideTp.typeSymbol == AnyClass
- def isValue: Boolean //= tp.isStable
-
- // note: use reference equality on Const since they're hash-consed (doing type equality all the time is too expensive)
- // the equals inherited from AnyRef does just this
- }
-
- // find most precise super-type of tp that is a class
- // we skip non-class types (singleton types, abstract types) so that we can
- // correctly compute how types relate in terms of the values they rule out
- // e.g., when we know some value must be of type T, can it still be of type S? (this is the positive formulation of what `excludes` on Const computes)
- // since we're talking values, there must have been a class involved in creating it, so rephrase our types in terms of classes
- // (At least conceptually: `true` is an instance of class `Boolean`)
- private def widenToClass(tp: Type): Type =
- if (tp.typeSymbol.isClass) tp
- else tp.baseType(tp.baseClasses.head)
-
- object TypeConst extends TypeConstExtractor {
- def apply(tp: Type) = {
- if (tp =:= NullTp) NullConst
- else if (tp.isInstanceOf[SingletonType]) ValueConst.fromType(tp)
- else Const.unique(tp, new TypeConst(tp))
- }
- def unapply(c: TypeConst): Some[Type] = Some(c.tp)
- }
-
- // corresponds to a type test that does not imply any value-equality (well, except for outer checks, which we don't model yet)
- sealed class TypeConst(val tp: Type) extends Const {
- assert(!(tp =:= NullTp))
- private[this] val id: Int = Const.nextTypeId
-
- val wideTp = widenToClass(tp)
- def isValue = false
- override def toString = tp.toString //+"#"+ id
- }
-
- // p is a unique type or a constant value
- object ValueConst {
- def fromType(tp: Type) = {
- assert(tp.isInstanceOf[SingletonType])
- val toString = tp match {
- case ConstantType(c) => c.escapedStringValue
- case _ => tp.toString
- }
- Const.unique(tp, new ValueConst(tp, tp.widen, toString))
- }
- def apply(p: Tree) = {
- val tp = p.tpe.normalize
- if (tp =:= NullTp) NullConst
- else {
- val wideTp = widenToClass(tp)
-
- val narrowTp =
- if (tp.isInstanceOf[SingletonType]) tp
- else p match {
- case Literal(c) =>
- if (c.tpe.typeSymbol == UnitClass) c.tpe
- else ConstantType(c)
- case Ident(_) if p.symbol.isStable =>
- // for Idents, can encode uniqueness of symbol as uniqueness of the corresponding singleton type
- // for Selects, which are handled by the next case, the prefix of the select varies independently of the symbol (see pos/virtpatmat_unreach_select.scala)
- singleType(tp.prefix, p.symbol)
- case _ =>
- Const.uniqueTpForTree(p)
- }
-
- val toString =
- if (p.hasSymbol && p.symbol.isStable) p.symbol.name.toString // tp.toString
- else p.toString //+"#"+ id
-
- Const.unique(narrowTp, new ValueConst(narrowTp, checkableType(wideTp), toString)) // must make wide type checkable so that it is comparable to types from TypeConst
- }
- }
- }
- sealed class ValueConst(val tp: Type, val wideTp: Type, override val toString: String) extends Const {
- // patmatDebug("VC"+(tp, wideTp, toString))
- assert(!(tp =:= NullTp)) // TODO: assert(!tp.isStable)
- private[this] val id: Int = Const.nextValueId
- def isValue = true
- }
-
- lazy val NullTp = ConstantType(Constant(null))
- case object NullConst extends Const {
- def tp = NullTp
- def wideTp = NullTp
-
- def isValue = true
- override def toString = "null"
- }
-
-
- // turns a case (represented as a list of abstract tests)
- // into a proposition that is satisfiable if the case may match
- def symbolicCase(tests: List[Test], modelNull: Boolean = false): Prop = {
- def symbolic(t: Cond): Prop = t match {
- case AndCond(a, b) => And(symbolic(a), symbolic(b))
- case OrCond(a, b) => Or(symbolic(a), symbolic(b))
- case TrueCond => True
- case FalseCond => False
- case TypeCond(p, pt) => Eq(Var(p), TypeConst(checkableType(pt)))
- case EqualityCond(p, q) => Eq(Var(p), ValueConst(q))
- case NonNullCond(p) => if (!modelNull) True else Not(Eq(Var(p), NullConst))
- }
-
- val testsBeforeBody = tests.takeWhile(t => !t.treeMaker.isInstanceOf[BodyTreeMaker])
- /\(testsBeforeBody.map(t => symbolic(t.cond)))
- }
-
- // TODO: model dependencies between variables: if V1 corresponds to (x: List[_]) and V2 is (x.hd), V2 cannot be assigned when V1 = null or V1 = Nil
- // right now hackily implement this by pruning counter-examples
- // unreachability would also benefit from a more faithful representation
-
-
- // reachability (dead code)
-
- // computes the first 0-based case index that is unreachable (if any)
- // a case is unreachable if it implies its preceding cases
- // call C the formula that is satisfiable if the considered case matches
- // call P the formula that is satisfiable if the cases preceding it match
- // the case is reachable if there is a model for -P /\ C,
- // thus, the case is unreachable if there is no model for -(-P /\ C),
- // or, equivalently, P \/ -C, or C => P
- def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = {
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null
-
- // use the same approximator so we share variables,
- // but need different conditions depending on whether we're conservatively looking for failure or success
- // don't rewrite List-like patterns, as List() and Nil need to distinguished for unreachability
- val approx = new TreeMakersToConds(prevBinder)
- def approximate(default: Cond) = approx.approximateMatch(cases, approx.onUnknown { tm =>
- approx.refutableRewrite.applyOrElse(tm, (_: TreeMaker) => default )
- })
-
- val testCasesOk = approximate(TrueCond)
- val testCasesFail = approximate(FalseCond)
-
- prepareNewAnalysis()
-
- val propsCasesOk = testCasesOk map (t => symbolicCase(t, modelNull = true))
- val propsCasesFail = testCasesFail map (t => Not(symbolicCase(t, modelNull = true)))
-
- try {
- val (eqAxiomsFail, symbolicCasesFail) = removeVarEq(propsCasesFail, modelNull = true)
- val (eqAxiomsOk, symbolicCasesOk) = removeVarEq(propsCasesOk, modelNull = true)
- val eqAxioms = simplifyFormula(andFormula(eqAxiomsOk, eqAxiomsFail)) // I'm pretty sure eqAxiomsOk == eqAxiomsFail, but not 100% sure.
-
- val prefix = formulaBuilder
- addFormula(prefix, eqAxioms)
-
- var prefixRest = symbolicCasesFail
- var current = symbolicCasesOk
- var reachable = true
- var caseIndex = 0
-
- patmatDebug("reachability, vars:\n"+ ((propsCasesFail flatMap gatherVariables).distinct map (_.describe) mkString ("\n")))
- patmatDebug("equality axioms:\n"+ cnfString(eqAxiomsOk))
-
- // invariant (prefixRest.length == current.length) && (prefix.reverse ++ prefixRest == symbolicCasesFail)
- // termination: prefixRest.length decreases by 1
- while (prefixRest.nonEmpty && reachable) {
- val prefHead = prefixRest.head
- caseIndex += 1
- prefixRest = prefixRest.tail
- if (prefixRest.isEmpty) reachable = true
- else {
- addFormula(prefix, prefHead)
- current = current.tail
- val model = findModelFor(andFormula(current.head, toFormula(prefix)))
-
- // patmatDebug("trying to reach:\n"+ cnfString(current.head) +"\nunder prefix:\n"+ cnfString(prefix))
- // if (NoModel ne model) patmatDebug("reached: "+ modelString(model))
-
- reachable = NoModel ne model
- }
- }
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start)
-
- if (reachable) None else Some(caseIndex)
- } catch {
- case ex: AnalysisBudget.Exception =>
- ex.warn(prevBinder.pos, "unreachability")
- None // CNF budget exceeded
- }
- }
-
- // exhaustivity
-
- // TODO: domain of other feasibly enumerable built-in types (char?)
- def enumerateSubtypes(tp: Type): Option[List[Type]] =
- tp.typeSymbol match {
- // TODO case _ if tp.isTupleType => // recurse into component types?
- case UnitClass =>
- Some(List(UnitClass.tpe))
- case BooleanClass =>
- Some((List(ConstantType(Constant(true)), ConstantType(Constant(false)))))
- // TODO case _ if tp.isTupleType => // recurse into component types
- case modSym: ModuleClassSymbol =>
- Some(List(tp))
- // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte
- case sym if !sym.isSealed || isPrimitiveValueClass(sym) =>
- patmatDebug("enum unsealed "+ (tp, sym, sym.isSealed, isPrimitiveValueClass(sym)))
- None
- case sym =>
- val subclasses = (
- sym.sealedDescendants.toList sortBy (_.sealedSortName)
- // symbols which are both sealed and abstract need not be covered themselves, because
- // all of their children must be and they cannot otherwise be created.
- filterNot (x => x.isSealed && x.isAbstractClass && !isPrimitiveValueClass(x)))
- patmatDebug("enum sealed -- subclasses: "+ (sym, subclasses))
-
- val tpApprox = typer.infer.approximateAbstracts(tp)
- val pre = tpApprox.prefix
- // valid subtypes are turned into checkable types, as we are entering the realm of the dynamic
- val validSubTypes = (subclasses flatMap {sym =>
- // have to filter out children which cannot match: see ticket #3683 for an example
- // compare to the fully known type `tp` (modulo abstract types),
- // so that we can rule out stuff like: sealed trait X[T]; class XInt extends X[Int] --> XInt not valid when enumerating X[String]
- // however, must approximate abstract types in
-
- val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner)
- val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType))
- val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed?
- // patmatDebug("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox))
- if (subTpApprox <:< tpApprox) Some(checkableType(subTp))
- else None
- })
- patmatDebug("enum sealed "+ (tp, tpApprox) + " as "+ validSubTypes)
- Some(validSubTypes)
- }
-
- // approximate a type to the static type that is fully checkable at run time,
- // hiding statically known but dynamically uncheckable information using existential quantification
- // TODO: this is subject to the availability of TypeTags (since an abstract type with a type tag is checkable at run time)
- def checkableType(tp: Type): Type = {
- // TODO: this is extremely rough...
- // replace type args by wildcards, since they can't be checked (don't use existentials: overkill)
- // TODO: when type tags are available, we will check -- when this is implemented, can we take that into account here?
- // similar to typer.infer.approximateAbstracts
- object typeArgsToWildcardsExceptArray extends TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(pre, sym, args) if args.nonEmpty && (sym ne ArrayClass) =>
- TypeRef(pre, sym, args map (_ => WildcardType))
- case _ =>
- mapOver(tp)
- }
- }
-
- val res = typeArgsToWildcardsExceptArray(tp)
- patmatDebug("checkable "+(tp, res))
- res
- }
-
- // a type is "uncheckable" (for exhaustivity) if we don't statically know its subtypes (i.e., it's unsealed)
- // we consider tuple types with at least one component of a checkable type as a checkable type
- def uncheckableType(tp: Type): Boolean = {
- def tupleComponents(tp: Type) = tp.normalize.typeArgs
- val checkable = (
- (isTupleType(tp) && tupleComponents(tp).exists(tp => !uncheckableType(tp)))
- || enumerateSubtypes(tp).nonEmpty)
- // if (!checkable) patmatDebug("deemed uncheckable: "+ tp)
- !checkable
- }
-
- def exhaustive(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[String] = if (uncheckableType(prevBinder.info)) Nil else {
- // customize TreeMakersToConds (which turns a tree of tree makers into a more abstract DAG of tests)
- // - approximate the pattern `List()` (unapplySeq on List with empty length) as `Nil`,
- // otherwise the common (xs: List[Any]) match { case List() => case x :: xs => } is deemed unexhaustive
- // - back off (to avoid crying exhaustive too often) when:
- // - there are guards -->
- // - there are extractor calls (that we can't secretly/soundly) rewrite
- val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null
- var backoff = false
-
- val approx = new TreeMakersToConds(prevBinder)
- val tests = approx.approximateMatch(cases, approx.onUnknown { tm =>
- approx.fullRewrite.applyOrElse[TreeMaker, Cond](tm, {
- case BodyTreeMaker(_, _) => TrueCond // irrelevant -- will be discarded by symbolCase later
- case _ => // patmatDebug("backing off due to "+ tm)
- backoff = true
- FalseCond
- })
- })
-
- if (backoff) Nil else {
- val prevBinderTree = approx.binderToUniqueTree(prevBinder)
-
- prepareNewAnalysis()
-
- val symbolicCases = tests map (symbolicCase(_, modelNull = false))
-
-
- // TODO: null tests generate too much noise, so disabled them -- is there any way to bring them back?
- // assuming we're matching on a non-null scrutinee (prevBinder), when does the match fail?
- // val nonNullScrutineeCond =
- // assume non-null for all the components of the tuple we're matching on (if we're matching on a tuple)
- // if (isTupleType(prevBinder.tpe))
- // prevBinder.tpe.typeArgs.mapWithIndex{case (_, i) => NonNullCond(codegen.tupleSel(prevBinderTree)(i))}.reduceLeft(AndCond)
- // else
- // NonNullCond(prevBinderTree)
- // val matchFails = And(symbolic(nonNullScrutineeCond), Not(symbolicCases reduceLeft (Or(_, _))))
-
- // when does the match fail?
- val matchFails = Not(\/(symbolicCases))
- val vars = gatherVariables(matchFails)
-
- // debug output:
- patmatDebug("analysing:")
- showTreeMakers(cases)
- showTests(tests)
-
- // patmatDebug("\nvars:\n"+ (vars map (_.describe) mkString ("\n")))
- // patmatDebug("\nmatchFails as CNF:\n"+ cnfString(propToSolvable(matchFails)))
-
- try {
- // find the models (under which the match fails)
- val matchFailModels = findAllModelsFor(propToSolvable(matchFails))
-
- val scrutVar = Var(prevBinderTree)
- val counterExamples = matchFailModels.map(modelToCounterExample(scrutVar))
-
- val pruned = CounterExample.prune(counterExamples).map(_.toString).sorted
-
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start)
- pruned
- } catch {
- case ex : AnalysisBudget.Exception =>
- ex.warn(prevBinder.pos, "exhaustivity")
- Nil // CNF budget exceeded
- }
- }
- }
-
- object CounterExample {
- def prune(examples: List[CounterExample]): List[CounterExample] = {
- val distinct = examples.filterNot(_ == NoExample).toSet
- distinct.filterNot(ce => distinct.exists(other => (ce ne other) && ce.coveredBy(other))).toList
- }
- }
-
- // a way to construct a value that will make the match fail: a constructor invocation, a constant, an object of some type)
- class CounterExample {
- protected[SymbolicMatchAnalysis] def flattenConsArgs: List[CounterExample] = Nil
- def coveredBy(other: CounterExample): Boolean = this == other || other == WildcardExample
- }
- case class ValueExample(c: ValueConst) extends CounterExample { override def toString = c.toString }
- case class TypeExample(c: Const) extends CounterExample { override def toString = "(_ : "+ c +")" }
- case class NegativeExample(eqTo: Const, nonTrivialNonEqualTo: List[Const]) extends CounterExample {
- // require(nonTrivialNonEqualTo.nonEmpty, nonTrivialNonEqualTo)
- override def toString = {
- val negation =
- if (nonTrivialNonEqualTo.tail.isEmpty) nonTrivialNonEqualTo.head.toString
- else nonTrivialNonEqualTo.map(_.toString).sorted.mkString("(", ", ", ")")
- "(x: "+ eqTo +" forSome x not in "+ negation +")"
- }
- }
- case class ListExample(ctorArgs: List[CounterExample]) extends CounterExample {
- protected[SymbolicMatchAnalysis] override def flattenConsArgs: List[CounterExample] = ctorArgs match {
- case hd :: tl :: Nil => hd :: tl.flattenConsArgs
- case _ => Nil
- }
- protected[SymbolicMatchAnalysis] lazy val elems = flattenConsArgs
-
- override def coveredBy(other: CounterExample): Boolean =
- other match {
- case other@ListExample(_) =>
- this == other || ((elems.length == other.elems.length) && (elems zip other.elems).forall{case (a, b) => a coveredBy b})
- case _ => super.coveredBy(other)
- }
-
- override def toString = elems.mkString("List(", ", ", ")")
- }
- case class TupleExample(ctorArgs: List[CounterExample]) extends CounterExample {
- override def toString = ctorArgs.mkString("(", ", ", ")")
-
- override def coveredBy(other: CounterExample): Boolean =
- other match {
- case TupleExample(otherArgs) =>
- this == other || ((ctorArgs.length == otherArgs.length) && (ctorArgs zip otherArgs).forall{case (a, b) => a coveredBy b})
- case _ => super.coveredBy(other)
- }
- }
- case class ConstructorExample(cls: Symbol, ctorArgs: List[CounterExample]) extends CounterExample {
- override def toString = cls.decodedName + (if (cls.isModuleClass) "" else ctorArgs.mkString("(", ", ", ")"))
- }
-
- case object WildcardExample extends CounterExample { override def toString = "_" }
- case object NoExample extends CounterExample { override def toString = "??" }
-
- def modelToVarAssignment(model: Model): Map[Var, (Seq[Const], Seq[Const])] =
- model.toSeq.groupBy{f => f match {case (sym, value) => sym.variable} }.mapValues{ xs =>
- val (trues, falses) = xs.partition(_._2)
- (trues map (_._1.const), falses map (_._1.const))
- // should never be more than one value in trues...
- }
-
- def varAssignmentString(varAssignment: Map[Var, (Seq[Const], Seq[Const])]) =
- varAssignment.toSeq.sortBy(_._1.toString).map { case (v, (trues, falses)) =>
- val assignment = "== "+ (trues mkString("(", ", ", ")")) +" != ("+ (falses mkString(", ")) +")"
- v +"(="+ v.path +": "+ v.staticTpCheckable +") "+ assignment
- }.mkString("\n")
-
- def modelString(model: Model) = varAssignmentString(modelToVarAssignment(model))
-
- // return constructor call when the model is a true counter example
- // (the variables don't take into account type information derived from other variables,
- // so, naively, you might try to construct a counter example like _ :: Nil(_ :: _, _ :: _),
- // since we didn't realize the tail of the outer cons was a Nil)
- def modelToCounterExample(scrutVar: Var)(model: Model): CounterExample = {
- // x1 = ...
- // x1.hd = ...
- // x1.tl = ...
- // x1.hd.hd = ...
- // ...
- val varAssignment = modelToVarAssignment(model)
-
- patmatDebug("var assignment for model "+ model +":\n"+ varAssignmentString(varAssignment))
-
- // chop a path into a list of symbols
- def chop(path: Tree): List[Symbol] = path match {
- case Ident(_) => List(path.symbol)
- case Select(pre, name) => chop(pre) :+ path.symbol
- case _ =>
- // patmatDebug("don't know how to chop "+ path)
- Nil
- }
-
- // turn the variable assignments into a tree
- // the root is the scrutinee (x1), edges are labelled by the fields that are assigned
- // a node is a variable example (which is later turned into a counter example)
- object VariableAssignment {
- private def findVar(path: List[Symbol]) = path match {
- case List(root) if root == scrutVar.path.symbol => Some(scrutVar)
- case _ => varAssignment.find{case (v, a) => chop(v.path) == path}.map(_._1)
- }
-
- private val uniques = new scala.collection.mutable.HashMap[Var, VariableAssignment]
- private def unique(variable: Var): VariableAssignment =
- uniques.getOrElseUpdate(variable, {
- val (eqTo, neqTo) = varAssignment.getOrElse(variable, (Nil, Nil)) // TODO
- VariableAssignment(variable, eqTo.toList, neqTo.toList, HashMap.empty)
- })
-
- def apply(variable: Var): VariableAssignment = {
- val path = chop(variable.path)
- val pre = path.init
- val field = path.last
-
- val newCtor = unique(variable)
-
- if (pre.isEmpty) newCtor
- else {
- findVar(pre) foreach { preVar =>
- val outerCtor = this(preVar)
- outerCtor.fields(field) = newCtor
- }
- newCtor
- }
- }
- }
-
- // node in the tree that describes how to construct a counter-example
- case class VariableAssignment(variable: Var, equalTo: List[Const], notEqualTo: List[Const], fields: scala.collection.mutable.Map[Symbol, VariableAssignment]) {
- // need to prune since the model now incorporates all super types of a constant (needed for reachability)
- private lazy val uniqueEqualTo = equalTo filterNot (subsumed => equalTo.exists(better => (better ne subsumed) && instanceOfTpImplies(better.tp, subsumed.tp)))
- private lazy val prunedEqualTo = uniqueEqualTo filterNot (subsumed => variable.staticTpCheckable <:< subsumed.tp)
- private lazy val ctor = (prunedEqualTo match { case List(TypeConst(tp)) => tp case _ => variable.staticTpCheckable }).typeSymbol.primaryConstructor
- private lazy val ctorParams = if (ctor == NoSymbol || ctor.paramss.isEmpty) Nil else ctor.paramss.head
- private lazy val cls = if (ctor == NoSymbol) NoSymbol else ctor.owner
- private lazy val caseFieldAccs = if (cls == NoSymbol) Nil else cls.caseFieldAccessors
-
-
- def allFieldAssignmentsLegal: Boolean =
- (fields.keySet subsetOf caseFieldAccs.toSet) && fields.values.forall(_.allFieldAssignmentsLegal)
-
- private lazy val nonTrivialNonEqualTo = notEqualTo.filterNot{c => c.isAny }
-
- // NoExample if the constructor call is ill-typed
- // (thus statically impossible -- can we incorporate this into the formula?)
- // beBrief is used to suppress negative information nested in tuples -- it tends to get too noisy
- def toCounterExample(beBrief: Boolean = false): CounterExample =
- if (!allFieldAssignmentsLegal) NoExample
- else {
- patmatDebug("describing "+ (variable, equalTo, notEqualTo, fields, cls, allFieldAssignmentsLegal))
- val res = prunedEqualTo match {
- // a definite assignment to a value
- case List(eq: ValueConst) if fields.isEmpty => ValueExample(eq)
-
- // constructor call
- // or we did not gather any information about equality but we have information about the fields
- // --> typical example is when the scrutinee is a tuple and all the cases first unwrap that tuple and only then test something interesting
- case _ if cls != NoSymbol && !isPrimitiveValueClass(cls) &&
- ( uniqueEqualTo.nonEmpty
- || (fields.nonEmpty && prunedEqualTo.isEmpty && notEqualTo.isEmpty)) =>
-
- def args(brevity: Boolean = beBrief) = {
- // figure out the constructor arguments from the field assignment
- val argLen = (caseFieldAccs.length min ctorParams.length)
-
- (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse WildcardExample).toList
- }
-
- cls match {
- case ConsClass => ListExample(args())
- case _ if isTupleSymbol(cls) => TupleExample(args(true))
- case _ => ConstructorExample(cls, args())
- }
-
- // a definite assignment to a type
- case List(eq) if fields.isEmpty => TypeExample(eq)
-
- // negative information
- case Nil if nonTrivialNonEqualTo.nonEmpty =>
- // negation tends to get pretty verbose
- if (beBrief) WildcardExample
- else {
- val eqTo = equalTo.headOption getOrElse TypeConst(variable.staticTpCheckable)
- NegativeExample(eqTo, nonTrivialNonEqualTo)
- }
-
- // not a valid counter-example, possibly since we have a definite type but there was a field mismatch
- // TODO: improve reasoning -- in the mean time, a false negative is better than an annoying false positive
- case _ => NoExample
- }
- patmatDebug("described as: "+ res)
- res
- }
-
- override def toString = toCounterExample().toString
- }
-
- // slurp in information from other variables
- varAssignment.keys.foreach{ v => if (v != scrutVar) VariableAssignment(v) }
-
- // this is the variable we want a counter example for
- VariableAssignment(scrutVar).toCounterExample()
- }
- }
-
-////
- trait CommonSubconditionElimination extends TreeMakerApproximation { self: OptimizedCodegen =>
- /** a flow-sensitive, generalised, common sub-expression elimination
- * reuse knowledge from performed tests
- * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality)
- * when a sub-expression is shared, it is stored in a mutable variable
- * the variable is floated up so that its scope includes all of the program that shares it
- * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree)
- */
- def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
- patmatDebug("before CSE:")
- showTreeMakers(cases)
-
- val testss = approximateMatchConservative(prevBinder, cases)
-
- // interpret:
- val dependencies = new scala.collection.mutable.LinkedHashMap[Test, Set[Cond]]
- val tested = new scala.collection.mutable.HashSet[Cond]
-
- def storeDependencies(test: Test) = {
- val cond = test.cond
-
- def simplify(c: Cond): Set[Cond] = c match {
- case AndCond(a, b) => simplify(a) ++ simplify(b)
- case OrCond(_, _) => Set(FalseCond) // TODO: make more precise
- case NonNullCond(_) => Set(TrueCond) // not worth remembering
- case _ => Set(c)
- }
- val conds = simplify(cond)
-
- if (conds(FalseCond)) false // stop when we encounter a definite "no" or a "not sure"
- else {
- val nonTrivial = conds filterNot (_ == TrueCond)
- if (nonTrivial nonEmpty) {
- tested ++= nonTrivial
-
- // is there an earlier test that checks our condition and whose dependencies are implied by ours?
- dependencies find {
- case (priorTest, deps) =>
- ((simplify(priorTest.cond) == nonTrivial) || // our conditions are implied by priorTest if it checks the same thing directly
- (nonTrivial subsetOf deps) // or if it depends on a superset of our conditions
- ) && (deps subsetOf tested) // the conditions we've tested when we are here in the match satisfy the prior test, and hence what it tested
- } foreach {
- case (priorTest, _) =>
- // if so, note the dependency in both tests
- priorTest registerReuseBy test
- }
-
- dependencies(test) = tested.toSet // copies
- }
- true
- }
- }
-
-
- testss foreach { tests =>
- tested.clear()
- tests dropWhile storeDependencies
- }
- patmatDebug("dependencies: "+ dependencies)
-
- // find longest prefix of tests that reuse a prior test, and whose dependent conditions monotonically increase
- // then, collapse these contiguous sequences of reusing tests
- // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used)
- // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable
- val reused = new scala.collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker]
- var okToCall = false
- val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)}
-
- // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker
- // once this has been computed, we'll know which tree makers are reused,
- // and we'll replace those by the ReusedCondTreeMakers we've constructed (and stored in the reused map)
- val collapsed = testss map { tests =>
- // map tests to the equivalent list of treemakers, replacing shared prefixes by a reusing treemaker
- // if there's no sharing, simply map to the tree makers corresponding to the tests
- var currDeps = Set[Cond]()
- val (sharedPrefix, suffix) = tests span { test =>
- (test.cond == TrueCond) || (for(
- reusedTest <- test.reuses;
- nextDeps <- dependencies.get(reusedTest);
- diff <- (nextDeps -- currDeps).headOption;
- _ <- Some(currDeps = nextDeps))
- yield diff).nonEmpty
- }
-
- val collapsedTreeMakers =
- if (sharedPrefix.isEmpty) None
- else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%)
- for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match {
- case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM)
- case _ =>
- }
-
- patmatDebug("sharedPrefix: "+ sharedPrefix)
- patmatDebug("suffix: "+ sharedPrefix)
- // if the shared prefix contains interesting conditions (!= TrueCond)
- // and the last of such interesting shared conditions reuses another treemaker's test
- // replace the whole sharedPrefix by a ReusingCondTreeMaker
- for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond == TrueCond).headOption;
- lastReused <- lastShared.reuses)
- yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker)
- }
-
- collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains TrueCond-tests, which are dropped above)
- }
- okToCall = true // TODO: remove (debugging)
-
- // replace original treemakers that are reused (as determined when computing collapsed),
- // by ReusedCondTreeMakers
- val reusedMakers = collapsed mapConserve (_ mapConserve reusedOrOrig)
- patmatDebug("after CSE:")
- showTreeMakers(reusedMakers)
- reusedMakers
- }
-
- object ReusedCondTreeMaker {
- def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos)
- }
- class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { import CODE._
- lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE
- lazy val treesToHoist: List[Tree] = {
- nextBinder setFlag MUTABLE
- List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) }
- }
-
- // TODO: finer-grained duplication
- def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
- atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
-
- override def toString = "Memo"+(nextBinder.name, storedCond.name, cond, res, substitution)
- }
-
- case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
- val pos = sharedPrefix.last.treeMaker.pos
-
- lazy val localSubstitution = {
- // replace binder of each dropped treemaker by corresponding binder bound by the most recent reused treemaker
- var mostRecentReusedMaker: ReusedCondTreeMaker = null
- def mapToStored(droppedBinder: Symbol) = if (mostRecentReusedMaker eq null) Nil else List((droppedBinder, REF(mostRecentReusedMaker.nextBinder)))
- val (from, to) = sharedPrefix.flatMap { dropped =>
- dropped.reuses.map(test => toReused(test.treeMaker)).foreach {
- case reusedMaker: ReusedCondTreeMaker =>
- mostRecentReusedMaker = reusedMaker
- case _ =>
- }
-
- // TODO: have super-trait for retrieving the variable that's operated on by a tree maker
- // and thus assumed in scope, either because it binds it or because it refers to it
- dropped.treeMaker match {
- case dropped: FunTreeMaker =>
- mapToStored(dropped.nextBinder)
- case _ => Nil
- }
- }.unzip
- val rerouteToReusedBinders = Substitution(from, to)
-
- val collapsedDroppedSubst = sharedPrefix map (t => (toReused(t.treeMaker).substitution))
-
- collapsedDroppedSubst.foldLeft(rerouteToReusedBinders)(_ >> _)
- }
-
- lazy val lastReusedTreeMaker = sharedPrefix.reverse.flatMap(tm => tm.reuses map (test => toReused(test.treeMaker))).collectFirst{case x: ReusedCondTreeMaker => x}.head
-
- def chainBefore(next: Tree)(casegen: Casegen): Tree = {
- // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift,
- // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)
- }
- override def toString = "R"+(lastReusedTreeMaker.storedCond.name, substitution)
- }
- }
-
-
- //// DCE
- trait DeadCodeElimination extends TreeMakers { self: CodegenCore =>
- // TODO: non-trivial dead-code elimination
- // e.g., the following match should compile to a simple instanceof:
- // case class Ident(name: String)
- // for (Ident(name) <- ts) println(name)
- def doDCE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = {
- // do minimal DCE
- cases
- }
- }
-
- //// SWITCHES -- TODO: operate on Tests rather than TreeMakers
- trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore =>
- import treeInfo.isGuardedCase
-
- abstract class SwitchMaker {
- abstract class SwitchableTreeMakerExtractor { def unapply(x: TreeMaker): Option[Tree] }
- val SwitchableTreeMaker: SwitchableTreeMakerExtractor
-
- def alternativesSupported: Boolean
-
- // when collapsing guarded switch cases we may sometimes need to jump to the default case
- // however, that's not supported in exception handlers, so when we can't jump when we need it, don't emit a switch
- // TODO: make more fine-grained, as we don't always need to jump
- def canJump: Boolean
-
- /** Should exhaustivity analysis be skipped? */
- def unchecked: Boolean
-
-
- def isDefault(x: CaseDef): Boolean
- def defaultSym: Symbol
- def defaultBody: Tree
- def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef
-
- private def sequence[T](xs: List[Option[T]]): Option[List[T]] =
- if (xs exists (_.isEmpty)) None else Some(xs.flatten)
-
- object GuardAndBodyTreeMakers {
- def unapply(tms: List[TreeMaker]): Option[(Tree, Tree)] = {
- tms match {
- case (btm@BodyTreeMaker(body, _)) :: Nil => Some((EmptyTree, btm.substitution(body)))
- case (gtm@GuardTreeMaker(guard)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some((gtm.substitution(guard), btm.substitution(body)))
- case _ => None
- }
- }
- }
-
- private val defaultLabel: Symbol = newSynthCaseLabel("default")
-
- /** Collapse guarded cases that switch on the same constant (the last case may be unguarded).
- *
- * Cases with patterns A and B switch on the same constant iff for all values x that match A also match B and vice versa.
- * (This roughly corresponds to equality on trees modulo alpha renaming and reordering of alternatives.)
- *
- * The rewrite only applies if some of the cases are guarded (this must be checked before invoking this method).
- *
- * The rewrite goes through the switch top-down and merges each case with the subsequent cases it is implied by
- * (i.e. it matches if they match, not taking guards into account)
- *
- * If there are no unreachable cases, all cases can be uniquely assigned to a partition of such 'overlapping' cases,
- * save for the default case (thus we jump to it rather than copying it several times).
- * (The cases in a partition are implied by the principal element of the partition.)
- *
- * The overlapping cases are merged into one case with their guards pushed into the body as follows
- * (with P the principal element of the overlapping patterns Pi):
- *
- * `{case Pi if(G_i) => B_i }*` is rewritten to `case P => {if(G_i) B_i}*`
- *
- * The rewrite fails (and returns Nil) when:
- * (1) there is a subsequence of overlapping cases that has an unguarded case in the middle;
- * only the last case of each subsequence of overlapping cases may be unguarded (this is implied by unreachability)
- *
- * (2) there are overlapping cases that differ (tested by `caseImpliedBy`)
- * cases with patterns A and B are overlapping if for SOME value x, A matches x implies B matches y OR vice versa <-- note the difference with case equality defined above
- * for example `case 'a' | 'b' =>` and `case 'b' =>` are different and overlapping (overlapping and equality disregard guards)
- *
- * The second component of the returned tuple indicates whether we'll need to emit a labeldef to jump to the default case.
- */
- private def collapseGuardedCases(cases: List[CaseDef]): (List[CaseDef], Boolean) = {
- // requires(same.forall(caseEquals(same.head)))
- // requires(same.nonEmpty, same)
- def collapse(same: List[CaseDef], isDefault: Boolean): CaseDef = {
- val commonPattern = same.head.pat
- // jump to default case (either the user-supplied one or the synthetic one)
- // unless we're collapsing the default case: then we re-use the same body as the synthetic catchall (throwing a matcherror, rethrowing the exception)
- val jumpToDefault: Tree =
- if (isDefault || !canJump) defaultBody
- else Apply(Ident(defaultLabel), Nil)
-
- val guardedBody = same.foldRight(jumpToDefault){
- // the last case may be un-guarded (we know it's the last one since fold's accum == jumpToDefault)
- // --> replace jumpToDefault by the un-guarded case's body
- case (CaseDef(_, EmptyTree, b), `jumpToDefault`) => b
- case (cd@CaseDef(_, g, b), els) if isGuardedCase(cd) => If(g, b, els)
- }
-
- // if the cases that we're going to collapse bind variables,
- // must replace them by the single binder introduced by the collapsed case
- val binders = same.collect{case CaseDef(x@Bind(_, _), _, _) if x.symbol != NoSymbol => x.symbol}
- val (pat, guardedBodySubst) =
- if (binders.isEmpty) (commonPattern, guardedBody)
- else {
- // create a single fresh binder to subsume the old binders (and their types)
- // TODO: I don't think the binder's types can actually be different (due to checks in caseEquals)
- // if they do somehow manage to diverge, the lub might not be precise enough and we could get a type error
- // TODO: reuse name exactly if there's only one binder in binders
- val binder = freshSym(binders.head.pos, lub(binders.map(_.tpe)), binders.head.name.toString)
-
- // the patterns in same are equal (according to caseEquals)
- // we can thus safely pick the first one arbitrarily, provided we correct binding
- val origPatWithoutBind = commonPattern match {
- case Bind(b, orig) => orig
- case o => o
- }
- // need to replace `defaultSym` as well -- it's used in `defaultBody` (see `jumpToDefault` above)
- val unifiedBody = guardedBody.substituteSymbols(defaultSym :: binders, binder :: binders.map(_ => binder))
- (Bind(binder, origPatWithoutBind), unifiedBody)
- }
-
- atPos(commonPattern.pos)(CaseDef(pat, EmptyTree, guardedBodySubst))
- }
-
- // requires cases.exists(isGuardedCase) (otherwise the rewrite is pointless)
- var remainingCases = cases
- val collapsed = scala.collection.mutable.ListBuffer.empty[CaseDef]
-
- // when some of collapsed cases (except for the default case itself) did not include an un-guarded case
- // we'll need to emit a labeldef for the default case
- var needDefault = false
-
- while (remainingCases.nonEmpty) {
- val currCase = remainingCases.head
- val currIsDefault = isDefault(CaseDef(currCase.pat, EmptyTree, EmptyTree))
- val (impliesCurr, others) =
- // the default case is implied by all cases, no need to partition (and remainingCases better all be default cases as well)
- if (currIsDefault) (remainingCases.tail, Nil)
- else remainingCases.tail partition (caseImplies(currCase))
-
- val unguardedComesLastOrAbsent =
- (!isGuardedCase(currCase) && impliesCurr.isEmpty) || { val LastImpliesCurr = impliesCurr.length - 1
- impliesCurr.indexWhere(oc => !isGuardedCase(oc)) match {
- // if all cases are guarded we will have to jump to the default case in the final else
- // (except if we're collapsing the default case itself)
- case -1 =>
- if (!currIsDefault) needDefault = true
- true
-
- // last case is not guarded, no need to jump to the default here
- // note: must come after case -1 => (since LastImpliesCurr may be -1)
- case LastImpliesCurr => true
-
- case _ => false
- }}
-
- if (unguardedComesLastOrAbsent /*(1)*/ && impliesCurr.forall(caseEquals(currCase)) /*(2)*/) {
- collapsed += (
- if (impliesCurr.isEmpty && !isGuardedCase(currCase)) currCase
- else collapse(currCase :: impliesCurr, currIsDefault)
- )
-
- remainingCases = others
- } else { // fail
- collapsed.clear()
- remainingCases = Nil
- }
- }
-
- (collapsed.toList, needDefault)
- }
-
- private def caseEquals(x: CaseDef)(y: CaseDef) = patternEquals(x.pat)(y.pat)
- private def patternEquals(x: Tree)(y: Tree): Boolean = (x, y) match {
- case (Alternative(xs), Alternative(ys)) =>
- xs.forall(x => ys.exists(patternEquals(x))) &&
- ys.forall(y => xs.exists(patternEquals(y)))
- case (Alternative(pats), _) => pats.forall(p => patternEquals(p)(y))
- case (_, Alternative(pats)) => pats.forall(q => patternEquals(x)(q))
- // regular switch
- case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
- case (Ident(nme.WILDCARD), Ident(nme.WILDCARD)) => true
- // type-switch for catch
- case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)), Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => tpX.tpe =:= tpY.tpe
- case _ => false
- }
-
- // if y matches then x matches for sure (thus, if x comes before y, y is unreachable)
- private def caseImplies(x: CaseDef)(y: CaseDef) = patternImplies(x.pat)(y.pat)
- private def patternImplies(x: Tree)(y: Tree): Boolean = (x, y) match {
- // since alternatives are flattened, must treat them as separate cases
- case (Alternative(pats), _) => pats.exists(p => patternImplies(p)(y))
- case (_, Alternative(pats)) => pats.exists(q => patternImplies(x)(q))
- // regular switch
- case (Literal(Constant(cx)), Literal(Constant(cy))) => cx == cy
- case (Ident(nme.WILDCARD), _) => true
- // type-switch for catch
- case (Bind(_, Typed(Ident(nme.WILDCARD), tpX)),
- Bind(_, Typed(Ident(nme.WILDCARD), tpY))) => instanceOfTpImplies(tpY.tpe, tpX.tpe)
- case _ => false
- }
-
- private def noGuards(cs: List[CaseDef]): Boolean = !cs.exists(isGuardedCase)
-
- // must do this before removing guards from cases and collapsing (SI-6011, SI-6048)
- private def unreachableCase(cs: List[CaseDef]): Option[CaseDef] = {
- var cases = cs
- var unreachable: Option[CaseDef] = None
-
- while (cases.nonEmpty && unreachable.isEmpty) {
- val currCase = cases.head
- if (isDefault(currCase) && cases.tail.nonEmpty) // subsumed by the `else if` that follows, but faster
- unreachable = Some(cases.tail.head)
- else if (!isGuardedCase(currCase) || currCase.guard.tpe =:= ConstantType(Constant(true)))
- unreachable = cases.tail.find(caseImplies(currCase))
- else if (currCase.guard.tpe =:= ConstantType(Constant(false)))
- unreachable = Some(currCase)
-
- cases = cases.tail
- }
-
- unreachable
- }
-
- // empty list ==> failure
- def apply(cases: List[(Symbol, List[TreeMaker])], pt: Type): List[CaseDef] =
- // generate if-then-else for 1 case switch (avoids verify error... can't imagine a one-case switch being faster than if-then-else anyway)
- if (cases.isEmpty || cases.tail.isEmpty) Nil
- else {
- val caseDefs = cases map { case (scrutSym, makers) =>
- makers match {
- // default case
- case GuardAndBodyTreeMakers(guard, body) =>
- Some(defaultCase(scrutSym, guard, body))
- // constant (or typetest for typeSwitch)
- case SwitchableTreeMaker(pattern) :: GuardAndBodyTreeMakers(guard, body) =>
- Some(CaseDef(pattern, guard, body))
- // alternatives
- case AlternativesTreeMaker(_, altss, _) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported =>
- val switchableAlts = altss map {
- case SwitchableTreeMaker(pattern) :: Nil =>
- Some(pattern)
- case _ =>
- None
- }
-
- // succeed if they were all switchable
- sequence(switchableAlts) map { switchableAlts =>
- CaseDef(Alternative(switchableAlts), guard, body)
- }
- case _ =>
- // patmatDebug("can't emit switch for "+ makers)
- None //failure (can't translate pattern to a switch)
- }
- }
-
- val caseDefsWithGuards = sequence(caseDefs) match {
- case None => return Nil
- case Some(cds) => cds
- }
-
- // a switch with duplicate cases yields a verify error,
- // and a switch with duplicate cases and guards cannot soundly be rewritten to an unguarded switch
- // (even though the verify error would disappear, the behaviour would change)
- val allReachable = unreachableCase(caseDefsWithGuards) map (cd => reportUnreachable(cd.body.pos)) isEmpty
-
- if (!allReachable) Nil
- else if (noGuards(caseDefsWithGuards)) {
- if (isDefault(caseDefsWithGuards.last)) caseDefsWithGuards
- else caseDefsWithGuards :+ defaultCase()
- } else {
- // collapse identical cases with different guards, push guards into body for all guarded cases
- // this translation is only sound if there are no unreachable (duplicate) cases
- // it should only be run if there are guarded cases, and on failure it returns Nil
- val (collapsed, needDefaultLabel) = collapseGuardedCases(caseDefsWithGuards)
-
- if (collapsed.isEmpty || (needDefaultLabel && !canJump)) Nil
- else {
- def wrapInDefaultLabelDef(cd: CaseDef): CaseDef =
- if (needDefaultLabel) deriveCaseDef(cd){ b =>
- // TODO: can b.tpe ever be null? can't really use pt, see e.g. pos/t2683 or cps/match1.scala
- defaultLabel setInfo MethodType(Nil, if (b.tpe != null) b.tpe else pt)
- LabelDef(defaultLabel, Nil, b)
- } else cd
-
- val last = collapsed.last
- if (isDefault(last)) {
- if (!needDefaultLabel) collapsed
- else collapsed.init :+ wrapInDefaultLabelDef(last)
- } else collapsed :+ wrapInDefaultLabelDef(defaultCase())
- }
- }
- }
- }
-
- class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree], val unchecked: Boolean) extends SwitchMaker {
- val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
- val alternativesSupported = true
- val canJump = true
-
- // Constant folding sets the type of a constant tree to `ConstantType(Constant(folded))`
- // The tree itself can be a literal, an ident, a selection, ...
- object SwitchablePattern { def unapply(pat: Tree): Option[Tree] = pat.tpe match {
- case ConstantType(const) if const.isIntRange =>
- Some(Literal(Constant(const.intValue))) // TODO: Java 7 allows strings in switches
- case _ => None
- }}
-
- object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
- def unapply(x: TreeMaker): Option[Tree] = x match {
- case EqualityTestTreeMaker(_, SwitchablePattern(const), _) => Some(const)
- case _ => None
- }
- }
-
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
-
- def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
- def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (DEFAULT IF guard) ==> body
- }}
- }
-
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride, unchecked)
- // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
- if (regularSwitchMaker.switchableTpe(scrutSym.tpe.dealias)) { // TODO: switch to dealiasWiden in 2.11
- val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
- if (caseDefsWithDefault isEmpty) None // not worth emitting a switch.
- else {
- // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
- val scrutToInt: Tree =
- if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
- else (REF(scrutSym) DOT (nme.toInt))
- Some(BLOCK(
- VAL(scrutSym) === scrut,
- Match(scrutToInt, caseDefsWithDefault) // a switch
- ))
- }
- } else None
- }
-
- // for the catch-cases in a try/catch
- private object typeSwitchMaker extends SwitchMaker {
- val unchecked = false
- def switchableTpe(tp: Type) = true
- val alternativesSupported = false // TODO: needs either back-end support of flattening of alternatives during typers
- val canJump = false
-
- // TODO: there are more treemaker-sequences that can be handled by type tests
- // analyze the result of approximateTreeMaker rather than the TreeMaker itself
- object SwitchableTreeMaker extends SwitchableTreeMakerExtractor {
- def unapply(x: TreeMaker): Option[Tree] = x match {
- case tm@TypeTestTreeMaker(_, _, pt, _) if tm.isPureTypeTest => // -- TODO: use this if binder does not occur in the body
- Some(Bind(tm.nextBinder, Typed(Ident(nme.WILDCARD), TypeTree(pt)) /* not used by back-end */))
- case _ =>
- None
- }
- }
-
- def isDefault(x: CaseDef): Boolean = x match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) if (tpt.tpe =:= ThrowableClass.tpe) => true
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true
- case _ => false
- }
-
- lazy val defaultSym: Symbol = freshSym(NoPosition, ThrowableClass.tpe)
- def defaultBody: Tree = Throw(CODE.REF(defaultSym))
- def defaultCase(scrutSym: Symbol = defaultSym, guard: Tree = EmptyTree, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
- (CASE (Bind(scrutSym, Typed(Ident(nme.WILDCARD), TypeTree(ThrowableClass.tpe)))) IF guard) ==> body
- }}
- }
-
- // TODO: drop null checks
- override def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = {
- val caseDefsWithDefault = typeSwitchMaker(bindersAndCases, pt)
- if (caseDefsWithDefault isEmpty) None
- else Some(caseDefsWithDefault)
- }
- }
-
- trait OptimizedMatchMonadInterface extends MatchMonadInterface {
- override def inMatchMonad(tp: Type): Type = optionType(tp)
- override def pureType(tp: Type): Type = tp
- override protected def matchMonadSym = OptionClass
- }
-
- trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface {
- override def codegen: AbsCodegen = optimizedCodegen
-
- // trait AbsOptimizedCodegen extends AbsCodegen {
- // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree
- // }
- // def optimizedCodegen: AbsOptimizedCodegen
-
- // when we know we're targetting Option, do some inlining the optimizer won't do
- // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
- // this is a special instance of the advanced inlining optimization that takes a method call on
- // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- object optimizedCodegen extends CommonCodegen { import CODE._
-
- /** Inline runOrElse and get rid of Option allocations
- *
- * runOrElse(scrut: scrutTp)(matcher): resTp = matcher(scrut) getOrElse ${catchAll(`scrut`)}
- * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
- * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
- */
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
- val matchEnd = newSynthCaseLabel("matchEnd")
- val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- matchEnd setInfo MethodType(List(matchRes), restpe)
-
- def newCaseSym = newSynthCaseLabel("case") setInfo MethodType(Nil, restpe)
- var _currCase = newCaseSym
-
- val caseDefs = cases map { (mkCase: Casegen => Tree) =>
- val currCase = _currCase
- val nextCase = newCaseSym
- _currCase = nextCase
-
- LabelDef(currCase, Nil, mkCase(new OptimizedCasegen(matchEnd, nextCase)))
- }
-
- // must compute catchAll after caseLabels (side-effects nextCase)
- // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
- // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
- val catchAllDef = matchFailGen map { matchFailGen =>
- val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
-
- LabelDef(_currCase, Nil, matchEnd APPLY (matchFailGen(scrutRef)))
- } toList // at most 1 element
-
- // scrutSym == NoSymbol when generating an alternatives matcher
- val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
-
- // the generated block is taken apart in TailCalls under the following assumptions
- // the assumption is once we encounter a case, the remainder of the block will consist of cases
- // the prologue may be empty, usually it is the valdef that stores the scrut
- // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
- Block(
- scrutDef ++ caseDefs ++ catchAllDef,
- LabelDef(matchEnd, List(matchRes), REF(matchRes))
- )
- }
-
- class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
- def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
- optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
-
- // only used to wrap the RHS of a body
- // res: T
- // returns MatchMonad[T]
- def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply
- protected def zero: Tree = nextCase APPLY ()
-
- // prev: MatchMonad[T]
- // b: T
- // next: MatchMonad[U]
- // returns MatchMonad[U]
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
-
- BLOCK(
- VAL(prevSym) === prev,
- // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
- ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
- )
- }
-
- // cond: Boolean
- // res: T
- // nextBinder: T
- // next == MatchMonad[U]
- // returns MatchMonad[U]
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = {
- val rest =
- // only emit a local val for `nextBinder` if it's actually referenced in `next`
- if (next.exists(_.symbol eq nextBinder))
- BLOCK(
- VAL(nextBinder) === res,
- next
- )
- else next
- ifThenElseZero(cond, rest)
- }
-
- // guardTree: Boolean
- // next: MatchMonad[T]
- // returns MatchMonad[T]
- def flatMapGuard(guardTree: Tree, next: Tree): Tree =
- ifThenElseZero(guardTree, next)
-
- def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- ifThenElseZero(cond, BLOCK(
- condSym === TRUE_typed,
- nextBinder === res,
- next
- ))
- }
-
- }
- }
-
-
- trait MatchOptimizations extends CommonSubconditionElimination
- with DeadCodeElimination
- with SwitchEmission
- with OptimizedCodegen
- with SymbolicMatchAnalysis
- with DPLLSolver { self: TreeMakers =>
- override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): (List[List[TreeMaker]], List[Tree]) = {
- if (!suppression.unreachable) {
- unreachableCase(prevBinder, cases, pt) foreach { caseIndex =>
- reportUnreachable(cases(caseIndex).last.pos)
- }
- }
- if (!suppression.exhaustive) {
- val counterExamples = exhaustive(prevBinder, cases, pt)
- if (counterExamples.nonEmpty)
- reportMissingCases(prevBinder.pos, counterExamples)
- }
-
- val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt)
- val toHoist = (
- for (treeMakers <- optCases)
- yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist}
- ).flatten.flatten.toList
- (optCases, toHoist)
- }
- }
-}
-
-object PatternMatchingStats {
- val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat")
- val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos)
- val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos)
- val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter(""))
- val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos)
- val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos)
- val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos)
-}
-
-final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
-
-object Suppression {
- val NoSuppresion = Suppression(false, false)
-} \ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index b9fdd7280e..03ce710700 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -135,7 +135,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
if (settings.lint.value) {
clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym =>
- val alts = clazz.info.decl(sym.name).alternatives
+ // implicit classes leave both a module symbol and a method symbol as residue
+ val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule)
if (alts.size > 1)
alts foreach (x => unit.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds"))
}
@@ -382,7 +383,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
overrideError("cannot be used here - classes can only override abstract types");
} else if (other.isEffectivelyFinal) { // (1.2)
overrideError("cannot override final member");
- } else if (!other.isDeferred && !member.isAnyOverride && !member.isSynthetic) { // (*)
+ } else if (!other.isDeferred && !other.hasFlag(DEFAULTMETHOD) && !member.isAnyOverride && !member.isSynthetic) { // (*)
// (*) Synthetic exclusion for (at least) default getters, fixes SI-5178. We cannot assign the OVERRIDE flag to
// the default getter: one default getter might sometimes override, sometimes not. Example in comment on ticket.
if (isNeitherInClass && !(other.owner isSubClass member.owner))
@@ -1062,9 +1063,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
case _ =>
}
+ private def isObjectOrAnyComparisonMethod(sym: Symbol) = sym match {
+ case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true
+ case _ => false
+ }
def checkSensible(pos: Position, fn: Tree, args: List[Tree]) = fn match {
- case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 =>
- def isReferenceOp = name == nme.eq || name == nme.ne
+ case Select(qual, name @ (nme.EQ | nme.NE | nme.eq | nme.ne)) if args.length == 1 && isObjectOrAnyComparisonMethod(fn.symbol) =>
+ def isReferenceOp = fn.symbol == Object_eq || fn.symbol == Object_ne
def isNew(tree: Tree) = tree match {
case Function(_, _)
| Apply(Select(New(_), nme.CONSTRUCTOR), _) => true
@@ -1101,12 +1106,15 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
val s = fn.symbol
(s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=)
}
+ def haveSubclassRelationship = (actual isSubClass receiver) || (receiver isSubClass actual)
+
// Whether the operands+operator represent a warnable combo (assuming anyrefs)
// Looking for comparisons performed with ==/!= in combination with either an
// equals method inherited from Object or a case class synthetic equals (for
// which we know the logic.)
def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals)
def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info)
+ def isEitherValueClass = actual.isDerivedValueClass || receiver.isDerivedValueClass
def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass
def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass
def isNumeric(s: Symbol) = isNumericValueClass(unboxedValueClass(s)) || isAnyNumber(s)
@@ -1121,6 +1129,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
// unused
def possibleNumericCount = onSyms(_ filter (x => isNumeric(x) || isMaybeValue(x)) size)
val nullCount = onSyms(_ filter (_ == NullClass) size)
+ def isNonsenseValueClassCompare = (
+ !haveSubclassRelationship
+ && isUsingDefaultScalaOp
+ && isEitherValueClass
+ && !isCaseEquals
+ )
def nonSensibleWarning(what: String, alwaysEqual: Boolean) = {
val msg = alwaysEqual == (name == nme.EQ || name == nme.eq)
@@ -1132,10 +1146,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
def nonSensiblyNeq() = nonSensible("", false)
def nonSensiblyNew() = nonSensibleWarning("a fresh object", false)
+ def unrelatedMsg = name match {
+ case nme.EQ | nme.eq => "never compare equal"
+ case _ => "always compare unequal"
+ }
def unrelatedTypes() = {
- val msg = if (name == nme.EQ || name == nme.eq)
- "never compare equal" else "always compare unequal"
- unit.warning(pos, typesString + " are unrelated: they will most likely " + msg)
+ val weaselWord = if (isEitherValueClass) "" else " most likely"
+ unit.warning(pos, s"$typesString are unrelated: they will$weaselWord $unrelatedMsg")
}
if (nullCount == 2) // null == null
@@ -1174,15 +1191,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ // warn if one but not the other is a derived value class
+ // this is especially important to enable transitioning from
+ // regular to value classes without silent failures.
+ if (isNonsenseValueClassCompare)
+ unrelatedTypes()
// possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean
- if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
+ else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) {
// better to have lubbed and lost
def warnIfLubless(): Unit = {
val common = global.lub(List(actual.tpe, receiver.tpe))
if (ObjectClass.tpe <:< common)
unrelatedTypes()
}
- def eitherSubclasses = (actual isSubClass receiver) || (receiver isSubClass actual)
// warn if actual has a case parent that is not same as receiver's;
// if actual is not a case, then warn if no common supertype, as below
if (isCaseEquals) {
@@ -1195,14 +1216,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
//else
// if a class, it must be super to thisCase (and receiver) since not <: thisCase
if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq()
- else if (!eitherSubclasses) warnIfLubless()
+ else if (!haveSubclassRelationship) warnIfLubless()
case _ =>
}
}
- else if (actual isSubClass receiver) ()
- else if (receiver isSubClass actual) ()
// warn only if they have no common supertype below Object
- else {
+ else if (!haveSubclassRelationship) {
warnIfLubless()
}
}
@@ -1373,12 +1392,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
*/
private def checkMigration(sym: Symbol, pos: Position) = {
if (sym.hasMigrationAnnotation) {
- val changed = try
+ val changed = try
settings.Xmigration.value < ScalaVersion(sym.migrationVersion.get)
catch {
- case e : NumberFormatException =>
+ case e : NumberFormatException =>
unit.warning(pos, s"${sym.fullLocationString} has an unparsable version number: ${e.getMessage()}")
- // if we can't parse the format on the migration annotation just conservatively assume it changed
+ // if we can't parse the format on the migration annotation just conservatively assume it changed
true
}
if (changed)
@@ -1396,6 +1415,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
}
}
+ private def checkDelayedInitSelect(qual: Tree, sym: Symbol, pos: Position) = {
+ def isLikelyUninitialized = (
+ (sym.owner isSubClass DelayedInitClass)
+ && !qual.tpe.isInstanceOf[ThisType]
+ && sym.accessedOrSelf.isVal
+ )
+ if (settings.lint.value && isLikelyUninitialized)
+ unit.warning(pos, s"Selecting ${sym} from ${sym.owner}, which extends scala.DelayedInit, is likely to yield an uninitialized value")
+ }
+
private def lessAccessible(otherSym: Symbol, memberSym: Symbol): Boolean = (
(otherSym != NoSymbol)
&& !otherSym.isProtected
@@ -1595,6 +1624,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
if(settings.Xmigration.value != NoScalaVersion)
checkMigration(sym, tree.pos)
checkCompileTimeOnly(sym, tree.pos)
+ checkDelayedInitSelect(qual, sym, tree.pos)
if (sym eq NoSymbol) {
unit.warning(tree.pos, "Select node has NoSymbol! " + tree + " / " + tree.tpe)
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index fb8a111da1..fa72ad64bf 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -67,7 +67,10 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
case t => t
}
acc setInfoAndEnter (tpe cloneInfo acc)
- storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
+ // Diagnostic for SI-7091
+ if (!accDefs.contains(clazz))
+ reporter.error(sel.pos, s"Internal error: unable to store accessor definition in ${clazz}. clazz.isPackage=${clazz.isPackage}. Accessor required for ${sel} (${showRaw(sel)})")
+ else storeAccessorDefinition(clazz, DefDef(acc, EmptyTree))
acc
}
@@ -264,9 +267,16 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
}
- // direct calls to aliases of param accessors to the superclass in order to avoid
+
+ def isAccessibleFromSuper(sym: Symbol) = {
+ val pre = SuperType(sym.owner.tpe, qual.tpe)
+ localTyper.context.isAccessible(sym, pre, superAccess = true)
+ }
+
+ // Direct calls to aliases of param accessors to the superclass in order to avoid
// duplicating fields.
- if (sym.isParamAccessor && sym.alias != NoSymbol) {
+ // ... but, only if accessible (SI-6793)
+ if (sym.isParamAccessor && sym.alias != NoSymbol && isAccessibleFromSuper(sym.alias)) {
val result = (localTyper.typedPos(tree.pos) {
Select(Super(qual, tpnme.EMPTY) setPos qual.pos, sym.alias)
}).asInstanceOf[Select]
@@ -288,6 +298,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
currentClass.isTrait
&& sym.isProtected
&& sym.enclClass != currentClass
+ && !sym.owner.isPackageClass // SI-7091 no accessor needed package owned (ie, top level) symbols
&& !sym.owner.isTrait
&& (sym.owner.enclosingPackageClass != currentClass.enclosingPackageClass)
&& (qual.symbol.info.member(sym.name) ne NoSymbol)
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index c5c3c560ea..88d10f1d72 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -313,6 +313,9 @@ abstract class TreeCheckers extends Analyzer {
}
for {
sym <- referencedSymbols
+ // Accessors are known to steal the type of the underlying field without cloning existential symbols at the new owner.
+ // This happens in Namer#accessorTypeCompleter. We just look the other way here.
+ if !tree.symbol.isAccessor
if (sym.isTypeParameter || sym.isLocal) && !(tree.symbol hasTransOwner sym.owner)
} errorFn(s"The symbol, tpe or info of tree `(${tree}) : ${info}` refers to a out-of-scope symbol, ${sym.fullLocationString}. tree.symbol.ownerChain: ${tree.symbol.ownerChain.mkString(", ")}")
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 4233bde770..4950a7efef 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -111,14 +111,9 @@ trait TypeDiagnostics {
"\n(Note that variables need to be initialized to be defined)"
else ""
- /** Only prints the parameter names if they're not synthetic,
- * since "x$1: Int" does not offer any more information than "Int".
- */
private def methodTypeErrorString(tp: Type) = tp match {
case mt @ MethodType(params, resultType) =>
- def forString =
- if (params exists (_.isSynthetic)) params map (_.tpe)
- else params map (_.defString)
+ def forString = params map (_.defString)
forString.mkString("(", ",", ")") + resultType
case x => x.toString
@@ -291,6 +286,24 @@ trait TypeDiagnostics {
)
}
+ def typePatternAdvice(sym: Symbol, ptSym: Symbol) = {
+ val clazz = if (sym.isModuleClass) sym.companionClass else sym
+ val caseString =
+ if (clazz.isCaseClass && (clazz isSubClass ptSym))
+ ( clazz.caseFieldAccessors
+ map (_ => "_") // could use the actual param names here
+ mkString (s"`case ${clazz.name}(", ",", ")`")
+ )
+ else
+ "`case _: " + (clazz.typeParams match {
+ case Nil => "" + clazz.name
+ case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]")
+ })+ "`"
+
+ "\nNote: if you intended to match against the class, try "+ caseString
+
+ }
+
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
// save the name because it will be mutated until it has been
// distinguished from the other types in the same error message
@@ -427,17 +440,24 @@ trait TypeDiagnostics {
contextWarning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString))
object checkDead {
- private var expr: Symbol = NoSymbol
+ private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol)
+ // The method being applied to `tree` when `apply` is called.
+ private def expr = exprStack.top
private def exprOK =
(expr != Object_synchronized) &&
!(expr.isLabel && treeInfo.isSynthCaseSymbol(expr)) // it's okay to jump to matchEnd (or another case) with an argument of type nothing
- private def treeOK(tree: Tree) = tree.tpe != null && tree.tpe.typeSymbol == NothingClass
+ private def treeOK(tree: Tree) = {
+ val isLabelDef = tree match { case _: LabelDef => true; case _ => false}
+ tree.tpe != null && tree.tpe.typeSymbol == NothingClass && !isLabelDef
+ }
- def updateExpr(fn: Tree) = {
- if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor)
- checkDead.expr = fn.symbol
+ @inline def updateExpr[A](fn: Tree)(f: => A) = {
+ if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor) {
+ exprStack push fn.symbol
+ try f finally exprStack.pop()
+ } else f
}
def apply(tree: Tree): Tree = {
// Error suppression will squash some of these warnings unless we circumvent it.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 6e9b3b3fcd..bb3ebb4e0f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -507,10 +507,7 @@ trait Typers extends Modes with Adaptations with Tags {
@inline
final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = {
- val res = f(newTyper(c))
- if (c.hasErrors)
- context.issue(c.errBuffer.head)
- res
+ f(newTyper(c))
}
@inline
@@ -780,16 +777,19 @@ trait Typers extends Modes with Adaptations with Tags {
if (!OK) {
val Some(AnnotationInfo(_, List(Literal(Constant(featureDesc: String)), Literal(Constant(required: Boolean))), _)) =
featureTrait getAnnotation LanguageFeatureAnnot
- val req = if (required) "needs to" else "should"
- var raw = featureDesc + " " + req + " be enabled\n" +
- "by making the implicit value language." + featureName + " visible."
- if (!(currentRun.reportedFeature contains featureTrait))
- raw += "\nThis can be achieved by adding the import clause 'import scala.language." + featureName + "'\n" +
- "or by setting the compiler option -language:" + featureName + ".\n" +
- "See the Scala docs for value scala.language." + featureName + " for a discussion\n" +
- "why the feature " + req + " be explicitly enabled."
+ val req = if (required) "needs to" else "should"
+ val fqname = "scala.language." + featureName
+ val explain = (
+ if (currentRun.reportedFeature contains featureTrait) "" else
+ s"""|
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$featureName.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled.""".stripMargin
+ )
currentRun.reportedFeature += featureTrait
- val msg = raw replace ("#", construct)
+
+ val msg = s"$featureDesc $req be enabled\nby making the implicit value $fqname visible.$explain" replace ("#", construct)
if (required) unit.error(pos, msg)
else currentRun.featureWarnings.warn(pos, msg)
}
@@ -1375,9 +1375,8 @@ trait Typers extends Modes with Adaptations with Tags {
def onError(reportError: => Tree): Tree = {
context.tree match {
case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty =>
- silent(_.typedArgs(args, mode)) match {
- case SilentResultValue(xs) =>
- val args = xs.asInstanceOf[List[Tree]]
+ silent(_.typedArgs(args.map(_.duplicate), mode)) match {
+ case SilentResultValue(args) =>
if (args exists (_.isErrorTyped))
reportError
else
@@ -1576,7 +1575,7 @@ trait Typers extends Modes with Adaptations with Tags {
if (sarg != EmptyTree && supertpe.typeSymbol != firstParent)
ConstrArgsInTraitParentTpeError(sarg, firstParent)
if (!supertparams.isEmpty)
- supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus
+ supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos
case _ =>
if (!supertparams.isEmpty)
MissingTypeArgumentsParentTpeError(supertpt)
@@ -1748,9 +1747,7 @@ trait Typers extends Modes with Adaptations with Tags {
assert(clazz != NoSymbol, cdef)
reenterTypeParams(cdef.tparams)
val tparams1 = cdef.tparams mapConserve (typedTypeDef)
- val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) {
- _.typedTemplate(cdef.impl, parentTypes(cdef.impl))
- }
+ val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, parentTypes(cdef.impl))
val impl2 = finishMethodSynthesis(impl1, clazz, context)
if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass)
checkEphemeral(clazz, impl2.body)
@@ -1791,17 +1788,16 @@ trait Typers extends Modes with Adaptations with Tags {
|| !linkedClass.isSerializable
|| clazz.isSerializable
)
- val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, newScope)) {
- _.typedTemplate(mdef.impl, {
- parentTypes(mdef.impl) ++ (
- if (noSerializable) Nil
- else {
- clazz.makeSerializable()
- List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
- }
- )
- })
- }
+ val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, {
+ parentTypes(mdef.impl) ++ (
+ if (noSerializable) Nil
+ else {
+ clazz.makeSerializable()
+ List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus)
+ }
+ )
+ })
+
val impl2 = finishMethodSynthesis(impl1, clazz, context)
// SI-5954. On second compile of a companion class contained in a package object we end up
@@ -3312,8 +3308,6 @@ trait Typers extends Modes with Adaptations with Tags {
// but behaves as if it were (=> T) => T) we need to know what is the actual
// target of a call. Since this information is no longer available from
// typedArg, it is recorded here.
- checkDead.updateExpr(fun)
-
val args1 =
// no expected type when jumping to a match label -- anything goes (this is ok since we're typing the translation of well-typed code)
// ... except during erasure: we must take the expected type into account as it drives the insertion of casts!
@@ -3368,7 +3362,9 @@ trait Typers extends Modes with Adaptations with Tags {
else
constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe))
}
- handleMonomorphicCall
+ checkDead.updateExpr(fun) {
+ handleMonomorphicCall
+ }
} else if (needsInstantiation(tparams, formals, args)) {
//println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info)))
inferExprInstance(fun, tparams)
@@ -3481,8 +3477,8 @@ trait Typers extends Modes with Adaptations with Tags {
else {
val resTp = fun1.tpe.finalResultType.normalize
val nbSubPats = args.length
-
- val (formals, formalsExpanded) = extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol)
+ val (formals, formalsExpanded) =
+ extractorFormalTypes(fun0.pos, resTp, nbSubPats, fun1.symbol, treeInfo.effectivePatternArity(args))
if (formals == null) duplErrorTree(WrongNumberOfArgsError(tree, fun))
else {
val args1 = typedArgs(args, mode, formals, formalsExpanded)
@@ -3768,77 +3764,9 @@ trait Typers extends Modes with Adaptations with Tags {
} else res
}
- def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
- sym.isTypeParameter && sym.owner.isJavaDefined
-
- /** If we map a set of hidden symbols to their existential bounds, we
- * have a problem: the bounds may themselves contain references to the
- * hidden symbols. So this recursively calls existentialBound until
- * the typeSymbol is not amongst the symbols being hidden.
- */
- def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
- def safeBound(t: Type): Type =
- if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
-
- def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
- case tp @ RefinedType(parents, decls) =>
- val parents1 = parents mapConserve safeBound
- if (parents eq parents1) tp
- else copyRefinedType(tp, parents1, decls)
- case tp => tp
- }
-
- // Hanging onto lower bound in case anything interesting
- // happens with it.
- mapFrom(hidden)(s => s.existentialBound match {
- case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
- case _ => hiBound(s)
- })
- }
-
- /** Given a set `rawSyms` of term- and type-symbols, and a type
- * `tp`, produce a set of fresh type parameters and a type so that
- * it can be abstracted to an existential type. Every type symbol
- * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
- * type `T` in `rawSyms` is given an associated type symbol of the
- * following form:
- *
- * type x.type <: T with Singleton
- *
- * The name of the type parameter is `x.type`, to produce nice
- * diagnostics. The Singleton parent ensures that the type
- * parameter is still seen as a stable type. Type symbols in
- * rawSyms are fully replaced by the new symbols. Term symbols are
- * also replaced, except for term symbols of an Ident tree, where
- * only the type of the Ident is changed.
- */
- protected def existentialTransform[T](rawSyms: List[Symbol], tp: Type)(creator: (List[Symbol], Type) => T): T = {
- val allBounds = existentialBoundsExcludingHidden(rawSyms)
- val typeParams: List[Symbol] = rawSyms map { sym =>
- val name = sym.name match {
- case x: TypeName => x
- case x => tpnme.singletonName(x)
- }
- val bound = allBounds(sym)
- val sowner = if (isRawParameter(sym)) context.owner else sym.owner
- val quantified = sowner.newExistential(name, sym.pos)
-
- quantified setInfo bound.cloneInfo(quantified)
- }
- // Higher-kinded existentials are not yet supported, but this is
- // tpeHK for when they are: "if a type constructor is expected/allowed,
- // tpeHK must be called instead of tpe."
- val typeParamTypes = typeParams map (_.tpeHK)
- def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
-
- creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
- }
-
/** Compute an existential type from raw hidden symbols `syms` and type `tp`
*/
- def packSymbols(hidden: List[Symbol], tp: Type): Type =
- if (hidden.isEmpty) tp
- else existentialTransform(hidden, tp)(existentialAbstraction)
+ def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, Some(context0.owner))
def isReferencedFrom(ctx: Context, sym: Symbol): Boolean =
ctx.owner.isTerm &&
@@ -3956,8 +3884,16 @@ trait Typers extends Modes with Adaptations with Tags {
if (vd.symbol.tpe.isVolatile)
AbstractionFromVolatileTypeError(vd)
val tpt1 = typedType(tree.tpt, mode)
- existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) =>
- TypeTree(newExistentialType(tparams, tp)) setOriginal tree
+ existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => {
+ val original = tpt1 match {
+ case tpt : TypeTree => atPos(tree.pos)(ExistentialTypeTree(tpt.original, tree.whereClauses))
+ case _ => {
+ debuglog(s"cannot reconstruct the original for $tree, because $tpt1 is not a TypeTree")
+ tree
+ }
+ }
+ TypeTree(newExistentialType(tparams, tp)) setOriginal original
+ }
)
}
@@ -4462,6 +4398,12 @@ trait Typers extends Modes with Adaptations with Tags {
treeCopy.New(tree, tpt1).setType(tp)
}
+ def functionTypeWildcard(tree: Tree, arity: Int): Type = {
+ val tp = functionType(List.fill(arity)(WildcardType), WildcardType)
+ if (tp == NoType) MaxFunctionArityError(tree)
+ tp
+ }
+
def typedEta(expr1: Tree): Tree = expr1.tpe match {
case TypeRef(_, ByNameParamClass, _) =>
val expr2 = Function(List(), expr1) setPos expr1.pos
@@ -4473,7 +4415,7 @@ trait Typers extends Modes with Adaptations with Tags {
typed1(expr2, mode, pt)
case PolyType(_, MethodType(formals, _)) =>
if (isFunctionType(pt)) expr1
- else adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
+ else adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
case MethodType(formals, _) =>
if (isFunctionType(pt)) expr1
else expr1 match {
@@ -4492,7 +4434,7 @@ trait Typers extends Modes with Adaptations with Tags {
val rhs = Apply(f, args)
typed(rhs)
case _ =>
- adapt(expr1, mode, functionType(formals map (t => WildcardType), WildcardType))
+ adapt(expr1, mode, functionTypeWildcard(expr1, formals.length))
}
case ErrorType =>
expr1
@@ -4587,8 +4529,13 @@ trait Typers extends Modes with Adaptations with Tags {
def normalTypedApply(tree: Tree, fun: Tree, args: List[Tree]) = {
val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable
- if (stableApplication && isPatternMode) {
+ if (args.isEmpty && stableApplication && isPatternMode) {
// treat stable function applications f() as expressions.
+ //
+ // [JZ] According to Martin, this is related to the old pattern matcher, which
+ // needs to typecheck after a the translation of `x.f` to `x.f()` in a prior
+ // compilation phase. As part of SI-7377, this has been tightened with `args.isEmpty`,
+ // but we should remove it altogether in Scala 2.11.
typed1(tree, mode & ~PATTERNmode | EXPRmode, pt)
} else {
val funpt = if (isPatternMode) pt else WildcardType
@@ -4813,7 +4760,9 @@ trait Typers extends Modes with Adaptations with Tags {
if (!reallyExists(sym)) {
def handleMissing: Tree = {
- if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) {
+ if (context.unit.isJava && name.isTypeName) {
+ // SI-3120 Java uses the same syntax, A.B, to express selection from the
+ // value A and from the type A. We have to try both.
val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) }
if (tree1 != EmptyTree) return typed1(tree1, mode, pt)
}
@@ -5046,7 +4995,25 @@ trait Typers extends Modes with Adaptations with Tags {
else cx.depth - (cx.scope.nestingLevel - defEntry.owner.nestingLevel)
var impSym: Symbol = NoSymbol // the imported symbol
var imports = context.imports // impSym != NoSymbol => it is imported from imports.head
- while (!reallyExists(impSym) && !imports.isEmpty && imports.head.depth > symDepth) {
+
+ // Java: A single-type-import declaration d in a compilation unit c of package p
+ // that imports a type named n shadows, throughout c, the declarations of:
+ //
+ // 1) any top level type named n declared in another compilation unit of p
+ //
+ // A type-import-on-demand declaration never causes any other declaration to be shadowed.
+ //
+ // Scala: Bindings of different kinds have a precedence defined on them:
+ //
+ // 1) Definitions and declarations that are local, inherited, or made available by a
+ // package clause in the same compilation unit where the definition occurs have
+ // highest precedence.
+ // 2) Explicit imports have next highest precedence.
+ def depthOk(imp: ImportInfo) = (
+ imp.depth > symDepth
+ || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symDepth)
+ )
+ while (!reallyExists(impSym) && !imports.isEmpty && depthOk(imports.head)) {
impSym = imports.head.importedSymbol(name)
if (!impSym.exists) imports = imports.tail
}
@@ -5292,8 +5259,7 @@ trait Typers extends Modes with Adaptations with Tags {
def typedDocDef(docdef: DocDef) = {
if (forScaladoc && (sym ne null) && (sym ne NoSymbol)) {
val comment = docdef.comment
- docComments(sym) = comment
- comment.defineVariables(sym)
+ fillDocComment(sym, comment)
val typer1 = newTyper(context.makeNewScope(tree, context.owner))
for (useCase <- comment.useCases) {
typer1.silent(_.typedUseCase(useCase)) match {
@@ -5333,7 +5299,7 @@ trait Typers extends Modes with Adaptations with Tags {
def typedUnApply(tree: UnApply) = {
val fun1 = typed(tree.fun)
- val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args.length), tree.args.length)
+ val tpes = formalTypes(unapplyTypeList(tree.fun.pos, tree.fun.symbol, fun1.tpe, tree.args), tree.args.length)
val args1 = map2(tree.args, tpes)(typedPattern)
treeCopy.UnApply(tree, fun1, args1) setType pt
}
@@ -5741,7 +5707,10 @@ trait Typers extends Modes with Adaptations with Tags {
// as a compromise, context.enrichmentEnabled tells adaptToMember to go ahead and enrich,
// but arbitrary conversions (in adapt) are disabled
// TODO: can we achieve the pattern matching bit of the string interpolation SIP without this?
- typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt)))
+ typingInPattern(context.withImplicitsDisabledAllowEnrichment(typed(tree, PATTERNmode, pt))) match {
+ case tpt if tpt.isType => PatternMustBeValue(tpt, pt); tpt
+ case pat => pat
+ }
}
/** Types a (fully parameterized) type tree */
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index b51dc0ccd5..31c5a61a8c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -34,12 +34,13 @@ trait Unapplies extends ast.TreeDSL
/** returns type list for return type of the extraction
* @see extractorFormalTypes
*/
- def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, nbSubPats: Int) = {
+ def unapplyTypeList(pos: Position, ufn: Symbol, ufntpe: Type, args: List[Tree]) = {
assert(ufn.isMethod, ufn)
+ val nbSubPats = args.length
//Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol)
ufn.name match {
case nme.unapply | nme.unapplySeq =>
- val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn)
+ val (formals, _) = extractorFormalTypes(pos, unapplyUnwrap(ufntpe), nbSubPats, ufn, treeInfo.effectivePatternArity(args))
if (formals == null) throw new TypeError(s"$ufn of type $ufntpe cannot extract $nbSubPats sub-patterns")
else formals
case _ => throw new TypeError(ufn+" is not an unapply or unapplySeq")
diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
index b103ae9cb0..2601798b96 100644
--- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
+++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala
@@ -10,7 +10,7 @@ import java.io.PrintStream
* @param enabled: A condition that must be true for trace info to be produced.
*/
class SimpleTracer(out: PrintStream, enabled: Boolean = true) {
- def apply[T](msg: String)(value: T): T = {
+ def apply[T](msg: => String)(value: T): T = {
if (enabled) out.println(msg+value)
value
}
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index 3cdbcc5110..d2e9238e8f 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -40,12 +40,22 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
tree = add(tree)
}
- def iterator = {
- def elems(t: Tree): Iterator[T] = {
- if (t eq null) Iterator.empty
- else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
+ def iterator = toList.iterator
+
+ override def foreach[U](f: T => U) {
+ def loop(t: Tree) {
+ if (t ne null) {
+ loop(t.l)
+ f(t.elem)
+ loop(t.r)
+ }
}
- elems(tree)
+ loop(tree)
+ }
+ override def toList = {
+ val xs = scala.collection.mutable.ListBuffer[T]()
+ foreach(xs += _)
+ xs.toList
}
override def toString(): String = {
diff --git a/src/compiler/scala/tools/reflect/MacroImplementations.scala b/src/compiler/scala/tools/reflect/MacroImplementations.scala
index 86cd845c54..f4f385f8b3 100644
--- a/src/compiler/scala/tools/reflect/MacroImplementations.scala
+++ b/src/compiler/scala/tools/reflect/MacroImplementations.scala
@@ -4,6 +4,7 @@ import scala.reflect.macros.{ReificationException, UnexpectedReificationExceptio
import scala.reflect.macros.runtime.Context
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Stack
+import scala.reflect.internal.util.OffsetPosition
abstract class MacroImplementations {
val c: Context
@@ -26,16 +27,12 @@ abstract class MacroImplementations {
c.abort(args(parts.length-1).pos,
"too many arguments for interpolated string")
}
- val stringParts = parts map {
- case Literal(Constant(s: String)) => s;
- case _ => throw new IllegalArgumentException("argument parts must be a list of string literals")
- }
- val pi = stringParts.iterator
+ val pi = parts.iterator
val bldr = new java.lang.StringBuilder
val evals = ListBuffer[ValDef]()
val ids = ListBuffer[Ident]()
- val argsStack = Stack(args : _*)
+ val argStack = Stack(args : _*)
def defval(value: Tree, tpe: Type): Unit = {
val freshName = newTermName(c.fresh("arg$"))
@@ -82,49 +79,73 @@ abstract class MacroImplementations {
}
def copyString(first: Boolean): Unit = {
- val str = StringContext.treatEscapes(pi.next())
+ val strTree = pi.next()
+ val rawStr = strTree match {
+ case Literal(Constant(str: String)) => str
+ case _ => throw new IllegalArgumentException("internal error: argument parts must be a list of string literals")
+ }
+ val str = StringContext.treatEscapes(rawStr)
val strLen = str.length
val strIsEmpty = strLen == 0
- var start = 0
+ def charAtIndexIs(idx: Int, ch: Char) = idx < strLen && str(idx) == ch
+ def isPercent(idx: Int) = charAtIndexIs(idx, '%')
+ def isConversion(idx: Int) = isPercent(idx) && !charAtIndexIs(idx + 1, 'n') && !charAtIndexIs(idx + 1, '%')
var idx = 0
+ def errorAtIndex(idx: Int, msg: String) = c.error(new OffsetPosition(strTree.pos.source, strTree.pos.point + idx), msg)
+ def wrongConversionString(idx: Int) = errorAtIndex(idx, "wrong conversion string")
+ def illegalConversionCharacter(idx: Int) = errorAtIndex(idx, "illegal conversion character")
+ def nonEscapedPercent(idx: Int) = errorAtIndex(idx, "percent signs not directly following splicees must be escaped")
+
+ // STEP 1: handle argument conversion
+ // 1) "...${smth}" => okay, equivalent to "...${smth}%s"
+ // 2) "...${smth}blahblah" => okay, equivalent to "...${smth}%sblahblah"
+ // 3) "...${smth}%" => error
+ // 4) "...${smth}%n" => okay, equivalent to "...${smth}%s%n"
+ // 5) "...${smth}%%" => okay, equivalent to "...${smth}%s%%"
+ // 6) "...${smth}[%legalJavaConversion]" => okay, according to http://docs.oracle.com/javase/1.5.0/docs/api/java/util/Formatter.html
+ // 7) "...${smth}[%illegalJavaConversion]" => error
if (!first) {
- val arg = argsStack.pop
- if (strIsEmpty || (str charAt 0) != '%') {
- bldr append "%s"
- defval(arg, AnyTpe)
- } else {
+ val arg = argStack.pop
+ if (isConversion(0)) {
// PRE str is not empty and str(0) == '%'
// argument index parameter is not allowed, thus parse
// [flags][width][.precision]conversion
var pos = 1
- while(pos < strLen && isFlag(str charAt pos)) pos += 1
- while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
- if(pos < strLen && str.charAt(pos) == '.') { pos += 1
- while(pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ while (pos < strLen && isFlag(str charAt pos)) pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
+ if (pos < strLen && str.charAt(pos) == '.') {
+ pos += 1
+ while (pos < strLen && Character.isDigit(str charAt pos)) pos += 1
}
- if(pos < strLen) {
+ if (pos < strLen) {
conversionType(str charAt pos, arg) match {
case Some(tpe) => defval(arg, tpe)
- case None => c.error(arg.pos, "illegal conversion character")
+ case None => illegalConversionCharacter(pos)
}
} else {
- // TODO: place error message on conversion string
- c.error(arg.pos, "wrong conversion string")
+ wrongConversionString(pos - 1)
}
+ idx = 1
+ } else {
+ bldr append "%s"
+ defval(arg, AnyTpe)
}
- idx = 1
}
+
+ // STEP 2: handle the rest of the text
+ // 1) %n tokens are left as is
+ // 2) %% tokens are left as is
+ // 3) other usages of percents are reported as errors
if (!strIsEmpty) {
- val len = str.length
- while (idx < len) {
- if (str(idx) == '%') {
- bldr append (str substring (start, idx)) append "%%"
- start = idx + 1
+ while (idx < strLen) {
+ if (isPercent(idx)) {
+ if (isConversion(idx)) nonEscapedPercent(idx)
+ else idx += 1 // skip n and % in %n and %%
}
idx += 1
}
- bldr append (str substring (start, idx))
+ bldr append (str take idx)
}
}
diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala
index 670540187e..75817350e5 100644
--- a/src/library/scala/collection/immutable/ListMap.scala
+++ b/src/library/scala/collection/immutable/ListMap.scala
@@ -156,8 +156,12 @@ extends AbstractMap[A, B]
* @return the value associated with the given key.
*/
override def apply(k: A): B1 = apply0(this, k)
-
- @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = if (k == cur.key) cur.value else apply0(cur.tail, k)
+
+
+ @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 =
+ if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k)
+ else if (k == cur.key) cur.value
+ else apply0(cur.tail, k)
/** Checks if this map maps `key` to a value and return the
* value if it exists.
diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala
index 0877bfbb69..f1cfd2d69a 100644
--- a/src/library/scala/collection/mutable/ArrayBuffer.scala
+++ b/src/library/scala/collection/mutable/ArrayBuffer.scala
@@ -107,7 +107,7 @@ class ArrayBuffer[A](override protected val initialSize: Int)
* the identity of the buffer. It takes time linear in
* the buffer size.
*
- * @param elem the element to append.
+ * @param elem the element to prepend.
* @return the updated buffer.
*/
def +=:(elem: A): this.type = {
diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala
index bb938a7aeb..25ba7e4ce6 100644
--- a/src/library/scala/collection/mutable/ArrayOps.scala
+++ b/src/library/scala/collection/mutable/ArrayOps.scala
@@ -76,18 +76,21 @@ trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomParalleliza
* @return An array obtained by replacing elements of this arrays with rows the represent.
*/
def transpose[U](implicit asArray: T => Array[U]): Array[Array[U]] = {
- def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
- val bs = asArray(head) map (_ => mkRowBuilder())
- for (xs <- this) {
- var i = 0
- for (x <- asArray(xs)) {
- bs(i) += x
- i += 1
+ val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
+ if (isEmpty) bb.result()
+ else {
+ def mkRowBuilder() = Array.newBuilder(ClassTag[U](arrayElementClass(elementClass)))
+ val bs = asArray(head) map (_ => mkRowBuilder())
+ for (xs <- this) {
+ var i = 0
+ for (x <- asArray(xs)) {
+ bs(i) += x
+ i += 1
+ }
}
+ for (b <- bs) bb += b.result()
+ bb.result()
}
- val bb: Builder[Array[U], Array[Array[U]]] = Array.newBuilder(ClassTag[Array[U]](elementClass))
- for (b <- bs) bb += b.result
- bb.result
}
def seq = thisCollection
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index 83aa99ad11..988886b4ea 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -42,11 +42,8 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
private[parallel] def getTaskSupport: TaskSupport =
- if (scala.util.Properties.isJavaAtLeast("1.6")) {
- val vendor = scala.util.Properties.javaVmVendor
- if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinTaskSupport
- else new ThreadPoolTaskSupport
- } else new ThreadPoolTaskSupport
+ if (scala.util.Properties.isJavaAtLeast("1.6")) new ForkJoinTaskSupport
+ else new ThreadPoolTaskSupport
val defaultTaskSupport: TaskSupport = getTaskSupport
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 5a51e97072..6b6ad29074 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -299,8 +299,8 @@ trait Future[+T] extends Awaitable[T] {
* val f = future { 5 }
* val g = f filter { _ % 2 == 1 }
* val h = f filter { _ % 2 == 0 }
- * await(g, 0) // evaluates to 5
- * await(h, 0) // throw a NoSuchElementException
+ * Await.result(g, Duration.Zero) // evaluates to 5
+ * Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
def filter(pred: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
@@ -348,8 +348,8 @@ trait Future[+T] extends Awaitable[T] {
* val h = f collect {
* case x if x > 0 => x * 2
* }
- * await(g, 0) // evaluates to 5
- * await(h, 0) // throw a NoSuchElementException
+ * Await.result(g, Duration.Zero) // evaluates to 5
+ * Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = {
@@ -454,7 +454,7 @@ trait Future[+T] extends Awaitable[T] {
* val f = future { sys.error("failed") }
* val g = future { 5 }
* val h = f fallbackTo g
- * await(h, 0) // evaluates to 5
+ * Await.result(h, Duration.Zero) // evaluates to 5
* }}}
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
@@ -634,7 +634,7 @@ object Future {
*
* Example:
* {{{
- * val result = Await.result(Futures.reduce(futures)(_ + _), 5 seconds)
+ * val result = Await.result(Future.reduce(futures)(_ + _), 5 seconds)
* }}}
*/
def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = {
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index 441bf5aa4d..fdba0ec716 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -253,7 +253,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo
*/
def gcd (that: BigInt): BigInt = new BigInt(this.bigInteger.gcd(that.bigInteger))
- /** Returns a BigInt whose value is (this mod m).
+ /** Returns a BigInt whose value is (this mod that).
* This method differs from `%` in that it always returns a non-negative BigInt.
*/
def mod (that: BigInt): BigInt = new BigInt(this.bigInteger.mod(that.bigInteger))
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index e9b92541c2..11b12050c9 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -26,7 +26,7 @@ import scala.language.{implicitConversions, higherKinds}
* val pairs = Array(("a", 5, 2), ("c", 3, 1), ("b", 1, 3))
*
* // sort by 2nd element
- * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2)
+ * Sorting.quickSort(pairs)(Ordering.by[(String, Int, Int), Int](_._2))
*
* // sort by the 3rd element, then 1st
* Sorting.quickSort(pairs)(Ordering[(Int, String)].on(x => (x._3, x._1)))
diff --git a/src/library/scala/runtime/BoxedUnit.java b/src/library/scala/runtime/BoxedUnit.java
index 035c05d12a..f436b7c209 100644
--- a/src/library/scala/runtime/BoxedUnit.java
+++ b/src/library/scala/runtime/BoxedUnit.java
@@ -17,6 +17,8 @@ public final class BoxedUnit implements java.io.Serializable {
public final static BoxedUnit UNIT = new BoxedUnit();
public final static Class<Void> TYPE = java.lang.Void.TYPE;
+
+ private Object readResolve() { return UNIT; }
private BoxedUnit() { }
diff --git a/src/library/scala/xml/Utility.scala b/src/library/scala/xml/Utility.scala
index 9429e9caa7..030a89773e 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/library/scala/xml/Utility.scala
@@ -64,7 +64,7 @@ object Utility extends AnyRef with parsing.TokenTests {
val key = md.key
val smaller = sort(md.filter { m => m.key < key })
val greater = sort(md.filter { m => m.key > key })
- smaller.copy(md.copy ( greater ))
+ smaller.foldRight (md copy greater) ((x, xs) => x copy xs)
}
/** Return the node with its attribute list sorted alphabetically
diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala
index d73d99bc89..848deef8c5 100644
--- a/src/partest/scala/tools/partest/CompilerTest.scala
+++ b/src/partest/scala/tools/partest/CompilerTest.scala
@@ -19,7 +19,7 @@ abstract class CompilerTest extends DirectTest {
def check(source: String, unit: global.CompilationUnit): Unit
lazy val global: Global = newCompiler()
- lazy val units = compilationUnits(global)(sources: _ *)
+ lazy val units: List[global.CompilationUnit] = compilationUnits(global)(sources: _ *)
import global._
import definitions._
diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala
index 483cb491a1..e2dac2fd55 100644
--- a/src/partest/scala/tools/partest/DirectTest.scala
+++ b/src/partest/scala/tools/partest/DirectTest.scala
@@ -7,7 +7,7 @@ package scala.tools.partest
import scala.tools.nsc._
import io.Directory
-import util.{BatchSourceFile, CommandLineParser}
+import util.{ SourceFile, BatchSourceFile, CommandLineParser }
import reporters.{Reporter, ConsoleReporter}
/** A class for testing code which is embedded as a string.
@@ -49,18 +49,32 @@ abstract class DirectTest extends App {
def reporter(settings: Settings): Reporter = new ConsoleReporter(settings)
- def newSources(sourceCodes: String*) = sourceCodes.toList.zipWithIndex map {
- case (src, idx) => new BatchSourceFile("newSource" + (idx + 1), src)
- }
+ private def newSourcesWithExtension(ext: String)(codes: String*): List[BatchSourceFile] =
+ codes.toList.zipWithIndex map {
+ case (src, idx) => new BatchSourceFile(s"newSource${idx + 1}.$ext", src)
+ }
+
+ def newJavaSources(codes: String*) = newSourcesWithExtension("java")(codes: _*)
+ def newSources(codes: String*) = newSourcesWithExtension("scala")(codes: _*)
+
def compileString(global: Global)(sourceCode: String): Boolean = {
withRun(global)(_ compileSources newSources(sourceCode))
!global.reporter.hasErrors
}
- def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
- val units = withRun(global) { run =>
- run compileSources newSources(sourceCodes: _*)
+
+ def javaCompilationUnits(global: Global)(sourceCodes: String*) = {
+ sourceFilesToCompiledUnits(global)(newJavaSources(sourceCodes: _*))
+ }
+
+ def sourceFilesToCompiledUnits(global: Global)(files: List[SourceFile]) = {
+ withRun(global) { run =>
+ run compileSources files
run.units.toList
}
+ }
+
+ def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = {
+ val units = sourceFilesToCompiledUnits(global)(newSources(sourceCodes: _*))
if (global.reporter.hasErrors) {
global.reporter.flush()
sys.error("Compilation failure.")
diff --git a/src/partest/scala/tools/partest/nest/Diff.java b/src/partest/scala/tools/partest/nest/Diff.java
deleted file mode 100644
index f69fc6858b..0000000000
--- a/src/partest/scala/tools/partest/nest/Diff.java
+++ /dev/null
@@ -1,873 +0,0 @@
-
-package scala.tools.partest.nest;
-
-import java.util.Hashtable;
-
-/** A class to compare IndexedSeqs of objects. The result of comparison
- is a list of <code>change</code> objects which form an
- edit script. The objects compared are traditionally lines
- of text from two files. Comparison options such as "ignore
- whitespace" are implemented by modifying the <code>equals</code>
- and <code>hashcode</code> methods for the objects compared.
-<p>
- The basic algorithm is described in: </br>
- "An O(ND) Difference Algorithm and its Variations", Eugene Myers,
- Algorithmica Vol. 1 No. 2, 1986, p 251.
-<p>
- This class outputs different results from GNU diff 1.15 on some
- inputs. Our results are actually better (smaller change list, smaller
- total size of changes), but it would be nice to know why. Perhaps
- there is a memory overwrite bug in GNU diff 1.15.
-
- @author Stuart D. Gathman, translated from GNU diff 1.15
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the <a href=COPYING.txt>
- GNU General Public License</a>
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
- */
-
-public class Diff {
-
- /** Prepare to find differences between two arrays. Each element of
- the arrays is translated to an "equivalence number" based on
- the result of <code>equals</code>. The original Object arrays
- are no longer needed for computing the differences. They will
- be needed again later to print the results of the comparison as
- an edit script, if desired.
- */
- public Diff(Object[] a,Object[] b) {
- Hashtable<Object, Integer> h = new Hashtable<Object, Integer>(a.length + b.length);
- filevec[0] = new file_data(a,h);
- filevec[1] = new file_data(b,h);
- }
-
- /** 1 more than the maximum equivalence value used for this or its
- sibling file. */
- private int equiv_max = 1;
-
- /** When set to true, the comparison uses a heuristic to speed it up.
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
- public boolean heuristic = false;
-
- /** When set to true, the algorithm returns a guarranteed minimal
- set of changes. This makes things slower, sometimes much slower. */
- public boolean no_discards = false;
-
- private int[] xvec, yvec; /* IndexedSeqs being compared. */
- private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the forward
- search of the edit matrix. */
- private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing
- the X coordinate of the point furthest
- along the given diagonal in the backward
- search of the edit matrix. */
- private int fdiagoff, bdiagoff;
- private final file_data[] filevec = new file_data[2];
- private int cost;
-
- /** Find the midpoint of the shortest edit script for a specified
- portion of the two files.
-
- We scan from the beginnings of the files, and simultaneously from the ends,
- doing a breadth-first search through the space of edit-sequence.
- When the two searches meet, we have found the midpoint of the shortest
- edit sequence.
-
- The value returned is the number of the diagonal on which the midpoint lies.
- The diagonal number equals the number of inserted lines minus the number
- of deleted lines (counting only lines before the midpoint).
- The edit cost is stored into COST; this is the total number of
- lines inserted or deleted (counting only lines before the midpoint).
-
- This function assumes that the first lines of the specified portions
- of the two files do not match, and likewise that the last lines do not
- match. The caller must trim matching lines from the beginning and end
- of the portions it is going to specify.
-
- Note that if we return the "wrong" diagonal value, or if
- the value of bdiag at that diagonal is "wrong",
- the worst this can do is cause suboptimal diff output.
- It cannot cause incorrect diff output. */
-
- private int diag (int xoff, int xlim, int yoff, int ylim) {
- final int[] fd = fdiag; // Give the compiler a chance.
- final int[] bd = bdiag; // Additional help for the compiler.
- final int[] xv = xvec; // Still more help for the compiler.
- final int[] yv = yvec; // And more and more . . .
- final int dmin = xoff - ylim; // Minimum valid diagonal.
- final int dmax = xlim - yoff; // Maximum valid diagonal.
- final int fmid = xoff - yoff; // Center diagonal of top-down search.
- final int bmid = xlim - ylim; // Center diagonal of bottom-up search.
- int fmin = fmid, fmax = fmid; // Limits of top-down search.
- int bmin = bmid, bmax = bmid; // Limits of bottom-up search.
- /* True if southeast corner is on an odd
- diagonal with respect to the northwest. */
- final boolean odd = (fmid - bmid & 1) != 0;
-
- fd[fdiagoff + fmid] = xoff;
- bd[bdiagoff + bmid] = xlim;
-
- for (int c = 1;; ++c)
- {
- int d; /* Active diagonal. */
- boolean big_snake = false;
-
- /* Extend the top-down search by an edit step in each diagonal. */
- if (fmin > dmin)
- fd[fdiagoff + --fmin - 1] = -1;
- else
- ++fmin;
- if (fmax < dmax)
- fd[fdiagoff + ++fmax + 1] = -1;
- else
- --fmax;
- for (d = fmax; d >= fmin; d -= 2)
- {
- int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1];
-
- if (tlo >= thi)
- x = tlo + 1;
- else
- x = thi;
- oldx = x;
- y = x - d;
- while (x < xlim && y < ylim && xv[x] == yv[y]) {
- ++x; ++y;
- }
- if (x - oldx > 20)
- big_snake = true;
- fd[fdiagoff + d] = x;
- if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c - 1;
- return d;
- }
- }
-
- /* Similar extend the bottom-up search. */
- if (bmin > dmin)
- bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE;
- else
- ++bmin;
- if (bmax < dmax)
- bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE;
- else
- --bmax;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1];
-
- if (tlo < thi)
- x = tlo;
- else
- x = thi - 1;
- oldx = x;
- y = x - d;
- while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) {
- --x; --y;
- }
- if (oldx - x > 20)
- big_snake = true;
- bd[bdiagoff + d] = x;
- if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d])
- {
- cost = 2 * c;
- return d;
- }
- }
-
- /* Heuristic: check occasionally for a diagonal that has made
- lots of progress compared with the edit distance.
- If we have any such, find the one that has made the most
- progress and return it as if it had succeeded.
-
- With this heuristic, for files with a constant small density
- of changes, the algorithm is linear in the file size. */
-
- if (c > 200 && big_snake && heuristic)
- {
- int best = 0;
- int bestpos = -1;
-
- for (d = fmax; d >= fmin; d -= 2)
- {
- int dd = d - fmid;
- if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if (fd[fdiagoff + d] * 2 - dd > best
- && fd[fdiagoff + d] - xoff > 20
- && fd[fdiagoff + d] - d - yoff > 20)
- {
- int k;
- int x = fd[fdiagoff + d];
-
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- for (k = 1; k <= 20; k++)
- if (xvec[x - k] != yvec[x - d - k])
- break;
-
- if (k == 21)
- {
- best = fd[fdiagoff + d] * 2 - dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
-
- best = 0;
- for (d = bmax; d >= bmin; d -= 2)
- {
- int dd = d - bmid;
- if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd)))
- {
- if ((xlim - bd[bdiagoff + d]) * 2 + dd > best
- && xlim - bd[bdiagoff + d] > 20
- && ylim - (bd[bdiagoff + d] - d) > 20)
- {
- /* We have a good enough best diagonal;
- now insist that it end with a significant snake. */
- int k;
- int x = bd[bdiagoff + d];
-
- for (k = 0; k < 20; k++)
- if (xvec[x + k] != yvec[x - d + k])
- break;
- if (k == 20)
- {
- best = (xlim - bd[bdiagoff + d]) * 2 + dd;
- bestpos = d;
- }
- }
- }
- }
- if (best > 0)
- {
- cost = 2 * c - 1;
- return bestpos;
- }
- }
- }
- }
-
- /** Compare in detail contiguous subsequences of the two files
- which are known, as a whole, to match each other.
-
- The results are recorded in the IndexedSeqs filevec[N].changed_flag, by
- storing a 1 in the element for each line that is an insertion or deletion.
-
- The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1.
-
- Note that XLIM, YLIM are exclusive bounds.
- All line numbers are origin-0 and discarded lines are not counted. */
-
- private void compareseq (int xoff, int xlim, int yoff, int ylim) {
- /* Slide down the bottom initial diagonal. */
- while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) {
- ++xoff; ++yoff;
- }
- /* Slide up the top initial diagonal. */
- while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) {
- --xlim; --ylim;
- }
-
- /* Handle simple cases. */
- if (xoff == xlim)
- while (yoff < ylim)
- filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true;
- else if (yoff == ylim)
- while (xoff < xlim)
- filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true;
- else
- {
- /* Find a point of correspondence in the middle of the files. */
-
- int d = diag (xoff, xlim, yoff, ylim);
- int c = cost;
- int f = fdiag[fdiagoff + d];
- int b = bdiag[bdiagoff + d];
-
- if (c == 1)
- {
- /* This should be impossible, because it implies that
- one of the two subsequences is empty,
- and that case was handled above without calling `diag'.
- Let's verify that this is true. */
- throw new IllegalArgumentException("Empty subsequence");
- }
- else
- {
- /* Use that point to split this problem into two subproblems. */
- compareseq (xoff, b, yoff, b - d);
- /* This used to use f instead of b,
- but that is incorrect!
- It is not necessarily the case that diagonal d
- has a snake from b to f. */
- compareseq (b, xlim, b - d, ylim);
- }
- }
- }
-
- /** Discard lines from one file that have no matches in the other file.
- */
-
- private void discard_confusing_lines() {
- filevec[0].discard_confusing_lines(filevec[1]);
- filevec[1].discard_confusing_lines(filevec[0]);
- }
-
- private boolean inhibit = false;
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
- */
-
- private void shift_boundaries() {
- if (inhibit)
- return;
- filevec[0].shift_boundaries(filevec[1]);
- filevec[1].shift_boundaries(filevec[0]);
- }
-
- public interface ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script.
- @param changed0 true for lines in first file which do not match 2nd
- @param len0 number of lines in first file
- @param changed1 true for lines in 2nd file which do not match 1st
- @param len1 number of lines in 2nd file
- @return a linked list of changes - or null
- */
- public change build_script(
- boolean[] changed0,int len0,
- boolean[] changed1,int len1
- );
- }
-
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in reverse order. */
-
- static class ReverseScript implements ScriptBuilder {
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = 0, i1 = 0;
- while (i0 < len0 || i1 < len1) {
- if (changed0[1+i0] || changed1[1+i1]) {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[1+i0]) ++i0;
- while (changed1[1+i1]) ++i1;
-
- /* Record this change. */
- script = new change(line0, line1, i0 - line0, i1 - line1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0++; i1++;
- }
-
- return script;
- }
- }
-
- static class ForwardScript implements ScriptBuilder {
- /** Scan the tables of which lines are inserted and deleted,
- producing an edit script in forward order. */
- public change build_script(
- final boolean[] changed0,int len0,
- final boolean[] changed1,int len1)
- {
- change script = null;
- int i0 = len0, i1 = len1;
-
- while (i0 >= 0 || i1 >= 0)
- {
- if (changed0[i0] || changed1[i1])
- {
- int line0 = i0, line1 = i1;
-
- /* Find # lines changed here in each file. */
- while (changed0[i0]) --i0;
- while (changed1[i1]) --i1;
-
- /* Record this change. */
- script = new change(i0, i1, line0 - i0, line1 - i1, script);
- }
-
- /* We have reached lines in the two files that match each other. */
- i0--; i1--;
- }
-
- return script;
- }
- }
-
- /** Standard ScriptBuilders. */
- public final static ScriptBuilder
- forwardScript = new ForwardScript(),
- reverseScript = new ReverseScript();
-
- /* Report the differences of two files. DEPTH is the current directory
- depth. */
- public final change diff_2(final boolean reverse) {
- return diff(reverse ? reverseScript : forwardScript);
- }
-
- /** Get the results of comparison as an edit script. The script
- is described by a list of changes. The standard ScriptBuilder
- implementations provide for forward and reverse edit scripts.
- Alternate implementations could, for instance, list common elements
- instead of differences.
- @param bld an object to build the script from change flags
- @return the head of a list of changes
- */
- public change diff(final ScriptBuilder bld) {
-
- /* Some lines are obviously insertions or deletions
- because they don't match anything. Detect them now,
- and avoid even thinking about them in the main comparison algorithm. */
-
- discard_confusing_lines ();
-
- /* Now do the main comparison algorithm, considering just the
- undiscarded lines. */
-
- xvec = filevec[0].undiscarded;
- yvec = filevec[1].undiscarded;
-
- int diags =
- filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3;
- fdiag = new int[diags];
- fdiagoff = filevec[1].nondiscarded_lines + 1;
- bdiag = new int[diags];
- bdiagoff = filevec[1].nondiscarded_lines + 1;
-
- compareseq (0, filevec[0].nondiscarded_lines,
- 0, filevec[1].nondiscarded_lines);
- fdiag = null;
- bdiag = null;
-
- /* Modify the results slightly to make them prettier
- in cases where that can validly be done. */
-
- shift_boundaries ();
-
- /* Get the results of comparison in the form of a chain
- of `struct change's -- an edit script. */
- return bld.build_script(
- filevec[0].changed_flag,
- filevec[0].buffered_lines,
- filevec[1].changed_flag,
- filevec[1].buffered_lines
- );
-
- }
-
- /** The result of comparison is an "edit script": a chain of change objects.
- Each change represents one place where some lines are deleted
- and some are inserted.
-
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
-
- public static class change {
- /** Previous or next edit command. */
- public change link;
- /** # lines of file 1 changed here. */
- public final int inserted;
- /** # lines of file 0 changed here. */
- public final int deleted;
- /** Line number of 1st deleted line. */
- public final int line0;
- /** Line number of 1st inserted line. */
- public final int line1;
-
- /** Cons an additional entry onto the front of an edit script OLD.
- LINE0 and LINE1 are the first affected lines in the two files (origin 0).
- DELETED is the number of lines deleted here from file 0.
- INSERTED is the number of lines inserted here in file 1.
-
- If DELETED is 0 then LINE0 is the number of the line before
- which the insertion was done; vice versa for INSERTED and LINE1. */
- public change(int line0, int line1, int deleted, int inserted, change old) {
- this.line0 = line0;
- this.line1 = line1;
- this.inserted = inserted;
- this.deleted = deleted;
- this.link = old;
- //System.err.println(line0+","+line1+","+inserted+","+deleted);
- }
- }
-
- /** Data on one input file being compared.
- */
-
- class file_data {
-
- /** Allocate changed array for the results of comparison. */
- void clear() {
- /* Allocate a flag for each line of each file, saying whether that line
- is an insertion or deletion.
- Allocate an extra element, always zero, at each end of each IndexedSeq.
- */
- changed_flag = new boolean[buffered_lines + 2];
- }
-
- /** Return equiv_count[I] as the number of lines in this file
- that fall in equivalence class I.
- @return the array of equivalence class counts.
- */
- int[] equivCount() {
- int[] equiv_count = new int[equiv_max];
- for (int i = 0; i < buffered_lines; ++i)
- ++equiv_count[equivs[i]];
- return equiv_count;
- }
-
- /** Discard lines that have no matches in another file.
-
- A line which is discarded will not be considered by the actual
- comparison algorithm; it will be as if that line were not in the file.
- The file's `realindexes' table maps virtual line numbers
- (which don't count the discarded lines) into real line numbers;
- this is how the actual comparison algorithm produces results
- that are comprehensible when the discarded lines are counted.
-<p>
- When we discard a line, we also mark it as a deletion or insertion
- so that it will be printed in the output.
- @param f the other file
- */
- void discard_confusing_lines(file_data f) {
- clear();
- /* Set up table of which lines are going to be discarded. */
- final byte[] discarded = discardable(f.equivCount());
-
- /* Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
- filterDiscards(discarded);
-
- /* Actually discard the lines. */
- discard(discarded);
- }
-
- /** Mark to be discarded each line that matches no line of another file.
- If a line matches many lines, mark it as provisionally discardable.
- @see equivCount()
- @param counts The count of each equivalence number for the other file.
- @return 0=nondiscardable, 1=discardable or 2=provisionally discardable
- for each line
- */
-
- private byte[] discardable(final int[] counts) {
- final int end = buffered_lines;
- final byte[] discards = new byte[end];
- final int[] equivs = this.equivs;
- int many = 5;
- int tem = end / 64;
-
- /* Multiply MANY by approximate square root of number of lines.
- That is the threshold for provisionally discardable lines. */
- while ((tem = tem >> 2) > 0)
- many *= 2;
-
- for (int i = 0; i < end; i++)
- {
- int nmatch;
- if (equivs[i] == 0)
- continue;
- nmatch = counts[equivs[i]];
- if (nmatch == 0)
- discards[i] = 1;
- else if (nmatch > many)
- discards[i] = 2;
- }
- return discards;
- }
-
- /** Don't really discard the provisional lines except when they occur
- in a run of discardables, with nonprovisionals at the beginning
- and end. */
-
- private void filterDiscards(final byte[] discards) {
- final int end = buffered_lines;
-
- for (int i = 0; i < end; i++)
- {
- /* Cancel provisional discards not in middle of run of discards. */
- if (discards[i] == 2)
- discards[i] = 0;
- else if (discards[i] != 0)
- {
- /* We have found a nonprovisional discard. */
- int j;
- int length;
- int provisional = 0;
-
- /* Find end of this run of discardable lines.
- Count how many are provisionally discardable. */
- for (j = i; j < end; j++)
- {
- if (discards[j] == 0)
- break;
- if (discards[j] == 2)
- ++provisional;
- }
-
- /* Cancel provisional discards at end, and shrink the run. */
- while (j > i && discards[j - 1] == 2) {
- discards[--j] = 0; --provisional;
- }
-
- /* Now we have the length of a run of discardable lines
- whose first and last are not provisional. */
- length = j - i;
-
- /* If 1/4 of the lines in the run are provisional,
- cancel discarding of all provisional lines in the run. */
- if (provisional * 4 > length)
- {
- while (j > i)
- if (discards[--j] == 2)
- discards[j] = 0;
- }
- else
- {
- int consec;
- int minimum = 1;
- int tem = length / 4;
-
- /* MINIMUM is approximate square root of LENGTH/4.
- A subrun of two or more provisionals can stand
- when LENGTH is at least 16.
- A subrun of 4 or more can stand when LENGTH >= 64. */
- while ((tem = tem >> 2) > 0)
- minimum *= 2;
- minimum++;
-
- /* Cancel any subrun of MINIMUM or more provisionals
- within the larger run. */
- for (j = 0, consec = 0; j < length; j++)
- if (discards[i + j] != 2)
- consec = 0;
- else if (minimum == ++consec)
- /* Back up to start of subrun, to cancel it all. */
- j -= consec;
- else if (minimum < consec)
- discards[i + j] = 0;
-
- /* Scan from beginning of run
- until we find 3 or more nonprovisionals in a row
- or until the first nonprovisional at least 8 lines in.
- Until that point, cancel any provisionals. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i + j] == 1)
- break;
- if (discards[i + j] == 2) {
- consec = 0; discards[i + j] = 0;
- }
- else if (discards[i + j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
-
- /* I advances to the last line of the run. */
- i += length - 1;
-
- /* Same thing, from end. */
- for (j = 0, consec = 0; j < length; j++)
- {
- if (j >= 8 && discards[i - j] == 1)
- break;
- if (discards[i - j] == 2) {
- consec = 0; discards[i - j] = 0;
- }
- else if (discards[i - j] == 0)
- consec = 0;
- else
- consec++;
- if (consec == 3)
- break;
- }
- }
- }
- }
- }
-
- /** Actually discard the lines.
- @param discards flags lines to be discarded
- */
- private void discard(final byte[] discards) {
- final int end = buffered_lines;
- int j = 0;
- for (int i = 0; i < end; ++i)
- if (no_discards || discards[i] == 0)
- {
- undiscarded[j] = equivs[i];
- realindexes[j++] = i;
- }
- else
- changed_flag[1+i] = true;
- nondiscarded_lines = j;
- }
-
- file_data(Object[] data, Hashtable<Object, Integer> h) {
- buffered_lines = data.length;
-
- equivs = new int[buffered_lines];
- undiscarded = new int[buffered_lines];
- realindexes = new int[buffered_lines];
-
- for (int i = 0; i < data.length; ++i) {
- Integer ir = h.get(data[i]);
- if (ir == null)
- h.put(data[i], new Integer(equivs[i] = equiv_max++));
- else
- equivs[i] = ir.intValue();
- }
- }
-
- /** Adjust inserts/deletes of blank lines to join changes
- as much as possible.
-
- We do something when a run of changed lines include a blank
- line at one end and have an excluded blank line at the other.
- We are free to choose which blank line is included.
- `compareseq' always chooses the one at the beginning,
- but usually it is cleaner to consider the following blank line
- to be the "change". The only exception is if the preceding blank line
- would join this change to other changes.
- @param f the file being compared against
- */
-
- void shift_boundaries(file_data f) {
- final boolean[] changed = changed_flag;
- final boolean[] other_changed = f.changed_flag;
- int i = 0;
- int j = 0;
- int i_end = buffered_lines;
- int preceding = -1;
- int other_preceding = -1;
-
- for (;;)
- {
- int start, end, other_start;
-
- /* Scan forwards to find beginning of another run of changes.
- Also keep track of the corresponding point in the other file. */
-
- while (i < i_end && !changed[1+i])
- {
- while (other_changed[1+j++])
- /* Non-corresponding lines in the other file
- will count as the preceding batch of changes. */
- other_preceding = j;
- i++;
- }
-
- if (i == i_end)
- break;
-
- start = i;
- other_start = j;
-
- for (;;)
- {
- /* Now find the end of this run of changes. */
-
- while (i < i_end && changed[1+i]) i++;
- end = i;
-
- /* If the first changed line matches the following unchanged one,
- and this run does not follow right after a previous run,
- and there are no lines deleted from the other file here,
- then classify the first changed line as unchanged
- and the following line as changed in its place. */
-
- /* You might ask, how could this run follow right after another?
- Only because the previous run was shifted here. */
-
- if (end != i_end
- && equivs[start] == equivs[end]
- && !other_changed[1+j]
- && end != i_end
- && !((preceding >= 0 && start == preceding)
- || (other_preceding >= 0
- && other_start == other_preceding)))
- {
- changed[1+end++] = true;
- changed[1+start++] = false;
- ++i;
- /* Since one line-that-matches is now before this run
- instead of after, we must advance in the other file
- to keep in synch. */
- ++j;
- }
- else
- break;
- }
-
- preceding = i;
- other_preceding = j;
- }
- }
-
- /** Number of elements (lines) in this file. */
- final int buffered_lines;
-
- /** IndexedSeq, indexed by line number, containing an equivalence code for
- each line. It is this IndexedSeq that is actually compared with that
- of another file to generate differences. */
- private final int[] equivs;
-
- /** IndexedSeq, like the previous one except that
- the elements for discarded lines have been squeezed out. */
- final int[] undiscarded;
-
- /** IndexedSeq mapping virtual line numbers (not counting discarded lines)
- to real ones (counting those lines). Both are origin-0. */
- final int[] realindexes;
-
- /** Total number of nondiscarded lines. */
- int nondiscarded_lines;
-
- /** Array, indexed by real origin-1 line number,
- containing true for a line that is an insertion or a deletion.
- The results of comparison are stored here. */
- boolean[] changed_flag;
-
- }
-}
diff --git a/src/partest/scala/tools/partest/nest/DiffPrint.java b/src/partest/scala/tools/partest/nest/DiffPrint.java
deleted file mode 100644
index 31f9a1bc79..0000000000
--- a/src/partest/scala/tools/partest/nest/DiffPrint.java
+++ /dev/null
@@ -1,606 +0,0 @@
-
-package scala.tools.partest.nest;
-
-import java.io.*;
-import java.util.Vector;
-import java.util.Date;
-//import com.objectspace.jgl.predicates.UnaryPredicate;
-
-interface UnaryPredicate {
- boolean execute(Object obj);
-}
-
-/** A simple framework for printing change lists produced by <code>Diff</code>.
- @see bmsi.util.Diff
- @author Stuart D. Gathman
- Copyright (C) 2000 Business Management Systems, Inc.
-<p>
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 1, or (at your option)
- any later version.
-<p>
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-<p>
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
- */
-public class DiffPrint {
- /** A Base class for printing edit scripts produced by Diff.
- This class divides the change list into "hunks", and calls
- <code>print_hunk</code> for each hunk. Various utility methods
- are provided as well.
- */
- public static abstract class Base {
- protected Base(Object[] a,Object[] b, Writer w) {
- outfile = new PrintWriter(w);
- file0 = a;
- file1 = b;
- }
- /** Set to ignore certain kinds of lines when printing
- an edit script. For example, ignoring blank lines or comments.
- */
- protected UnaryPredicate ignore = null;
-
- /** Set to the lines of the files being compared.
- */
- protected Object[] file0, file1;
-
- /** Divide SCRIPT into pieces by calling HUNKFUN and
- print each piece with PRINTFUN.
- Both functions take one arg, an edit script.
-
- PRINTFUN takes a subscript which belongs together (with a null
- link at the end) and prints it. */
- public void print_script(Diff.change script) {
- Diff.change next = script;
-
- while (next != null)
- {
- Diff.change t, end;
-
- /* Find a set of changes that belong together. */
- t = next;
- end = hunkfun(next);
-
- /* Disconnect them from the rest of the changes,
- making them a hunk, and remember the rest for next iteration. */
- next = end.link;
- end.link = null;
- //if (DEBUG)
- // debug_script(t);
-
- /* Print this hunk. */
- print_hunk(t);
-
- /* Reconnect the script so it will all be freed properly. */
- end.link = next;
- }
- outfile.flush();
- }
-
- /** Called with the tail of the script
- and returns the last link that belongs together with the start
- of the tail. */
-
- protected Diff.change hunkfun(Diff.change hunk) {
- return hunk;
- }
-
- protected int first0, last0, first1, last1, deletes, inserts;
- protected PrintWriter outfile;
-
- /** Look at a hunk of edit script and report the range of lines in each file
- that it applies to. HUNK is the start of the hunk, which is a chain
- of `struct change'. The first and last line numbers of file 0 are stored
- in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1.
- Note that these are internal line numbers that count from 0.
-
- If no lines from file 0 are deleted, then FIRST0 is LAST0+1.
-
- Also set *DELETES nonzero if any lines of file 0 are deleted
- and set *INSERTS nonzero if any lines of file 1 are inserted.
- If only ignorable lines are inserted or deleted, both are
- set to 0. */
-
- protected void analyze_hunk(Diff.change hunk) {
- int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0;
- int i;
- Diff.change next;
- boolean nontrivial = (ignore == null);
-
- show_from = show_to = 0;
-
- f0 = hunk.line0;
- f1 = hunk.line1;
-
- for (next = hunk; next != null; next = next.link)
- {
- l0 = next.line0 + next.deleted - 1;
- l1 = next.line1 + next.inserted - 1;
- show_from += next.deleted;
- show_to += next.inserted;
- for (i = next.line0; i <= l0 && ! nontrivial; i++)
- if (!ignore.execute(file0[i]))
- nontrivial = true;
- for (i = next.line1; i <= l1 && ! nontrivial; i++)
- if (!ignore.execute(file1[i]))
- nontrivial = true;
- }
-
- first0 = f0;
- last0 = l0;
- first1 = f1;
- last1 = l1;
-
- /* If all inserted or deleted lines are ignorable,
- tell the caller to ignore this hunk. */
-
- if (!nontrivial)
- show_from = show_to = 0;
-
- deletes = show_from;
- inserts = show_to;
- }
-
- /** Print the script header which identifies the files compared. */
- protected void print_header(String filea, String fileb) { }
-
- protected abstract void print_hunk(Diff.change hunk);
-
- protected void print_1_line(String pre,Object linbuf) {
- outfile.println(pre + linbuf.toString());
- }
-
- /** Print a pair of line numbers with SEPCHAR, translated for file FILE.
- If the two numbers are identical, print just one number.
-
- Args A and B are internal line numbers.
- We print the translated (real) line numbers. */
-
- protected void print_number_range (char sepchar, int a, int b) {
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (++b > ++a)
- outfile.print("" + a + sepchar + b);
- else
- outfile.print(b);
- }
-
- public static char change_letter(int inserts, int deletes) {
- if (inserts == 0)
- return 'd';
- else if (deletes == 0)
- return 'a';
- else
- return 'c';
- }
- }
-
- /** Print a change list in the standard diff format.
- */
- public static class NormalPrint extends Base {
-
- public NormalPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of a normal diff.
- This is a contiguous portion of a complete edit script,
- describing changes in consecutive lines. */
-
- protected void print_hunk (Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk(hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.print(change_letter(inserts, deletes));
- print_number_range (',', first1, last1);
- outfile.println();
-
- /* Print the lines that the first file has. */
- if (deletes != 0)
- for (int i = first0; i <= last0; i++)
- print_1_line ("< ", file0[i]);
-
- if (inserts != 0 && deletes != 0)
- outfile.println("---");
-
- /* Print the lines that the second file has. */
- if (inserts != 0)
- for (int i = first1; i <= last1; i++)
- print_1_line ("> ", file1[i]);
- }
- }
-
- /** Prints an edit script in a format suitable for input to <code>ed</code>.
- The edit script must be generated with the reverse option to
- be useful as actual <code>ed</code> input.
- */
- public static class EdPrint extends Base {
-
- public EdPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- /** Print a hunk of an ed diff */
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Print out the line number header for this hunk */
- print_number_range (',', first0, last0);
- outfile.println(change_letter(inserts, deletes));
-
- /* Print new/changed lines from second file, if needed */
- if (inserts != 0)
- {
- boolean inserting = true;
- for (int i = first1; i <= last1; i++)
- {
- /* Resume the insert, if we stopped. */
- if (! inserting)
- outfile.println(i - first1 + first0 + "a");
- inserting = true;
-
- /* If the file's line is just a dot, it would confuse `ed'.
- So output it with a double dot, and set the flag LEADING_DOT
- so that we will output another ed-command later
- to change the double dot into a single dot. */
-
- if (".".equals(file1[i]))
- {
- outfile.println("..");
- outfile.println(".");
- /* Now change that double dot to the desired single dot. */
- outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././");
- inserting = false;
- }
- else
- /* Line is not `.', so output it unmodified. */
- print_1_line ("", file1[i]);
- }
-
- /* End insert mode, if we are still in it. */
- if (inserting)
- outfile.println(".");
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class ContextPrint extends Base {
-
- protected int context = 3;
-
- public ContextPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- protected void print_context_label (String mark, File inf, String label) {
- if (label != null)
- outfile.println(mark + ' ' + label);
- else if (inf.lastModified() > 0)
- // FIXME: use DateFormat to get precise format needed.
- outfile.println(
- mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified())
- );
- else
- /* Don't pretend that standard input is ancient. */
- outfile.println(mark + ' ' + inf.getPath());
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("***", new File(filea), filea);
- print_context_label ("---", new File(fileb), fileb);
- }
-
- /** If function_regexp defined, search for start of function. */
- private String find_function(Object[] lines, int start) {
- return null;
- }
-
- protected void print_function(Object[] file,int start) {
- String function = find_function (file0, first0);
- if (function != null) {
- outfile.print(" ");
- outfile.print(
- (function.length() < 40) ? function : function.substring(0,40)
- );
- }
- }
-
- protected void print_hunk(Diff.change hunk) {
-
- /* Determine range of line numbers involved in each file. */
-
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
- outfile.print("***************");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function (file0, first0);
-
- outfile.println();
- outfile.print("*** ");
- print_number_range (',', first0, last0);
- outfile.println(" ****");
-
- if (deletes != 0) {
- Diff.change next = hunk;
-
- for (int i = first0; i <= last0; i++) {
- /* Skip past changes that apply (in file 0)
- only to lines before line I. */
-
- while (next != null && next.line0 + next.deleted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line0 <= i)
- /* The change NEXT covers this line.
- If lines were inserted here in file 1, this is "changed".
- Otherwise it is "deleted". */
- prefix = (next.inserted > 0) ? "!" : "-";
-
- print_1_line (prefix, file0[i]);
- }
- }
-
- outfile.print("--- ");
- print_number_range (',', first1, last1);
- outfile.println(" ----");
-
- if (inserts != 0) {
- Diff.change next = hunk;
-
- for (int i = first1; i <= last1; i++) {
- /* Skip past changes that apply (in file 1)
- only to lines before line I. */
-
- while (next != null && next.line1 + next.inserted <= i)
- next = next.link;
-
- /* Compute the marking for line I. */
-
- String prefix = " ";
- if (next != null && next.line1 <= i)
- /* The change NEXT covers this line.
- If lines were deleted here in file 0, this is "changed".
- Otherwise it is "inserted". */
- prefix = (next.deleted > 0) ? "!" : "+";
-
- print_1_line (prefix, file1[i]);
- }
- }
- }
- }
-
- /** Prints an edit script in context diff format. This and its
- 'unified' variation is used for source code patches.
- */
- public static class UnifiedPrint extends ContextPrint {
-
- public UnifiedPrint(Object[] a,Object[] b, Writer w) {
- super(a,b,w);
- }
-
- public void print_header(String filea,String fileb) {
- print_context_label ("---", new File(filea), filea);
- print_context_label ("+++", new File(fileb), fileb);
- }
-
- private void print_number_range (int a, int b) {
- //translate_range (file, a, b, &trans_a, &trans_b);
-
- /* Note: we can have B < A in the case of a range of no lines.
- In this case, we should print the line number before the range,
- which is B. */
- if (b < a)
- outfile.print(b + ",0");
- else
- super.print_number_range(',',a,b);
- }
-
- protected void print_hunk(Diff.change hunk) {
- /* Determine range of line numbers involved in each file. */
- analyze_hunk (hunk);
-
- if (deletes == 0 && inserts == 0)
- return;
-
- /* Include a context's width before and after. */
-
- first0 = Math.max(first0 - context, 0);
- first1 = Math.max(first1 - context, 0);
- last0 = Math.min(last0 + context, file0.length - 1);
- last1 = Math.min(last1 + context, file1.length - 1);
-
-
-
- outfile.print("@@ -");
- print_number_range (first0, last0);
- outfile.print(" +");
- print_number_range (first1, last1);
- outfile.print(" @@");
-
- /* If we looked for and found a function this is part of,
- include its name in the header of the diff section. */
- print_function(file0,first0);
-
- outfile.println();
-
- Diff.change next = hunk;
- int i = first0;
- int j = first1;
-
- while (i <= last0 || j <= last1) {
-
- /* If the line isn't a difference, output the context from file 0. */
-
- if (next == null || i < next.line0) {
- outfile.print(' ');
- print_1_line ("", file0[i++]);
- j++;
- }
- else {
- /* For each difference, first output the deleted part. */
-
- int k = next.deleted;
- while (k-- > 0) {
- outfile.print('-');
- print_1_line ("", file0[i++]);
- }
-
- /* Then output the inserted part. */
-
- k = next.inserted;
- while (k-- > 0) {
- outfile.print('+');
- print_1_line ("", file1[j++]);
- }
-
- /* We're done with this hunk, so on to the next! */
-
- next = next.link;
- }
- }
- }
- }
-
-
- /** Read a text file into an array of String. This provides basic diff
- functionality. A more advanced diff utility will use specialized
- objects to represent the text lines, with options to, for example,
- convert sequences of whitespace to a single space for comparison
- purposes.
- */
- static String[] slurp(String file) throws IOException {
- BufferedReader rdr = new BufferedReader(new FileReader(file));
- Vector<String> s = new Vector<String>();
- for (;;) {
- String line = rdr.readLine();
- if (line == null) break;
- s.addElement(line);
- }
- String[] a = new String[s.size()];
- s.copyInto(a);
- return a;
- }
-
- public static void main(String[] argv) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- System.err.println("No differences");
- else {
- Base p;
- Writer w = new OutputStreamWriter(System.out);
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
- public static void doDiff(String[] argv, Writer w) throws IOException {
- String filea = argv[argv.length - 2];
- String fileb = argv[argv.length - 1];
- String[] a = slurp(filea);
- String[] b = slurp(fileb);
- Diff d = new Diff(a,b);
- char style = 'n';
- for (int i = 0; i < argv.length - 2; ++i) {
- String f = argv[i];
- if (f.startsWith("-")) {
- for (int j = 1; j < f.length(); ++j) {
- switch (f.charAt(j)) {
- case 'e': // Ed style
- style = 'e'; break;
- case 'c': // Context diff
- style = 'c'; break;
- case 'u':
- style = 'u'; break;
- }
- }
- }
- }
- boolean reverse = style == 'e';
- Diff.change script = d.diff_2(reverse);
- if (script == null)
- w.write("No differences\n");
- else {
- Base p;
- switch (style) {
- case 'e':
- p = new EdPrint(a,b,w); break;
- case 'c':
- p = new ContextPrint(a,b,w); break;
- case 'u':
- p = new UnifiedPrint(a,b,w); break;
- default:
- p = new NormalPrint(a,b,w);
- }
- p.print_header(filea,fileb);
- p.print_script(script);
- }
- }
-
-}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 2823967ecf..70fdb33c6a 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -18,34 +18,31 @@ import scala.collection.mutable
trait FileUtil {
/**
- * Compares two files using a Java implementation of the GNU diff
- * available at http://www.bmsi.com/java/#diff.
+ * Compares two files using difflib to produce a unified diff.
*
* @param f1 the first file to be compared
* @param f2 the second file to be compared
- * @return the text difference between the compared files
+ * @return the unified diff of the compared files or the empty string if they're equal
*/
def compareFiles(f1: File, f2: File): String = {
- val diffWriter = new StringWriter
- val args = Array(f1.getAbsolutePath(), f2.getAbsolutePath())
-
- DiffPrint.doDiff(args, diffWriter)
- val res = diffWriter.toString
- if (res startsWith "No") "" else res
+ compareContents(io.Source.fromFile(f1).getLines.toSeq, io.Source.fromFile(f2).getLines.toSeq, f1.getName, f2.getName)
}
- def compareContents(lines1: Seq[String], lines2: Seq[String]): String = {
- val xs1 = lines1.toArray[AnyRef]
- val xs2 = lines2.toArray[AnyRef]
-
- val diff = new Diff(xs1, xs2)
- val change = diff.diff_2(false)
- val writer = new StringWriter
- val p = new DiffPrint.NormalPrint(xs1, xs2, writer)
-
- p.print_script(change)
- val res = writer.toString
- if (res startsWith "No ") ""
- else res
+
+ /**
+ * Compares two lists of lines using difflib to produce a unified diff.
+ *
+ * @param origLines the first seq of lines to be compared
+ * @param newLines the second seq of lines to be compared
+ * @param origName file name to be used in unified diff for `origLines`
+ * @param newName file name to be used in unified diff for `newLines`
+ * @return the unified diff of the `origLines` and `newLines` or the empty string if they're equal
+ */
+ def compareContents(origLines: Seq[String], newLines: Seq[String], origName: String = "a", newName: String = "b"): String = {
+ import collection.JavaConverters._
+
+ val diff = difflib.DiffUtils.diff(origLines.asJava, newLines.asJava)
+ if (diff.getDeltas.isEmpty) ""
+ else difflib.DiffUtils.generateUnifiedDiff(origName, newName, origLines.asJava, diff, 1).asScala.mkString("\n")
}
}
object FileUtil extends FileUtil { }
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index a42c2219b1..0ba34777a0 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -72,6 +72,9 @@ object PathSettings {
findJar(buildPackLibDir.files ++ srcLibDir.files, "scalacheck") getOrElse {
sys.error("No scalacheck jar found in '%s' or '%s'".format(buildPackLibDir, srcLibDir))
}
+
+ lazy val diffUtils: File =
+ findJar(buildPackLibDir.files, "diffutils") getOrElse sys.error(s"No diffutils.jar found in '$buildPackLibDir'.")
}
class PathSettings() {
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 5cb8589d66..700667afcf 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -53,13 +53,14 @@ class ReflectiveRunner {
Array(latestCompFile, latestReflectFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
+ // this seems to be the core classloader that determines which classes can be found when running partest from the test/partest script
var sepLoader = new URLClassLoader(sepUrls, null)
// this is a workaround for https://issues.scala-lang.org/browse/SI-5433
- // when that bug is fixed, this paragraph of code can be safely removed
// we hack into the classloader that will become parent classloader for scalac
// this way we ensure that reflective macro lookup will pick correct Code.lift
- sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null)
+ // it's also used to inject diffutils into the classpath when running partest from the test/partest script
+ sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: (PathSettings.diffUtils +: files)) map (_.toURL), null)
if (isPartestDebug)
println("Loading classes from:\n" + sepUrls.mkString("\n"))
diff --git a/src/partest/scala/tools/partest/nest/RunnerManager.scala b/src/partest/scala/tools/partest/nest/RunnerManager.scala
index c5e944fbc0..f80f6f38fd 100644
--- a/src/partest/scala/tools/partest/nest/RunnerManager.scala
+++ b/src/partest/scala/tools/partest/nest/RunnerManager.scala
@@ -79,6 +79,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
val compileMgr = new CompileManager(fileManager)
fileManager.CLASSPATH += File.pathSeparator + PathSettings.scalaCheck
+ fileManager.CLASSPATH += File.pathSeparator + PathSettings.diffUtils // needed to put diffutils on test/partest's classpath
private def compareFiles(f1: File, f2: File): String =
try fileManager.compareFiles(f1, f2)
@@ -817,7 +818,7 @@ class RunnerManager(kind: String, val fileManager: FileManager, params: TestRunP
if (passed exists (x => !x)) {
if (fileManager.showDiff || isPartestDebug)
NestUI.normal(testDiff)
- else if (fileManager.showLog)
+ if (fileManager.showLog)
showLog(logFile)
}
}
diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala
index b53c700701..c8e03f1d91 100644
--- a/src/reflect/scala/reflect/api/Symbols.scala
+++ b/src/reflect/scala/reflect/api/Symbols.scala
@@ -946,7 +946,7 @@ trait Symbols { self: Universe =>
def knownDirectSubclasses: Set[Symbol]
/** The list of all base classes of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
+ * in linearization order, starting with the class itself and ending
* in class Any.
*
* @group Class
diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala
index e988971ace..7457910226 100644
--- a/src/reflect/scala/reflect/api/TypeTags.scala
+++ b/src/reflect/scala/reflect/api/TypeTags.scala
@@ -221,24 +221,7 @@ trait TypeTags { self: Universe =>
def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): WeakTypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => WeakTypeTag.Byte.asInstanceOf[WeakTypeTag[T]]
- case ShortTpe => WeakTypeTag.Short.asInstanceOf[WeakTypeTag[T]]
- case CharTpe => WeakTypeTag.Char.asInstanceOf[WeakTypeTag[T]]
- case IntTpe => WeakTypeTag.Int.asInstanceOf[WeakTypeTag[T]]
- case LongTpe => WeakTypeTag.Long.asInstanceOf[WeakTypeTag[T]]
- case FloatTpe => WeakTypeTag.Float.asInstanceOf[WeakTypeTag[T]]
- case DoubleTpe => WeakTypeTag.Double.asInstanceOf[WeakTypeTag[T]]
- case BooleanTpe => WeakTypeTag.Boolean.asInstanceOf[WeakTypeTag[T]]
- case UnitTpe => WeakTypeTag.Unit.asInstanceOf[WeakTypeTag[T]]
- case AnyTpe => WeakTypeTag.Any.asInstanceOf[WeakTypeTag[T]]
- case AnyValTpe => WeakTypeTag.AnyVal.asInstanceOf[WeakTypeTag[T]]
- case AnyRefTpe => WeakTypeTag.AnyRef.asInstanceOf[WeakTypeTag[T]]
- case ObjectTpe => WeakTypeTag.Object.asInstanceOf[WeakTypeTag[T]]
- case NothingTpe => WeakTypeTag.Nothing.asInstanceOf[WeakTypeTag[T]]
- case NullTpe => WeakTypeTag.Null.asInstanceOf[WeakTypeTag[T]]
- case _ => new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
+ new WeakTypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
def unapply[T](ttag: WeakTypeTag[T]): Option[Type] = Some(ttag.tpe)
}
@@ -299,24 +282,7 @@ trait TypeTags { self: Universe =>
val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null)
def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] =
- tpec1(mirror1) match {
- case ByteTpe => TypeTag.Byte.asInstanceOf[TypeTag[T]]
- case ShortTpe => TypeTag.Short.asInstanceOf[TypeTag[T]]
- case CharTpe => TypeTag.Char.asInstanceOf[TypeTag[T]]
- case IntTpe => TypeTag.Int.asInstanceOf[TypeTag[T]]
- case LongTpe => TypeTag.Long.asInstanceOf[TypeTag[T]]
- case FloatTpe => TypeTag.Float.asInstanceOf[TypeTag[T]]
- case DoubleTpe => TypeTag.Double.asInstanceOf[TypeTag[T]]
- case BooleanTpe => TypeTag.Boolean.asInstanceOf[TypeTag[T]]
- case UnitTpe => TypeTag.Unit.asInstanceOf[TypeTag[T]]
- case AnyTpe => TypeTag.Any.asInstanceOf[TypeTag[T]]
- case AnyValTpe => TypeTag.AnyVal.asInstanceOf[TypeTag[T]]
- case AnyRefTpe => TypeTag.AnyRef.asInstanceOf[TypeTag[T]]
- case ObjectTpe => TypeTag.Object.asInstanceOf[TypeTag[T]]
- case NothingTpe => TypeTag.Nothing.asInstanceOf[TypeTag[T]]
- case NullTpe => TypeTag.Null.asInstanceOf[TypeTag[T]]
- case _ => new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
- }
+ new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1)
def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe)
}
diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala
index 143438b8f5..72163ef0e9 100644
--- a/src/reflect/scala/reflect/api/Types.scala
+++ b/src/reflect/scala/reflect/api/Types.scala
@@ -149,7 +149,7 @@ trait Types { self: Universe =>
def =:= (that: Type): Boolean
/** The list of all base classes of this type (including its own typeSymbol)
- * in reverse linearization order, starting with the class itself and ending
+ * in linearization order, starting with the class itself and ending
* in class Any.
*/
def baseClasses: List[Symbol]
diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
index 7ccb661426..c198271fb1 100644
--- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala
+++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala
@@ -72,6 +72,9 @@ object ClassfileConstants {
final val CONSTANT_METHODREF = 10
final val CONSTANT_INTFMETHODREF = 11
final val CONSTANT_NAMEANDTYPE = 12
+ final val CONSTANT_METHODHANDLE = 15
+ final val CONSTANT_METHODTYPE = 16
+ final val CONSTANT_INVOKEDYNAMIC = 18
// tags describing the type of a literal in attribute values
final val BYTE_TAG = 'B'
@@ -306,7 +309,7 @@ object ClassfileConstants {
final val invokespecial = 0xb7
final val invokestatic = 0xb8
final val invokeinterface = 0xb9
- final val xxxunusedxxxx = 0xba
+ final val invokedynamic = 0xba
final val new_ = 0xbb
final val newarray = 0xbc
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 6e4ca76382..e5d9e54a16 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -1046,7 +1046,7 @@ trait Definitions extends api.StandardDefinitions {
getMemberIfDefined(owner, name) orElse {
if (phase.flatClasses && name.isTypeName && !owner.isPackageObjectOrClass) {
val pkg = owner.owner
- val flatname = nme.flattenedName(owner.name, name)
+ val flatname = tpnme.flattenedName(owner.name, name)
getMember(pkg, flatname)
}
else fatalMissingSymbol(owner, name)
diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
index 8b24678fd6..3bcb793926 100644
--- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
+++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala
@@ -32,4 +32,81 @@ trait ExistentialsAndSkolems {
}
(new Deskolemizer).typeSkolems
}
+
+ def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type?
+ sym.isTypeParameter && sym.owner.isJavaDefined
+
+ /** If we map a set of hidden symbols to their existential bounds, we
+ * have a problem: the bounds may themselves contain references to the
+ * hidden symbols. So this recursively calls existentialBound until
+ * the typeSymbol is not amongst the symbols being hidden.
+ */
+ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = {
+ def safeBound(t: Type): Type =
+ if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t
+
+ def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match {
+ case tp @ RefinedType(parents, decls) =>
+ val parents1 = parents mapConserve safeBound
+ if (parents eq parents1) tp
+ else copyRefinedType(tp, parents1, decls)
+ case tp => tp
+ }
+
+ // Hanging onto lower bound in case anything interesting
+ // happens with it.
+ mapFrom(hidden)(s => s.existentialBound match {
+ case TypeBounds(lo, hi) => TypeBounds(lo, hiBound(s))
+ case _ => hiBound(s)
+ })
+ }
+
+ /** Given a set `rawSyms` of term- and type-symbols, and a type
+ * `tp`, produce a set of fresh type parameters and a type so that
+ * it can be abstracted to an existential type. Every type symbol
+ * `T` in `rawSyms` is mapped to a clone. Every term symbol `x` of
+ * type `T` in `rawSyms` is given an associated type symbol of the
+ * following form:
+ *
+ * type x.type <: T with Singleton
+ *
+ * The name of the type parameter is `x.type`, to produce nice
+ * diagnostics. The Singleton parent ensures that the type
+ * parameter is still seen as a stable type. Type symbols in
+ * rawSyms are fully replaced by the new symbols. Term symbols are
+ * also replaced, except for term symbols of an Ident tree, where
+ * only the type of the Ident is changed.
+ */
+ final def existentialTransform[T](rawSyms: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None)(creator: (List[Symbol], Type) => T): T = {
+ val allBounds = existentialBoundsExcludingHidden(rawSyms)
+ val typeParams: List[Symbol] = rawSyms map { sym =>
+ val name = sym.name match {
+ case x: TypeName => x
+ case x => tpnme.singletonName(x)
+ }
+ def rawOwner0 = rawOwner.getOrElse(abort(s"no owner provided for existential transform over raw parameter: $sym"))
+ val bound = allBounds(sym)
+ val sowner = if (isRawParameter(sym)) rawOwner0 else sym.owner
+ val quantified = sowner.newExistential(name, sym.pos)
+
+ quantified setInfo bound.cloneInfo(quantified)
+ }
+ // Higher-kinded existentials are not yet supported, but this is
+ // tpeHK for when they are: "if a type constructor is expected/allowed,
+ // tpeHK must be called instead of tpe."
+ val typeParamTypes = typeParams map (_.tpeHK)
+ def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes)
+
+ creator(typeParams map (_ modifyInfo doSubst), doSubst(tp))
+ }
+
+ /**
+ * Compute an existential type from hidden symbols `hidden` and type `tp`.
+ * @param hidden The symbols that will be existentially abstracted
+ * @param tp The original type
+ * @param rawOwner The owner for Java raw types.
+ */
+ final def packSymbols(hidden: List[Symbol], tp: Type, rawOwner: Option[Symbol] = None): Type =
+ if (hidden.isEmpty) tp
+ else existentialTransform(hidden, tp, rawOwner)(existentialAbstraction)
}
diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala
index 5853315479..5ebe02d95d 100644
--- a/src/reflect/scala/reflect/internal/Flags.scala
+++ b/src/reflect/scala/reflect/internal/Flags.scala
@@ -59,9 +59,9 @@ import scala.collection.{ mutable, immutable }
// 42: VBRIDGE
// 43: VARARGS
// 44: TRIEDCOOKING
-// 45:
-// 46:
-// 47:
+// 45: SYNCHRONIZED/M
+// 46: ARTIFACT
+// 47: DEFAULTMETHOD/M
// 48:
// 49:
// 50:
@@ -116,6 +116,8 @@ class ModifierFlags {
final val LAZY = 1L << 31 // symbol is a lazy val. can't have MUTABLE unless transformed by typer
final val PRESUPER = 1L << 37 // value is evaluated before super call
final val DEFAULTINIT = 1L << 41 // symbol is initialized to the default value: used by -Xcheckinit
+ // ARTIFACT at #46 in 2.11+
+ final val DEFAULTMETHOD = 1L << 47 // symbol is a java default method
// Overridden.
def flagToString(flag: Long): String = ""
@@ -239,7 +241,7 @@ class Flags extends ModifierFlags {
*/
final val ExplicitFlags =
PRIVATE | PROTECTED | ABSTRACT | FINAL | SEALED |
- OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY
+ OVERRIDE | CASE | IMPLICIT | ABSOVERRIDE | LAZY | DEFAULTMETHOD
/** The two bridge flags */
final val BridgeFlags = BRIDGE | VBRIDGE
@@ -274,7 +276,7 @@ class Flags extends ModifierFlags {
* from Modifiers. Others which may be applied at creation time are:
* SYNTHETIC.
*/
- final val ValueParameterFlags = BYNAMEPARAM | IMPLICIT | DEFAULTPARAM | STABLE
+ final val ValueParameterFlags = BYNAMEPARAM | IMPLICIT | DEFAULTPARAM | STABLE | SYNTHETIC
final val BeanPropertyFlags = DEFERRED | OVERRIDE | STATIC
final val VarianceFlags = COVARIANT | CONTRAVARIANT
@@ -421,7 +423,7 @@ class Flags extends ModifierFlags {
case TRIEDCOOKING => "<triedcooking>" // (1L << 44)
case SYNCHRONIZED => "<synchronized>" // (1L << 45)
case 0x400000000000L => "" // (1L << 46)
- case 0x800000000000L => "" // (1L << 47)
+ case DEFAULTMETHOD => "<defaultmethod>" // (1L << 47)
case 0x1000000000000L => "" // (1L << 48)
case 0x2000000000000L => "" // (1L << 49)
case 0x4000000000000L => "" // (1L << 50)
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index bcda2bc1ae..de7af4340d 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -381,13 +381,16 @@ trait StdNames {
/** If `name` is an expandedName name, the original name.
* Otherwise `name` itself.
*/
- def originalName(name: Name): Name = {
- var i = name.length
- while (i >= 2 && !(name.charAt(i - 1) == '$' && name.charAt(i - 2) == '$')) i -= 1
- if (i >= 2) {
- while (i >= 3 && name.charAt(i - 3) == '$') i -= 1
- name.subName(i, name.length)
- } else name
+ def originalName(name: Name): Name = name.toString lastIndexOf "$$" match {
+ case -1 | 0 => name
+ case idx0 =>
+ // Sketchville - We've found $$ but if it's part of $$$ or $$$$
+ // or something we need to keep the bonus dollars, so e.g. foo$$$outer
+ // has an original name of $outer.
+ var idx = idx0
+ while (idx > 0 && name.charAt(idx - 1) == '$')
+ idx -= 1
+ name drop idx + 2
}
def unspecializedName(name: Name): Name = (
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index d9eb48ff2d..45c16b7302 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3022,7 +3022,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
if (Statistics.canEnable) Statistics.incCounter(nameCount)
if (needsFlatClasses) {
if (flatname eq null)
- flatname = nme.flattenedName(rawowner.name, rawname).toTypeName
+ flatname = tpnme.flattenedName(rawowner.name, rawname)
flatname
}
diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala
index 3040486076..fa4441e513 100644
--- a/src/reflect/scala/reflect/internal/TreeInfo.scala
+++ b/src/reflect/scala/reflect/internal/TreeInfo.scala
@@ -17,7 +17,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
+ import definitions.{ isTupleSymbol, isVarArgsList, isCastSymbol, ThrowableClass, TupleClass, MacroContextClass, MacroContextPrefixType }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -98,7 +98,8 @@ abstract class TreeInfo {
// However, before typing, applications of nullary functional values are also
// Apply(function, Nil) trees. To prevent them from being treated as pure,
// we check that the callee is a method.
- fn.symbol.isMethod && !fn.symbol.isLazy && isExprSafeToInline(fn)
+ // The callee might also be a Block, which has a null symbol, so we guard against that (SI-7185)
+ fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isLazy && isExprSafeToInline(fn)
case Typed(expr, _) =>
isExprSafeToInline(expr)
case Block(stats, expr) =>
@@ -514,6 +515,20 @@ abstract class TreeInfo {
case _ => false
}
+ /**
+ * {{{
+ * //------------------------ => effectivePatternArity(args)
+ * case Extractor(a) => 1
+ * case Extractor(a, b) => 2
+ * case Extractor((a, b)) => 2
+ * case Extractor(a @ (b, c)) => 2
+ * }}}
+ */
+ def effectivePatternArity(args: List[Tree]): Int = (args.map(unbind) match {
+ case Apply(fun, xs) :: Nil if isTupleSymbol(fun.symbol) => xs
+ case xs => xs
+ }).length
+
// used in the symbols for labeldefs and valdefs emitted by the pattern matcher
// tailcalls, cps,... use this flag combination to detect translated matches
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 754adcb80d..2585b541ed 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -1434,6 +1434,11 @@ trait Trees extends api.Trees { self: SymbolTable =>
/** Substitute symbols in `from` with symbols in `to`. Returns a new
* tree using the new symbols and whose Ident and Select nodes are
* name-consistent with the new symbols.
+ *
+ * Note: This is currently a destructive operation on the original Tree.
+ * Trees currently assigned a symbol in `from` will be assigned the new symbols
+ * without copying, and trees that define symbols with an `info` that refer
+ * a symbol in `from` will have a new type assigned.
*/
class TreeSymSubstituter(from: List[Symbol], to: List[Symbol]) extends Transformer {
val symSubst = new SubstSymMap(from, to)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 7299b439f8..ee584bed2c 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -391,8 +391,8 @@ trait Types extends api.Types { self: SymbolTable =>
* This is assessed to be the case if the class is final,
* and all type parameters (if any) are invariant.
*/
- def isFinalType =
- typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant)
+ def isFinalType: Boolean =
+ typeSymbol.isFinal && (typeSymbol.typeParams forall symbolIsNonVariant) && prefix.isStable
/** Is this type completed (i.e. not a lazy type)? */
def isComplete: Boolean = true
@@ -3052,6 +3052,12 @@ trait Types extends api.Types { self: SymbolTable =>
val origin: Type,
var constr: TypeConstraint
) extends Type {
+
+ // We don't want case class equality/hashing as TypeVar-s are mutable,
+ // and TypeRefs based on them get wrongly `uniqued` otherwise. See SI-7226.
+ override def hashCode(): Int = System.identityHashCode(this)
+ override def equals(other: Any): Boolean = this eq other.asInstanceOf[AnyRef]
+
def untouchable = false // by other typevars
override def params: List[Symbol] = Nil
override def typeArgs: List[Type] = Nil
@@ -3070,12 +3076,14 @@ trait Types extends api.Types { self: SymbolTable =>
/** The variable's skolemization level */
val level = skolemizationLevel
- /** Two occurrences of a higher-kinded typevar, e.g. `?CC[Int]` and `?CC[String]`, correspond to
- * ''two instances'' of `TypeVar` that share the ''same'' `TypeConstraint`.
+ /** Applies this TypeVar to type arguments, if arity matches.
+ *
+ * Different applications of the same type constructor variable `?CC`,
+ * e.g. `?CC[Int]` and `?CC[String]`, are modeled as distinct instances of `TypeVar`
+ * that share a `TypeConstraint`, so that the comparisons `?CC[Int] <:< List[Int]`
+ * and `?CC[String] <:< Iterable[String]` result in `?CC` being upper-bounded by `List` and `Iterable`.
*
- * `constr` for `?CC` only tracks type constructors anyway,
- * so when `?CC[Int] <:< List[Int]` and `?CC[String] <:< Iterable[String]`
- * `?CC's` hibounds contains List and Iterable.
+ * Applying the wrong number of type args results in a TypeVar whose instance is set to `ErrorType`.
*/
def applyArgs(newArgs: List[Type]): TypeVar = (
if (newArgs.isEmpty && typeArgs.isEmpty)
@@ -3085,7 +3093,7 @@ trait Types extends api.Types { self: SymbolTable =>
TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv)
}
else
- throw new Error("Invalid type application in TypeVar: " + params + ", " + newArgs)
+ TypeVar(typeSymbol).setInst(ErrorType)
)
// newArgs.length may differ from args.length (could've been empty before)
//
@@ -3115,13 +3123,14 @@ trait Types extends api.Types { self: SymbolTable =>
// <region name="constraint mutators + undoLog">
// invariant: before mutating constr, save old state in undoLog
// (undoLog is used to reset constraints to avoid piling up unrelated ones)
- def setInst(tp: Type) {
+ def setInst(tp: Type): this.type = {
// assert(!(tp containsTp this), this)
undoLog record this
// if we were compared against later typeskolems, repack the existential,
// because skolems are only compatible if they were created at the same level
val res = if (shouldRepackType) repackExistential(tp) else tp
constr.inst = TypeVar.trace("setInst", "In " + originLocation + ", " + originName + "=" + res)(res)
+ this
}
def addLoBound(tp: Type, isNumericBound: Boolean = false) {
@@ -5030,10 +5039,11 @@ trait Types extends api.Types { self: SymbolTable =>
if (tp.isTrivial) tp
else if (tp.prefix.typeSymbol isNonBottomSubClass owner) {
val widened = tp match {
- case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
- case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
+ case _: ConstantType => tp // Java enum constants: don't widen to the enum type!
+ case _ => tp.widen // C.X.type widens to C.this.X.type, otherwise `tp asSeenFrom (pre, C)` has no effect.
}
- widened asSeenFrom (pre, tp.typeSymbol.owner)
+ val memType = widened asSeenFrom (pre, tp.typeSymbol.owner)
+ if (tp eq widened) memType else memType.narrow
}
else loop(tp.prefix) memberType tp.typeSymbol
@@ -5365,7 +5375,9 @@ trait Types extends api.Types { self: SymbolTable =>
case _ =>
NoType
}
- patType match {
+ // See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
+ // generates an outer test based on `patType.prefix`, which automatically dealiases.
+ patType.dealias match {
case TypeRef(pre, sym, args) =>
val pre1 = maybeCreateDummyClone(pre, sym)
(pre1 ne NoType) && isPopulated(copyTypeRef(patType, pre1, sym, args), selType)
diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
index 6dc6a0f7b8..00c7c3de9a 100644
--- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala
+++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala
@@ -17,7 +17,14 @@ trait UnCurry {
val tp = expandAlias(tp0)
tp match {
case MethodType(params, MethodType(params1, restpe)) =>
- apply(MethodType(params ::: params1, restpe))
+ // This transformation is described in UnCurryTransformer.dependentParamTypeErasure
+ val packSymbolsMap = new TypeMap {
+ // Wrapping in a TypeMap to reuse the code that opts for a fast path if the function is an identity.
+ def apply(tp: Type): Type = packSymbols(params, tp)
+ }
+ val existentiallyAbstractedParam1s = packSymbolsMap.mapOver(params1)
+ val substitutedResult = restpe.substSym(params1, existentiallyAbstractedParam1s)
+ apply(MethodType(params ::: existentiallyAbstractedParam1s, substitutedResult))
case MethodType(params, ExistentialType(tparams, restpe @ MethodType(_, _))) =>
abort("unexpected curried method types with intervening existential")
case MethodType(h :: t, restpe) if h.isImplicit =>
@@ -60,4 +67,4 @@ trait UnCurry {
*/
def transformInfo(sym: Symbol, tp: Type): Type =
if (sym.isType) uncurryType(tp) else uncurry(tp)
-} \ No newline at end of file
+}
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index 2c90d2d525..cbd27b0d65 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -103,13 +103,13 @@ quant)
r <- q :: q.children.toList if r.prefix.nonEmpty) yield r
private def showPercent(x: Double, base: Double) =
- if (base == 0) "" else f" (${x / base * 100}%2.1f%)"
+ if (base == 0) "" else f" (${x / base * 100}%2.1f%%)"
/** The base trait for quantities.
* Quantities with non-empty prefix are printed in the statistics info.
*/
trait Quantity {
- if (prefix.nonEmpty) {
+ if (enabled && prefix.nonEmpty) {
val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix"
qs(key) = this
}
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index e18435d5b0..1b69ca4e89 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -17,7 +17,7 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def forInteractive = false
def forScaladoc = false
- def log(msg: => AnyRef): Unit = println(" [] "+msg)
+ def log(msg: => AnyRef): Unit = if (settings.debug.value) println(" [] "+msg)
type TreeCopier = InternalTreeCopierOps
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
diff --git a/starr.number b/starr.number
new file mode 100644
index 0000000000..7e6e869eaf
--- /dev/null
+++ b/starr.number
@@ -0,0 +1 @@
+starr.version=2.10.1 \ No newline at end of file
diff --git a/test/debug/OBSOLETE b/test/debug/OBSOLETE
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/debug/OBSOLETE
diff --git a/test/disabled/jvm/scala-concurrent-tck-akka.scala b/test/disabled/jvm/scala-concurrent-tck-akka.scala
deleted file mode 100644
index dfd906e59e..0000000000
--- a/test/disabled/jvm/scala-concurrent-tck-akka.scala
+++ /dev/null
@@ -1,391 +0,0 @@
-
-
-import akka.dispatch.{
- Future => future,
- Promise => promise
-}
-import akka.dispatch.Await.{result => await}
-
-// Duration required for await
-import akka.util.Duration
-import java.util.concurrent.TimeUnit
-import TimeUnit._
-
-import scala.concurrent.{
- TimeoutException,
- SyncVar,
- ExecutionException
-}
-//import scala.concurrent.future
-//import scala.concurrent.promise
-//import scala.concurrent.await
-
-
-
-trait TestBase {
-
- implicit val disp = akka.actor.ActorSystem().dispatcher
-
- def once(body: (() => Unit) => Unit) {
- val sv = new SyncVar[Boolean]
- body(() => sv put true)
- sv.take()
- }
-
-}
-
-
-trait FutureCallbacks extends TestBase {
-
- def testOnSuccess(): Unit = once {
- done =>
- var x = 0
- val f = future {
- x = 1
- }
- f onSuccess { case any =>
- done()
- assert(x == 1)
- }
- }
-
- def testOnSuccessWhenCompleted(): Unit = once {
- done =>
- var x = 0
- val f = future {
- x = 1
- }
- f onSuccess { case any =>
- assert(x == 1)
- x = 2
- f onSuccess { case any =>
- assert(x == 2)
- done()
- }
- }
- }
-
- def testOnSuccessWhenFailed(): Unit = once {
- done =>
- val f = future[Unit] {
- done()
- throw new Exception
- }
- f onSuccess { case any =>
- assert(false)
- }
- }
-
- def testOnFailure(): Unit = once {
- done =>
- var x = 0
- val f = future[Unit] {
- x = 1
- throw new Exception
- }
- f onSuccess { case any =>
- done()
- assert(false)
- }
- f onFailure {
- case _ =>
- done()
- assert(x == 1)
- }
- }
-
- def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
- done =>
- val f = future[Unit] {
- throw cause
- }
- f onSuccess { case any =>
- done()
- assert(false)
- }
- f onFailure {
- case e: ExecutionException if (e.getCause == cause) =>
- done()
- case _ =>
- done()
- assert(false)
- }
- }
-
- def testOnFailureWhenTimeoutException(): Unit = once {
- done =>
- val f = future[Unit] {
- throw new TimeoutException()
- }
- f onSuccess { case any =>
- done()
- assert(false)
- }
- f onFailure {
- case e: TimeoutException =>
- done()
- case other =>
- done()
- assert(false)
- }
- }
-
- testOnSuccess()
- testOnSuccessWhenCompleted()
- testOnSuccessWhenFailed()
- testOnFailure()
-// testOnFailureWhenSpecialThrowable(5, new Error)
-// testOnFailureWhenSpecialThrowable(6, new scala.util.control.ControlThrowable { })
-// testOnFailureWhenSpecialThrowable(7, new InterruptedException)
-// testOnFailureWhenTimeoutException()
-
-}
-
-
-trait FutureCombinators extends TestBase {
-
- // map: stub
- def testMapSuccess(): Unit = once {
- done =>
- done()
- }
-
- def testMapFailure(): Unit = once {
- done =>
- done()
- }
-
- // flatMap: stub
- def testFlatMapSuccess(): Unit = once {
- done =>
- done()
- }
-
- def testFlatMapFailure(): Unit = once {
- done =>
- done()
- }
-
- // filter: stub
- def testFilterSuccess(): Unit = once {
- done =>
- done()
- }
-
- def testFilterFailure(): Unit = once {
- done =>
- done()
- }
-
- // foreach: stub
- def testForeachSuccess(): Unit = once {
- done =>
- done()
- }
-
- def testForeachFailure(): Unit = once {
- done =>
- done()
- }
-
- def testRecoverSuccess(): Unit = once {
- done =>
- val cause = new RuntimeException
- val f = future {
- throw cause
- } recover {
- case re: RuntimeException =>
- "recovered"
- } onSuccess { case x =>
- done()
- assert(x == "recovered")
- } onFailure { case any =>
- done()
- assert(false)
- }
- }
-
- def testRecoverFailure(): Unit = once {
- done =>
- val cause = new RuntimeException
- val f = future {
- throw cause
- } recover {
- case te: TimeoutException => "timeout"
- } onSuccess { case x =>
- done()
- assert(false)
- } onFailure { case any =>
- done()
- assert(any == cause)
- }
- }
-
- testMapSuccess()
- testMapFailure()
- testFlatMapSuccess()
- testFlatMapFailure()
- testFilterSuccess()
- testFilterFailure()
- testForeachSuccess()
- testForeachFailure()
- testRecoverSuccess()
- testRecoverFailure()
-
-}
-
-/*
-trait FutureProjections extends TestBase {
-
- def testFailedFailureOnComplete(): Unit = once {
- done =>
- val cause = new RuntimeException
- val f = future {
- throw cause
- }
- f.failed onComplete {
- case Right(t) =>
- assert(t == cause)
- done()
- case Left(t) =>
- assert(false)
- }
- }
-
- def testFailedFailureOnSuccess(): Unit = once {
- done =>
- val cause = new RuntimeException
- val f = future {
- throw cause
- }
- f.failed onSuccess {
- t =>
- assert(t == cause)
- done()
- }
- }
-
- def testFailedSuccessOnComplete(): Unit = once {
- done =>
- val f = future { 0 }
- f.failed onComplete {
- case Right(t) =>
- assert(false)
- case Left(t) =>
- assert(t.isInstanceOf[NoSuchElementException])
- done()
- }
- }
-
- def testFailedSuccessOnFailure(): Unit = once {
- done =>
- val f = future { 0 }
- f.failed onFailure {
- case nsee: NoSuchElementException =>
- done()
- }
- }
-
- def testFailedFailureAwait(): Unit = once {
- done =>
- val cause = new RuntimeException
- val f = future {
- throw cause
- }
- assert(await(0, f.failed) == cause)
- done()
- }
-
- def testFailedSuccessAwait(): Unit = once {
- done =>
- val f = future { 0 }
- try {
- println(await(0, f.failed))
- assert(false)
- } catch {
- case nsee: NoSuchElementException => done()
- }
- }
-
- testFailedFailureOnComplete()
- testFailedFailureOnSuccess()
- testFailedSuccessOnComplete()
- testFailedSuccessOnFailure()
- testFailedFailureAwait()
- //testFailedSuccessAwait()
-
-}
-*/
-
-trait Blocking extends TestBase {
-
- def testAwaitSuccess(): Unit = once {
- done =>
- val f = future { 0 }
- await(f, Duration(500, "ms"))
- done()
- }
-
- def testAwaitFailure(): Unit = once {
- done =>
- val cause = new RuntimeException
- val f = future {
- throw cause
- }
- try {
- await(f, Duration(500, "ms"))
- assert(false)
- } catch {
- case t =>
- assert(t == cause)
- done()
- }
- }
-
- testAwaitSuccess()
- testAwaitFailure()
-
-}
-
-/*
-trait Promises extends TestBase {
-
- def testSuccess(): Unit = once {
- done =>
- val p = promise[Int]()
- val f = p.future
-
- f.onSuccess { x =>
- done()
- assert(x == 5)
- } onFailure { case any =>
- done()
- assert(false)
- }
-
- p.success(5)
- }
-
- testSuccess()
-
-}
-*/
-
-trait Exceptions extends TestBase {
-
-}
-
-
-object Test
-extends App
-with FutureCallbacks
-with FutureCombinators
-/*with FutureProjections*/
-/*with Promises*/
-with Blocking
-with Exceptions
-{
- System.exit(0)
-}
-
-
diff --git a/test/disabled/presentation/akka/src/akka/dispatch/Future.scala b/test/disabled/presentation/akka/src/akka/dispatch/Future.scala
deleted file mode 100644
index 1ad304d726..0000000000
--- a/test/disabled/presentation/akka/src/akka/dispatch/Future.scala
+++ /dev/null
@@ -1,832 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Scalable Solutions AB <http://scalablesolutions.se>
- */
-
-package akka.dispatch
-
-import akka.AkkaException
-import akka.event.EventHandler
-import akka.actor.{ Actor, Channel }
-import akka.util.Duration
-import akka.japi.{ Procedure, Function => JFunc }
-
-import scala.util.continuations._
-
-import java.util.concurrent.locks.ReentrantLock
-import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
-import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS => MILLIS }
-import java.util.concurrent.atomic.{ AtomicBoolean }
-import java.lang.{ Iterable => JIterable }
-import java.util.{ LinkedList => JLinkedList }
-import scala.collection.mutable.Stack
-import annotation.tailrec
-
-class FutureTimeoutException(message: String, cause: Throwable = null) extends AkkaException(message, cause)
-
-object Futures {
-
- /**
- * Java API, equivalent to Future.apply
- */
- def future[T](body: Callable[T]): Future[T] =
- Future(body.call)
-
- /**
- * Java API, equivalent to Future.apply
- */
- def future[T](body: Callable[T], timeout: Long): Future[T] =
- Future(body.call, timeout)
-
- /**
- * Java API, equivalent to Future.apply
- */
- def future[T](body: Callable[T], dispatcher: MessageDispatcher): Future[T] =
- Future(body.call)(dispatcher)
-
- /**
- * Java API, equivalent to Future.apply
- */
- def future[T](body: Callable[T], timeout: Long, dispatcher: MessageDispatcher): Future[T] =
- Future(body.call, timeout)(dispatcher)
-
- /**
- * Returns a Future to the result of the first future in the list that is completed
- */
- def firstCompletedOf[T](futures: Iterable[Future[T]], timeout: Long = Long.MaxValue): Future[T] = {
- val futureResult = new DefaultCompletableFuture[T](timeout)
-
- val completeFirst: Future[T] => Unit = _.value.foreach(futureResult complete _)
- for (f ← futures) f onComplete completeFirst
-
- futureResult
- }
-
- /**
- * Java API.
- * Returns a Future to the result of the first future in the list that is completed
- */
- def firstCompletedOf[T <: AnyRef](futures: java.lang.Iterable[Future[T]], timeout: Long): Future[T] =
- firstCompletedOf(scala.collection.JavaConversions.iterableAsScalaIterable(futures), timeout)
-
- /**
- * A non-blocking fold over the specified futures.
- * The fold is performed on the thread where the last future is completed,
- * the result will be the first failure of any of the futures, or any failure in the actual fold,
- * or the result of the fold.
- * Example:
- * <pre>
- * val result = Futures.fold(0)(futures)(_ + _).await.result
- * </pre>
- */
- def fold[T, R](zero: R, timeout: Long = Actor.TIMEOUT)(futures: Iterable[Future[T]])(foldFun: (R, T) => R): Future[R] = {
- if (futures.isEmpty) {
- new AlreadyCompletedFuture[R](Right(zero))
- } else {
- val result = new DefaultCompletableFuture[R](timeout)
- val results = new ConcurrentLinkedQueue[T]()
- val allDone = futures.size
-
- val aggregate: Future[T] => Unit = f => if (!result.isCompleted) { //TODO: This is an optimization, is it premature?
- f.value.get match {
- case r: Right[Throwable, T] =>
- results add r.b
- if (results.size == allDone) { //Only one thread can get here
- try {
- result completeWithResult scala.collection.JavaConversions.collectionAsScalaIterable(results).foldLeft(zero)(foldFun)
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- result completeWithException e
- }
- finally {
- results.clear
- }
- }
- case l: Left[Throwable, T] =>
- result completeWithException l.a
- results.clear
- }
- }
-
- futures foreach { _ onComplete aggregate }
- result
- }
- }
-
- /**
- * Java API
- * A non-blocking fold over the specified futures.
- * The fold is performed on the thread where the last future is completed,
- * the result will be the first failure of any of the futures, or any failure in the actual fold,
- * or the result of the fold.
- */
- def fold[T <: AnyRef, R <: AnyRef](zero: R, timeout: Long, futures: java.lang.Iterable[Future[T]], fun: akka.japi.Function2[R, T, R]): Future[R] =
- fold(zero, timeout)(scala.collection.JavaConversions.iterableAsScalaIterable(futures))(fun.apply _)
-
- /**
- * Initiates a fold over the supplied futures where the fold-zero is the result value of the Future that's completed first
- * Example:
- * <pre>
- * val result = Futures.reduce(futures)(_ + _).await.result
- * </pre>
- */
- def reduce[T, R >: T](futures: Iterable[Future[T]], timeout: Long = Actor.TIMEOUT)(op: (R, T) => T): Future[R] = {
- if (futures.isEmpty)
- new AlreadyCompletedFuture[R](Left(new UnsupportedOperationException("empty reduce left")))
- else {
- val result = new DefaultCompletableFuture[R](timeout)
- val seedFound = new AtomicBoolean(false)
- val seedFold: Future[T] => Unit = f => {
- if (seedFound.compareAndSet(false, true)) { //Only the first completed should trigger the fold
- f.value.get match {
- case r: Right[Throwable, T] =>
- result.completeWith(fold(r.b, timeout)(futures.filterNot(_ eq f))(op))
- case l: Left[Throwable, T] =>
- result.completeWithException(l.a)
- }
- }
- }
- for (f ← futures) f onComplete seedFold //Attach the listener to the Futures
- result
- }
- }
-
- /**
- * Java API.
- * Initiates a fold over the supplied futures where the fold-zero is the result value of the Future that's completed first
- */
- def reduce[T <: AnyRef, R >: T](futures: java.lang.Iterable[Future[T]], timeout: Long, fun: akka.japi.Function2[R, T, T]): Future[R] =
- reduce(scala.collection.JavaConversions.iterableAsScalaIterable(futures), timeout)(fun.apply _)
-
- /**
- * Java API.
- * Simple version of Futures.traverse. Transforms a java.lang.Iterable[Future[A]] into a Future[java.util.LinkedList[A]].
- * Useful for reducing many Futures into a single Future.
- */
- def sequence[A](in: JIterable[Future[A]], timeout: Long): Future[JLinkedList[A]] =
- scala.collection.JavaConversions.iterableAsScalaIterable(in).foldLeft(Future(new JLinkedList[A]()))((fr, fa) =>
- for (r ← fr; a ← fa) yield {
- r add a
- r
- })
-
- /**
- * Java API.
- * Simple version of Futures.traverse. Transforms a java.lang.Iterable[Future[A]] into a Future[java.util.LinkedList[A]].
- * Useful for reducing many Futures into a single Future.
- */
- def sequence[A](in: JIterable[Future[A]]): Future[JLinkedList[A]] = sequence(in, Actor.TIMEOUT)
-
- /**
- * Java API.
- * Transforms a java.lang.Iterable[A] into a Future[java.util.LinkedList[B]] using the provided Function A => Future[B].
- * This is useful for performing a parallel map. For example, to apply a function to all items of a list
- * in parallel.
- */
- def traverse[A, B](in: JIterable[A], timeout: Long, fn: JFunc[A, Future[B]]): Future[JLinkedList[B]] =
- scala.collection.JavaConversions.iterableAsScalaIterable(in).foldLeft(Future(new JLinkedList[B]())) { (fr, a) =>
- val fb = fn(a)
- for (r ← fr; b ← fb) yield {
- r add b
- r
- }
- }
-
- /**
- * Java API.
- * Transforms a java.lang.Iterable[A] into a Future[java.util.LinkedList[B]] using the provided Function A => Future[B].
- * This is useful for performing a parallel map. For example, to apply a function to all items of a list
- * in parallel.
- */
- def traverse[A, B](in: JIterable[A], fn: JFunc[A, Future[B]]): Future[JLinkedList[B]] = traverse(in, Actor.TIMEOUT, fn)
-
- // =====================================
- // Deprecations
- // =====================================
-
- /**
- * (Blocking!)
- */
- @deprecated("Will be removed after 1.1, if you must block, use: futures.foreach(_.await)", "1.1")
- def awaitAll(futures: List[Future[_]]): Unit = futures.foreach(_.await)
-
- /**
- * Returns the First Future that is completed (blocking!)
- */
- @deprecated("Will be removed after 1.1, if you must block, use: firstCompletedOf(futures).await", "1.1")
- def awaitOne(futures: List[Future[_]], timeout: Long = Long.MaxValue): Future[_] = firstCompletedOf[Any](futures, timeout).await
-
- /**
- * Applies the supplied function to the specified collection of Futures after awaiting each future to be completed
- */
- @deprecated("Will be removed after 1.1, if you must block, use: futures map { f => fun(f.await) }", "1.1")
- def awaitMap[A, B](in: Traversable[Future[A]])(fun: (Future[A]) => B): Traversable[B] =
- in map { f => fun(f.await) }
-
- /**
- * Returns Future.resultOrException of the first completed of the 2 Futures provided (blocking!)
- */
- @deprecated("Will be removed after 1.1, if you must block, use: firstCompletedOf(List(f1,f2)).await.resultOrException", "1.1")
- def awaitEither[T](f1: Future[T], f2: Future[T]): Option[T] = firstCompletedOf[T](List(f1, f2)).await.resultOrException
-}
-
-object Future {
- /**
- * This method constructs and returns a Future that will eventually hold the result of the execution of the supplied body
- * The execution is performed by the specified Dispatcher.
- */
- def apply[T](body: => T, timeout: Long = Actor.TIMEOUT)(implicit dispatcher: MessageDispatcher): Future[T] =
- dispatcher.dispatchFuture(() => body, timeout)
-
- /**
- * Construct a completable channel
- */
- def channel(timeout: Long = Actor.TIMEOUT) = new Channel[Any] {
- val future = empty[Any](timeout)
- def !(msg: Any) = future completeWithResult msg
- }
-
- /**
- * Create an empty Future with default timeout
- */
- def empty[T](timeout: Long = Actor.TIMEOUT) = new DefaultCompletableFuture[T](timeout)
-
- import scala.collection.mutable.Builder
- import scala.collection.generic.CanBuildFrom
-
- /**
- * Simple version of Futures.traverse. Transforms a Traversable[Future[A]] into a Future[Traversable[A]].
- * Useful for reducing many Futures into a single Future.
- */
- def sequence[A, M[_] <: Traversable[_]](in: M[Future[A]], timeout: Long = Actor.TIMEOUT)(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]]): Future[M[A]] =
- in.foldLeft(new DefaultCompletableFuture[Builder[A, M[A]]](timeout).completeWithResult(cbf(in)): Future[Builder[A, M[A]]])((fr, fa) => for (r ← fr; a ← fa.asInstanceOf[Future[A]]) yield (r += a)).map(_.result)
-
- /**
- * Transforms a Traversable[A] into a Future[Traversable[B]] using the provided Function A => Future[B].
- * This is useful for performing a parallel map. For example, to apply a function to all items of a list
- * in parallel:
- * <pre>
- * val myFutureList = Futures.traverse(myList)(x => Future(myFunc(x)))
- * </pre>
- */
- def traverse[A, B, M[_] <: Traversable[_]](in: M[A], timeout: Long = Actor.TIMEOUT)(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]]): Future[M[B]] =
- in.foldLeft(new DefaultCompletableFuture[Builder[B, M[B]]](timeout).completeWithResult(cbf(in)): Future[Builder[B, M[B]]]) { (fr, a) =>
- val fb = fn(a.asInstanceOf[A])
- for (r ← fr; b ← fb) yield (r += b)
- }.map(_.result)
-
- /**
- * Captures a block that will be transformed into 'Continuation Passing Style' using Scala's Delimited
- * Continuations plugin.
- *
- * Within the block, the result of a Future may be accessed by calling Future.apply. At that point
- * execution is suspended with the rest of the block being stored in a continuation until the result
- * of the Future is available. If an Exception is thrown while processing, it will be contained
- * within the resulting Future.
- *
- * This allows working with Futures in an imperative style without blocking for each result.
- *
- * Completing a Future using 'CompletableFuture << Future' will also suspend execution until the
- * value of the other Future is available.
- *
- * The Delimited Continuations compiler plugin must be enabled in order to use this method.
- */
- def flow[A](body: => A @cps[Future[Any]], timeout: Long = Actor.TIMEOUT): Future[A] = {
- val future = Promise[A](timeout)
- (reset(future.asInstanceOf[CompletableFuture[Any]].completeWithResult(body)): Future[Any]) onComplete { f =>
- val opte = f.exception
- if (opte.isDefined) future completeWithException (opte.get)
- }
- future
- }
-
- private[akka] val callbacksPendingExecution = new ThreadLocal[Option[Stack[() => Unit]]]() {
- override def initialValue = None
- }
-}
-
-sealed trait Future[+T] {
-
- /**
- * For use only within a Future.flow block or another compatible Delimited Continuations reset block.
- *
- * Returns the result of this Future without blocking, by suspending execution and storing it as a
- * continuation until the result is available.
- *
- * If this Future is untyped (a Future[Nothing]), a type parameter must be explicitly provided or
- * execution will fail. The normal result of getting a Future from an ActorRef using !!! will return
- * an untyped Future.
- */
- def apply[A >: T](): A @cps[Future[Any]] = shift(this flatMap (_: A => Future[Any]))
-
- /**
- * Blocks awaiting completion of this Future, then returns the resulting value,
- * or throws the completed exception
- *
- * Scala & Java API
- *
- * throws FutureTimeoutException if this Future times out when waiting for completion
- */
- def get: T = this.await.resultOrException.get
-
- /**
- * Blocks the current thread until the Future has been completed or the
- * timeout has expired. In the case of the timeout expiring a
- * FutureTimeoutException will be thrown.
- */
- def await: Future[T]
-
- /**
- * Blocks the current thread until the Future has been completed or the
- * timeout has expired. The timeout will be the least value of 'atMost' and the timeout
- * supplied at the constructuion of this Future.
- * In the case of the timeout expiring a FutureTimeoutException will be thrown.
- */
- def await(atMost: Duration): Future[T]
-
- /**
- * Blocks the current thread until the Future has been completed. Use
- * caution with this method as it ignores the timeout and will block
- * indefinitely if the Future is never completed.
- */
- @deprecated("Will be removed after 1.1, it's dangerous and can cause deadlocks, agony and insanity.", "1.1")
- def awaitBlocking: Future[T]
-
- /**
- * Tests whether this Future has been completed.
- */
- final def isCompleted: Boolean = value.isDefined
-
- /**
- * Tests whether this Future's timeout has expired.
- *
- * Note that an expired Future may still contain a value, or it may be
- * completed with a value.
- */
- def isExpired: Boolean
-
- /**
- * This Future's timeout in nanoseconds.
- */
- def timeoutInNanos: Long
-
- /**
- * The contained value of this Future. Before this Future is completed
- * the value will be None. After completion the value will be Some(Right(t))
- * if it contains a valid result, or Some(Left(error)) if it contains
- * an exception.
- */
- def value: Option[Either[Throwable, T]]
-
- /**
- * Returns the successful result of this Future if it exists.
- */
- final def result: Option[T] = {
- val v = value
- if (v.isDefined) v.get.right.toOption
- else None
- }
-
- /**
- * Returns the contained exception of this Future if it exists.
- */
- final def exception: Option[Throwable] = {
- val v = value
- if (v.isDefined) v.get.left.toOption
- else None
- }
-
- /**
- * When this Future is completed, apply the provided function to the
- * Future. If the Future has already been completed, this will apply
- * immediately.
- */
- def onComplete(func: Future[T] => Unit): Future[T]
-
- /**
- * When the future is completed with a valid result, apply the provided
- * PartialFunction to the result.
- * <pre>
- * val result = future receive {
- * case Foo => "foo"
- * case Bar => "bar"
- * }.await.result
- * </pre>
- */
- final def receive(pf: PartialFunction[Any, Unit]): Future[T] = onComplete { f =>
- val optr = f.result
- if (optr.isDefined) {
- val r = optr.get
- if (pf.isDefinedAt(r)) pf(r)
- }
- }
-
- /**
- * Creates a new Future by applying a PartialFunction to the successful
- * result of this Future if a match is found, or else return a MatchError.
- * If this Future is completed with an exception then the new Future will
- * also contain this exception.
- * Example:
- * <pre>
- * val future1 = for {
- * a <- actor !!! Req("Hello") collect { case Res(x: Int) => x }
- * b <- actor !!! Req(a) collect { case Res(x: String) => x }
- * c <- actor !!! Req(7) collect { case Res(x: String) => x }
- * } yield b + "-" + c
- * </pre>
- */
- final def collect[A](pf: PartialFunction[Any, A]): Future[A] = {
- val fa = new DefaultCompletableFuture[A](timeoutInNanos, NANOS)
- onComplete { ft =>
- val v = ft.value.get
- fa complete {
- if (v.isLeft) v.asInstanceOf[Either[Throwable, A]]
- else {
- try {
- val r = v.right.get
- if (pf isDefinedAt r) Right(pf(r))
- else Left(new MatchError(r))
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- Left(e)
- }
- }
- }
- }
- fa
- }
-
- /**
- * Creates a new Future that will handle any matching Throwable that this
- * Future might contain. If there is no match, or if this Future contains
- * a valid result then the new Future will contain the same.
- * Example:
- * <pre>
- * Future(6 / 0) failure { case e: ArithmeticException => 0 } // result: 0
- * Future(6 / 0) failure { case e: NotFoundException => 0 } // result: exception
- * Future(6 / 2) failure { case e: ArithmeticException => 0 } // result: 3
- * </pre>
- */
- final def failure[A >: T](pf: PartialFunction[Throwable, A]): Future[A] = {
- val fa = new DefaultCompletableFuture[A](timeoutInNanos, NANOS)
- onComplete { ft =>
- val opte = ft.exception
- fa complete {
- if (opte.isDefined) {
- val e = opte.get
- try {
- if (pf isDefinedAt e) Right(pf(e))
- else Left(e)
- } catch {
- case x: Exception => Left(x)
- }
- } else ft.value.get
- }
- }
- fa
- }
-
- /**
- * Creates a new Future by applying a function to the successful result of
- * this Future. If this Future is completed with an exception then the new
- * Future will also contain this exception.
- * Example:
- * <pre>
- * val future1 = for {
- * a: Int <- actor !!! "Hello" // returns 5
- * b: String <- actor !!! a // returns "10"
- * c: String <- actor !!! 7 // returns "14"
- * } yield b + "-" + c
- * </pre>
- */
- final def map[A](f: T => A): Future[A] = {
- val fa = new DefaultCompletableFuture[A](timeoutInNanos, NANOS)
- onComplete { ft =>
- val optv = ft.value
- if (optv.isDefined) {
- val v = optv.get
- if (v.isLeft)
- fa complete v.asInstanceOf[Either[Throwable, A]]
- else {
- fa complete (try {
- Right(f(v.right.get))
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- Left(e)
- })
- }
- }
- }
- fa
- }
-
- /**
- * Creates a new Future by applying a function to the successful result of
- * this Future, and returns the result of the function as the new Future.
- * If this Future is completed with an exception then the new Future will
- * also contain this exception.
- * Example:
- * <pre>
- * val future1 = for {
- * a: Int <- actor !!! "Hello" // returns 5
- * b: String <- actor !!! a // returns "10"
- * c: String <- actor !!! 7 // returns "14"
- * } yield b + "-" + c
- * </pre>
- */
- final def flatMap[A](f: T => Future[A]): Future[A] = {
- val fa = new DefaultCompletableFuture[A](timeoutInNanos, NANOS)
- onComplete { ft =>
- val optv = ft.value
- if (optv.isDefined) {
- val v = optv.get
- if (v.isLeft)
- fa complete v.asInstanceOf[Either[Throwable, A]]
- else {
- try {
- fa.completeWith(f(v.right.get))
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- fa completeWithException e
- }
- }
- }
- }
- fa
- }
-
- final def foreach(f: T => Unit): Unit = onComplete { ft =>
- val optr = ft.result
- if (optr.isDefined)
- f(optr.get)
- }
-
- final def filter(p: Any => Boolean): Future[Any] = {
- val f = new DefaultCompletableFuture[T](timeoutInNanos, NANOS)
- onComplete { ft =>
- val optv = ft.value
- if (optv.isDefined) {
- val v = optv.get
- if (v.isLeft)
- f complete v
- else {
- val r = v.right.get
- f complete (try {
- if (p(r)) Right(r)
- else Left(new MatchError(r))
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- Left(e)
- })
- }
- }
- }
- f
- }
-
- /**
- * Returns the current result, throws the exception is one has been raised, else returns None
- */
- final def resultOrException: Option[T] = {
- val v = value
- if (v.isDefined) {
- val r = v.get
- if (r.isLeft) throw r.left.get
- else r.right.toOption
- } else None
- }
-
- /* Java API */
- final def onComplete[A >: T](proc: Procedure[Future[A]]): Future[T] = onComplete(proc(_))
-
- final def map[A >: T, B](f: JFunc[A, B]): Future[B] = map(f(_))
-
- final def flatMap[A >: T, B](f: JFunc[A, Future[B]]): Future[B] = flatMap(f(_))
-
- final def foreach[A >: T](proc: Procedure[A]): Unit = foreach(proc(_))
-
- final def filter(p: JFunc[Any, Boolean]): Future[Any] = filter(p(_))
-
-}
-
-object Promise {
-
- def apply[A](timeout: Long): CompletableFuture[A] = new DefaultCompletableFuture[A](timeout)
-
- def apply[A](): CompletableFuture[A] = apply(Actor.TIMEOUT)
-
-}
-
-/**
- * Essentially this is the Promise (or write-side) of a Future (read-side).
- */
-trait CompletableFuture[T] extends Future[T] {
- /**
- * Completes this Future with the specified result, if not already completed.
- * @return this
- */
- def complete(value: Either[Throwable, T]): Future[T]
-
- /**
- * Completes this Future with the specified result, if not already completed.
- * @return this
- */
- final def completeWithResult(result: T): Future[T] = complete(Right(result))
-
- /**
- * Completes this Future with the specified exception, if not already completed.
- * @return this
- */
- final def completeWithException(exception: Throwable): Future[T] = complete(Left(exception))
-
- /**
- * Completes this Future with the specified other Future, when that Future is completed,
- * unless this Future has already been completed.
- * @return this.
- */
- final def completeWith(other: Future[T]): Future[T] = {
- other onComplete { f => complete(f.value.get) }
- this
- }
-
- final def <<(value: T): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] => Future[Any]) => cont(complete(Right(value))) }
-
- final def <<(other: Future[T]): Future[T] @cps[Future[Any]] = shift { cont: (Future[T] => Future[Any]) =>
- val fr = new DefaultCompletableFuture[Any](Actor.TIMEOUT)
- this completeWith other onComplete { f =>
- try {
- fr completeWith cont(f)
- } catch {
- case e: Exception =>
- EventHandler.error(e, this, e.getMessage)
- fr completeWithException e
- }
- }
- fr
- }
-
-}
-
-/**
- * The default concrete Future implementation.
- */
-class DefaultCompletableFuture[T](timeout: Long, timeunit: TimeUnit) extends CompletableFuture[T] {
-
- def this() = this(0, MILLIS)
-
- def this(timeout: Long) = this(timeout, MILLIS)
-
- val timeoutInNanos = timeunit.toNanos(timeout)
- private val _startTimeInNanos = currentTimeInNanos
- private val _lock = new ReentrantLock
- private val _signal = _lock.newCondition
- private var _value: Option[Either[Throwable, T]] = None
- private var _listeners: List[Future[T] => Unit] = Nil
-
- /**
- * Must be called inside _lock.lock<->_lock.unlock
- */
- @tailrec
- private def awaitUnsafe(waitTimeNanos: Long): Boolean = {
- if (_value.isEmpty && waitTimeNanos > 0) {
- val start = currentTimeInNanos
- val remainingNanos = try {
- _signal.awaitNanos(waitTimeNanos)
- } catch {
- case e: InterruptedException =>
- waitTimeNanos - (currentTimeInNanos - start)
- }
- awaitUnsafe(remainingNanos)
- } else {
- _value.isDefined
- }
- }
-
- def await(atMost: Duration) = {
- _lock.lock
- if (try { awaitUnsafe(atMost.toNanos min timeLeft()) } finally { _lock.unlock }) this
- else throw new FutureTimeoutException("Futures timed out after [" + NANOS.toMillis(timeoutInNanos) + "] milliseconds")
- }
-
- def await = {
- _lock.lock
- if (try { awaitUnsafe(timeLeft()) } finally { _lock.unlock }) this
- else throw new FutureTimeoutException("Futures timed out after [" + NANOS.toMillis(timeoutInNanos) + "] milliseconds")
- }
-
- def awaitBlocking = {
- _lock.lock
- try {
- while (_value.isEmpty) {
- _signal.await
- }
- this
- } finally {
- _lock.unlock
- }
- }
-
- def isExpired: Boolean = timeLeft() <= 0
-
- def value: Option[Either[Throwable, T]] = {
- _lock.lock
- try {
- _value
- } finally {
- _lock.unlock
- }
- }
-
- def complete(value: Either[Throwable, T]): DefaultCompletableFuture[T] = {
- _lock.lock
- val notifyTheseListeners = try {
- if (_value.isEmpty && !isExpired) { //Only complete if we aren't expired
- _value = Some(value)
- val existingListeners = _listeners
- _listeners = Nil
- existingListeners
- } else Nil
- } finally {
- _signal.signalAll
- _lock.unlock
- }
-
- if (notifyTheseListeners.nonEmpty) { // Steps to ensure we don't run into a stack-overflow situation
- @tailrec
- def runCallbacks(rest: List[Future[T] => Unit], callbacks: Stack[() => Unit]) {
- if (rest.nonEmpty) {
- notifyCompleted(rest.head)
- while (callbacks.nonEmpty) { callbacks.pop().apply() }
- runCallbacks(rest.tail, callbacks)
- }
- }
-
- val pending = Future.callbacksPendingExecution.get
- if (pending.isDefined) { //Instead of nesting the calls to the callbacks (leading to stack overflow)
- pending.get.push(() => { // Linearize/aggregate callbacks at top level and then execute
- val doNotify = notifyCompleted _ //Hoist closure to avoid garbage
- notifyTheseListeners foreach doNotify
- })
- } else {
- try {
- val callbacks = Stack[() => Unit]() // Allocate new aggregator for pending callbacks
- Future.callbacksPendingExecution.set(Some(callbacks)) // Specify the callback aggregator
- runCallbacks(notifyTheseListeners, callbacks) // Execute callbacks, if they trigger new callbacks, they are aggregated
- } finally { Future.callbacksPendingExecution.set(None) } // Ensure cleanup
- }
- }
-
- this
- }
-
- def onComplete(func: Future[T] => Unit): CompletableFuture[T] = {
- _lock.lock
- val notifyNow = try {
- if (_value.isEmpty) {
- if (!isExpired) { //Only add the listener if the future isn't expired
- _listeners ::= func
- false
- } else false //Will never run the callback since the future is expired
- } else true
- } finally {
- _lock.unlock
- }
-
- if (notifyNow) notifyCompleted(func)
-
- this
- }
-
- private def notifyCompleted(func: Future[T] => Unit) {
- try {
- func(this)
- } catch {
- case e => EventHandler notify EventHandler.Error(e, this)
- }
- }
-
- @inline
- private def currentTimeInNanos: Long = MILLIS.toNanos(System.currentTimeMillis)
- @inline
- private def timeLeft(): Long = timeoutInNanos - (currentTimeInNanos - _startTimeInNanos)
-}
-
-/**
- * An already completed Future is seeded with it's result at creation, is useful for when you are participating in
- * a Future-composition but you already have a value to contribute.
- */
-sealed class AlreadyCompletedFuture[T](suppliedValue: Either[Throwable, T]) extends CompletableFuture[T] {
- val value = Some(suppliedValue)
-
- def complete(value: Either[Throwable, T]): CompletableFuture[T] = this
- def onComplete(func: Future[T] => Unit): Future[T] = { func(this); this }
- def await(atMost: Duration): Future[T] = this
- def await: Future[T] = this
- def awaitBlocking: Future[T] = this
- def isExpired: Boolean = true
- def timeoutInNanos: Long = 0
-}
diff --git a/test/disabled/presentation/akka/src/pi.scala b/test/disabled/presentation/akka/src/pi.scala
deleted file mode 100644
index b4c644052c..0000000000
--- a/test/disabled/presentation/akka/src/pi.scala
+++ /dev/null
@@ -1,108 +0,0 @@
-
-import akka.actor.{Actor, PoisonPill}
-import Actor._
-import akka.routing.{Routing, CyclicIterator}
-import Routing._
-
-import java.util.concurrent.CountDownLatch
-
-object Pi extends App {
-
- calculate/*#*/(nrOfWorkers = 4, nrOfElements = 10000, nrOfMessages = 10000)
-
- // ====================
- // ===== Messages =====
- // ====================
- sealed trait PiMessage
- case object Calculate extends PiMessage/*#*/
- case class Work(start: Int, nrOfElements: Int) extends PiMessage
- case class Result(value: Double) extends PiMessage
-
- // ==================
- // ===== Worker =====
- // ==================
- class Worker extends Actor/*#*/ {
-
- // define the work
- def calculatePiFor(start: Int, nrOfElements: Int): Double = {
- var acc = 0.0
- for (i <- start until (start + nrOfElements))
- acc += 4.0 * (1 - (i % 2) * 2) / (2 * i + 1)
- acc
- }
-
- def receive /*?*/ = {
- case Work(start, nrOfElements) =>
- self reply/*#*/ Result(calculatePiFor(start, nrOfElements)) // perform the work // TODO: this currently returns wrong position for the symbol
- }
- }
-
- // ==================
- // ===== Master =====
- // ==================
- class Master(
- nrOfWorkers: Int, nrOfMessages: Int, nrOfElements: Int, latch: CountDownLatch)
- extends Actor {
-
- var pi: Double = _
- var nrOfResults: Int = _
- var start: Long = _
-
- // create the workers
- val workers = Vector.fill(nrOfWorkers)(actorOf[Worker]./*!*/start())
-
- // wrap them with a load-balancing router
- val router = Routing./*!*/loadBalancerActor(CyclicIterator(workers))./*!*/start()
-
- // message handler
- def receive = {
- case Calculate =>
- // schedule work
- //for (start <- 0 until nrOfMessages) router ! Work(start, nrOfElements)
- for (i <- 0 until nrOfMessages) router ! Work(i * nrOfElements, nrOfElements)
-
- // send a PoisonPill to all workers telling them to shut down themselves
- router./*!*/!(Broadcast(PoisonPill))
-
- // send a PoisonPill to the router, telling him to shut himself down
- router ! PoisonPill
-
- case Result(value) =>
- // handle result from the worker
- pi += value
- nrOfResults/*#*/ += 1
- if (nrOfResults == nrOfMessages) self./*!*/stop()
- }
-
- override def preStart() {
- start = System.currentTimeMillis
- }
-
- override def postStop() {
- // tell the world that the calculation is complete
- println(
- "\n\tPi estimate: \t\t%s\n\tCalculation time: \t%s millis"
- .format(pi, (System.currentTimeMillis - start)))
- latch/*#*/.countDown()
- }
- }
-
- // ==================
- // ===== Run it =====
- // ==================
- def calculate(nrOfWorkers: Int, nrOfElements: Int, nrOfMessages: Int) {
-
- // this latch is only plumbing to know when the calculation is completed
- val latch = new CountDownLatch(1)
-
- // create the master
- val master = actorOf(
- new Master(nrOfWorkers, nrOfMessages, nrOfElements, latch)).start()
-
- // start the calculation
- master ! Calculate
-
- // wait for master to shut down
- latch.await()
- }
-}
diff --git a/test/files/jvm/t7253.check b/test/files/jvm/t7253.check
new file mode 100644
index 0000000000..43f53aba12
--- /dev/null
+++ b/test/files/jvm/t7253.check
@@ -0,0 +1 @@
+bytecode identical
diff --git a/test/files/jvm/t7253/Base_1.scala b/test/files/jvm/t7253/Base_1.scala
new file mode 100644
index 0000000000..a531ebb69d
--- /dev/null
+++ b/test/files/jvm/t7253/Base_1.scala
@@ -0,0 +1,5 @@
+trait A { def f(): Int }
+trait B1 extends A
+abstract class B2 extends A
+class B3 extends A { def f(): Int = 1 }
+class B4 extends B3
diff --git a/test/files/jvm/t7253/JavaClient_1.java b/test/files/jvm/t7253/JavaClient_1.java
new file mode 100644
index 0000000000..43475de2f5
--- /dev/null
+++ b/test/files/jvm/t7253/JavaClient_1.java
@@ -0,0 +1,9 @@
+public class JavaClient_1 {
+ int foo() {
+ ((A) null).f();
+ ((B1) null).f();
+ ((B2) null).f();
+ ((B3) null).f();
+ return ((B4) null).f();
+ }
+}
diff --git a/test/files/jvm/t7253/ScalaClient_1.scala b/test/files/jvm/t7253/ScalaClient_1.scala
new file mode 100644
index 0000000000..d244b326a8
--- /dev/null
+++ b/test/files/jvm/t7253/ScalaClient_1.scala
@@ -0,0 +1,9 @@
+class ScalaClient_1 {
+ def foo() = {
+ (null: A).f()
+ (null: B1).f()
+ (null: B2).f()
+ (null: B3).f()
+ (null: B4).f()
+ }
+}
diff --git a/test/files/jvm/t7253/test.scala b/test/files/jvm/t7253/test.scala
new file mode 100644
index 0000000000..7fe08e8813
--- /dev/null
+++ b/test/files/jvm/t7253/test.scala
@@ -0,0 +1,28 @@
+import scala.tools.partest.BytecodeTest
+
+import scala.tools.nsc.util.JavaClassPath
+import java.io.InputStream
+import scala.tools.asm
+import asm.ClassReader
+import asm.tree.{ClassNode, InsnList}
+import scala.collection.JavaConverters._
+
+object Test extends BytecodeTest {
+ import instructions._
+
+ def show: Unit = {
+ val instrBaseSeqs = Seq("ScalaClient_1", "JavaClient_1") map (name => instructions.fromMethod(getMethod(loadClassNode(name), "foo")))
+ val instrSeqs = instrBaseSeqs map (_ filter isInvoke)
+ cmpInstructions(instrSeqs(0), instrSeqs(1))
+ }
+
+ def cmpInstructions(isa: List[Instruction], isb: List[Instruction]) = {
+ if (isa == isb) println("bytecode identical")
+ else diffInstructions(isa, isb)
+ }
+
+ def isInvoke(node: Instruction): Boolean = {
+ val opcode = node.opcode
+ (opcode == "INVOKEVIRTUAL") || (opcode == "INVOKEINTERFACE")
+ }
+}
diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check
new file mode 100644
index 0000000000..42ccabed1b
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.check
@@ -0,0 +1,10 @@
+delayed-init-ref.scala:17: error: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println(O.vall) // warn
+ ^
+delayed-init-ref.scala:19: error: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println(vall) // warn
+ ^
+delayed-init-ref.scala:40: error: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value
+ println({locally(()); this}.foo) // warn (spurious, but we can't discriminate)
+ ^
+three errors found
diff --git a/test/files/neg/delayed-init-ref.flags b/test/files/neg/delayed-init-ref.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/neg/delayed-init-ref.scala b/test/files/neg/delayed-init-ref.scala
new file mode 100644
index 0000000000..f2aa804e28
--- /dev/null
+++ b/test/files/neg/delayed-init-ref.scala
@@ -0,0 +1,42 @@
+trait T {
+ val traitVal = ""
+}
+
+object O extends App with T {
+ val vall = ""
+ lazy val lazyy = ""
+ def deff = ""
+
+ println(vall) // no warn
+ new {
+ println(vall) // no warn
+ }
+}
+
+object Client {
+ println(O.vall) // warn
+ import O.vall
+ println(vall) // warn
+
+ println(O.lazyy) // no warn
+ println(O.deff) // no warn
+ println(O.traitVal) // no warn
+}
+
+// Delayed init usage pattern from Specs2
+// See: https://groups.google.com/d/msg/scala-sips/wP6dL8nIAQs/ogjoPE-MSVAJ
+trait Before extends DelayedInit {
+ def before()
+ override def delayedInit(x: => Unit): Unit = { before; x }
+}
+object Spec {
+ trait UserContext extends Before {
+ def before() = ()
+ val foo = "foo"
+ }
+ new UserContext {
+ println(foo) // no warn
+ println(this.foo) // no warn
+ println({locally(()); this}.foo) // warn (spurious, but we can't discriminate)
+ }
+}
diff --git a/test/files/neg/macro-without-xmacros-a.check b/test/files/neg/macro-without-xmacros-a.check
index ae6c6c695a..ec194be3a9 100644
--- a/test/files/neg/macro-without-xmacros-a.check
+++ b/test/files/neg/macro-without-xmacros-a.check
@@ -1,5 +1,5 @@
Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
This can be achieved by adding the import clause 'import scala.language.experimental.macros'
or by setting the compiler option -language:experimental.macros.
See the Scala docs for value scala.language.experimental.macros for a discussion
@@ -7,11 +7,11 @@ why the feature needs to be explicitly enabled.
def foo(x: Int): Int = macro foo_impl
^
Macros_2.scala:7: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def bar(x: Int): Int = macro bar_impl
^
Macros_2.scala:11: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def quux(x: Int): Int = macro quux_impl
^
three errors found
diff --git a/test/files/neg/macro-without-xmacros-b.check b/test/files/neg/macro-without-xmacros-b.check
index c3cadcf36a..c97850f0a9 100644
--- a/test/files/neg/macro-without-xmacros-b.check
+++ b/test/files/neg/macro-without-xmacros-b.check
@@ -1,5 +1,5 @@
Macros_2.scala:3: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
This can be achieved by adding the import clause 'import scala.language.experimental.macros'
or by setting the compiler option -language:experimental.macros.
See the Scala docs for value scala.language.experimental.macros for a discussion
@@ -7,11 +7,11 @@ why the feature needs to be explicitly enabled.
def foo(x: Int): Int = macro Impls.foo_impl
^
Macros_2.scala:5: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def bar(x: Int): Int = macro Impls.bar_impl
^
Macros_2.scala:9: error: macro definition needs to be enabled
-by making the implicit value language.experimental.macros visible.
+by making the implicit value scala.language.experimental.macros visible.
def quux(x: Int): Int = macro Impls.quux_impl
^
three errors found
diff --git a/test/files/neg/stringinterpolation_macro-neg.check b/test/files/neg/stringinterpolation_macro-neg.check
index 8986b899a3..457f497f2f 100644
--- a/test/files/neg/stringinterpolation_macro-neg.check
+++ b/test/files/neg/stringinterpolation_macro-neg.check
@@ -66,5 +66,5 @@ Note that implicit conversions are not applicable because they are ambiguous:
^
stringinterpolation_macro-neg.scala:30: error: illegal conversion character
f"$s%i"
- ^
+ ^
15 errors found
diff --git a/test/files/neg/t414.check b/test/files/neg/t414.check
index e15dbaea71..30211eef8e 100644
--- a/test/files/neg/t414.check
+++ b/test/files/neg/t414.check
@@ -1,7 +1,7 @@
t414.scala:5: error: pattern type is incompatible with expected type;
found : Empty.type
required: IntMap[a]
-Note: if you intended to match against the class, try `case _: Empty[_]` or `case Empty()`
+Note: if you intended to match against the class, try `case Empty()`
case Empty =>
^
t414.scala:7: error: type mismatch;
diff --git a/test/files/neg/t4879.check b/test/files/neg/t4879.check
index 21cd329640..c7edd583c8 100644
--- a/test/files/neg/t4879.check
+++ b/test/files/neg/t4879.check
@@ -1,13 +1,13 @@
t4879.scala:6: error: pattern type is incompatible with expected type;
found : C.type
required: C
-Note: if you intended to match against the class, try `case _: C` or `case C(_)`
+Note: if you intended to match against the class, try `case C(_)`
case C => true
^
t4879.scala:10: error: pattern type is incompatible with expected type;
found : D.type
required: D[T,U,V]
-Note: if you intended to match against the class, try `case _: D[_,_,_]` or `case D(_,_,_)`
+Note: if you intended to match against the class, try `case D(_,_,_)`
case D => true
^
two errors found
diff --git a/test/files/neg/t5510.check b/test/files/neg/t5510.check
index 04220e79bb..322a2f5e25 100644
--- a/test/files/neg/t5510.check
+++ b/test/files/neg/t5510.check
@@ -1,15 +1,15 @@
t5510.scala:2: error: unclosed string literal
val s1 = s"xxx
- ^
+ ^
t5510.scala:3: error: unclosed string literal
val s2 = s"xxx $x
^
t5510.scala:4: error: unclosed string literal
val s3 = s"xxx $$
- ^
+ ^
t5510.scala:5: error: unclosed string literal
val s4 = ""s"
- ^
+ ^
t5510.scala:6: error: unclosed multi-line string literal
val s5 = ""s""" $s1 $s2 s"
^
diff --git a/test/files/neg/t5663-badwarneq.check b/test/files/neg/t5663-badwarneq.check
index 00c2234e9d..242be8de68 100644
--- a/test/files/neg/t5663-badwarneq.check
+++ b/test/files/neg/t5663-badwarneq.check
@@ -1,22 +1,40 @@
-t5663-badwarneq.scala:42: error: comparing case class values of types Some[Int] and None.type using `==' will always yield false
+t5663-badwarneq.scala:47: error: comparing case class values of types Some[Int] and None.type using `==' will always yield false
println(new Some(1) == None) // Should complain on type, was: spuriously complains on fresh object
^
-t5663-badwarneq.scala:43: error: comparing case class values of types Some[Int] and Thing using `==' will always yield false
+t5663-badwarneq.scala:48: error: comparing case class values of types Some[Int] and Thing using `==' will always yield false
println(Some(1) == new Thing(1)) // Should complain on type, was: spuriously complains on fresh object
^
-t5663-badwarneq.scala:51: error: ThingOne and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:56: error: ThingOne and Thingy are unrelated: they will most likely never compare equal
println(t1 == t2) // true, but apparently unrelated, a compromise warning
^
-t5663-badwarneq.scala:52: error: ThingThree and Thingy are unrelated: they will most likely never compare equal
+t5663-badwarneq.scala:57: error: ThingThree and Thingy are unrelated: they will most likely never compare equal
println(t4 == t2) // true, complains because ThingThree is final and Thingy not a subclass, stronger claim than unrelated
^
-t5663-badwarneq.scala:55: error: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
+t5663-badwarneq.scala:60: error: comparing case class values of types ThingTwo and Some[Int] using `==' will always yield false
println(t3 == Some(1)) // false, warn on different cases
^
-t5663-badwarneq.scala:56: error: comparing values of types ThingOne and Cousin using `==' will always yield false
+t5663-badwarneq.scala:61: error: comparing values of types ThingOne and Cousin using `==' will always yield false
println(t1 == c) // should warn
^
-t5663-badwarneq.scala:64: error: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
+t5663-badwarneq.scala:69: error: comparing case class values of types Simple and SimpleSibling.type using `==' will always yield false
println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
^
-7 errors found
+t5663-badwarneq.scala:72: error: ValueClass1 and Int are unrelated: they will never compare equal
+ println(new ValueClass1(5) == 5) // bad
+ ^
+t5663-badwarneq.scala:74: error: comparing values of types Int and ValueClass1 using `==' will always yield false
+ println(5 == new ValueClass1(5)) // bad
+ ^
+t5663-badwarneq.scala:78: error: ValueClass2[String] and String are unrelated: they will never compare equal
+ println(new ValueClass2("abc") == "abc") // bad
+ ^
+t5663-badwarneq.scala:79: error: ValueClass2[Int] and ValueClass1 are unrelated: they will never compare equal
+ println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
+ ^
+t5663-badwarneq.scala:81: error: comparing values of types ValueClass3 and ValueClass2[Int] using `==' will always yield false
+ println(ValueClass3(5) == new ValueClass2(5)) // bad
+ ^
+t5663-badwarneq.scala:82: error: comparing values of types ValueClass3 and Int using `==' will always yield false
+ println(ValueClass3(5) == 5) // bad
+ ^
+13 errors found
diff --git a/test/files/neg/t5663-badwarneq.scala b/test/files/neg/t5663-badwarneq.scala
index 56ec389c03..3c0376914e 100644
--- a/test/files/neg/t5663-badwarneq.scala
+++ b/test/files/neg/t5663-badwarneq.scala
@@ -17,6 +17,11 @@ class SimpleParent
case class Simple() extends SimpleParent
case object SimpleSibling extends SimpleParent
+// value classes
+final class ValueClass1(val value: Int) extends AnyVal
+final class ValueClass2[T](val value: T) extends AnyVal
+final case class ValueClass3(val value: Int) extends AnyVal
+
/* It's not possible to run partest without -deprecation.
* Since detecting the warnings requires a neg test with
* -Xfatal-warnings, and deprecation terminates the compile,
@@ -63,6 +68,19 @@ object Test {
println(new Simple() == SimpleSibling) // like Some(1) == None, but needn't be final case
+ println(new ValueClass1(5) == new ValueClass1(5)) // ok
+ println(new ValueClass1(5) == 5) // bad
+ println(new ValueClass1(5) == (5: Any)) // ok, have to let it through
+ println(5 == new ValueClass1(5)) // bad
+ println((5: Any) == new ValueClass1(5) == (5: Any)) // ok
+
+ println(new ValueClass2("abc") == new ValueClass2("abc")) // ok
+ println(new ValueClass2("abc") == "abc") // bad
+ println(new ValueClass2(5) == new ValueClass1(5)) // bad - different value classes
+ println(ValueClass3(5) == new ValueClass3(5)) // ok
+ println(ValueClass3(5) == new ValueClass2(5)) // bad
+ println(ValueClass3(5) == 5) // bad
+
/*
val mine = new MyThing
val some = new SomeThing
diff --git a/test/files/neg/t5856.check b/test/files/neg/t5856.check
index ac49d4b9ac..08a61bdc07 100644
--- a/test/files/neg/t5856.check
+++ b/test/files/neg/t5856.check
@@ -1,6 +1,6 @@
t5856.scala:10: error: invalid string interpolation: `$$', `$'ident or `$'BlockExpr expected
val s9 = s"$"
- ^
+ ^
t5856.scala:10: error: unclosed string literal
val s9 = s"$"
^
diff --git a/test/files/neg/t6040.check b/test/files/neg/t6040.check
index f91df0c46d..16c90ede7e 100644
--- a/test/files/neg/t6040.check
+++ b/test/files/neg/t6040.check
@@ -1,5 +1,5 @@
t6040.scala:1: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
diff --git a/test/files/neg/t6323a.check b/test/files/neg/t6323a.check
index a80c9a0a81..4d682e5422 100644
--- a/test/files/neg/t6323a.check
+++ b/test/files/neg/t6323a.check
@@ -1,3 +1,9 @@
+t6323a.scala:10: materializing requested scala.reflect.type.ClassTag[Test] using `package`.this.materializeClassTag[Test]()
+ val lookAtMe = m.reflect(Test("a",List(5)))
+ ^
+t6323a.scala:11: materializing requested reflect.runtime.universe.type.TypeTag[Test] using `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe)
+ val value = u.typeOf[Test]
+ ^
t6323a.scala:11: `package`.this.materializeTypeTag[Test](scala.reflect.runtime.`package`.universe) is not a valid implicit value for reflect.runtime.universe.TypeTag[Test] because:
failed to typecheck the materialized tag:
cannot create a TypeTag referring to local class Test.Test: use WeakTypeTag instead
diff --git a/test/files/neg/t6771b.check b/test/files/neg/t6771b.check
new file mode 100644
index 0000000000..ba99e9178d
--- /dev/null
+++ b/test/files/neg/t6771b.check
@@ -0,0 +1,6 @@
+t6771b.scala:14: error: type mismatch;
+ found : x.type (with underlying type String)
+ required: Test.a.type
+ b = b match { case x => x }
+ ^
+one error found
diff --git a/test/files/neg/t6771b.scala b/test/files/neg/t6771b.scala
new file mode 100644
index 0000000000..78f11f7750
--- /dev/null
+++ b/test/files/neg/t6771b.scala
@@ -0,0 +1,16 @@
+// Currently, the pattern matcher widens the type of the
+// scrutinee, so this doesn't typecheck. This test just
+// confirms this behaviour, although it would be an improvement
+// to change this and make this a `pos` test.
+//
+// But, to the intrepid hacker who works on this, a few notes:
+// You'll have to look into places in the pattern matcher that
+// call `dealias`, and see if they need to be `dealiasWiden`.
+// For example, if `checkableType` used only `dealias`, `pos/t6671.scala`
+// would fail.
+object Test {
+ val a = ""; var b: a.type = a
+
+ b = b match { case x => x }
+}
+
diff --git a/test/files/neg/t6952.check b/test/files/neg/t6952.check
index f1e1881404..1a591d02c6 100644
--- a/test/files/neg/t6952.check
+++ b/test/files/neg/t6952.check
@@ -1,5 +1,5 @@
t6952.scala:2: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
This can be achieved by adding the import clause 'import scala.language.dynamics'
or by setting the compiler option -language:dynamics.
See the Scala docs for value scala.language.dynamics for a discussion
@@ -7,7 +7,7 @@ why the feature needs to be explicitly enabled.
trait B extends Dynamic
^
t6952.scala:3: error: extension of type scala.Dynamic needs to be enabled
-by making the implicit value language.dynamics visible.
+by making the implicit value scala.language.dynamics visible.
trait C extends A with Dynamic
^
two errors found
diff --git a/test/files/neg/t7171.check b/test/files/neg/t7171.check
new file mode 100644
index 0000000000..8bdf08129b
--- /dev/null
+++ b/test/files/neg/t7171.check
@@ -0,0 +1,7 @@
+t7171.scala:2: error: The outer reference in this type test cannot be checked at run time.
+ final case class A()
+ ^
+t7171.scala:9: error: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+two errors found
diff --git a/test/files/neg/t7171.flags b/test/files/neg/t7171.flags
new file mode 100644
index 0000000000..464cc20ea6
--- /dev/null
+++ b/test/files/neg/t7171.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked \ No newline at end of file
diff --git a/test/files/neg/t7171.scala b/test/files/neg/t7171.scala
new file mode 100644
index 0000000000..534b2070a3
--- /dev/null
+++ b/test/files/neg/t7171.scala
@@ -0,0 +1,11 @@
+trait T {
+ final case class A()
+
+ // Was:
+ // error: scrutinee is incompatible with pattern type;
+ // found : T.this.A
+ // required: T#A
+ def foo(a: T#A) = a match {
+ case _: A => true; case _ => false
+ }
+}
diff --git a/test/files/neg/t7171b.check b/test/files/neg/t7171b.check
new file mode 100644
index 0000000000..bd6b2bcfb4
--- /dev/null
+++ b/test/files/neg/t7171b.check
@@ -0,0 +1,10 @@
+t7171b.scala:2: error: The outer reference in this type test cannot be checked at run time.
+ final case class A()
+ ^
+t7171b.scala:8: error: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+t7171b.scala:13: error: The outer reference in this type test cannot be checked at run time.
+ case _: A => true; case _ => false
+ ^
+three errors found
diff --git a/test/files/neg/t7171b.flags b/test/files/neg/t7171b.flags
new file mode 100644
index 0000000000..464cc20ea6
--- /dev/null
+++ b/test/files/neg/t7171b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -unchecked \ No newline at end of file
diff --git a/test/files/neg/t7171b.scala b/test/files/neg/t7171b.scala
new file mode 100644
index 0000000000..53c7787f8b
--- /dev/null
+++ b/test/files/neg/t7171b.scala
@@ -0,0 +1,15 @@
+trait T {
+ final case class A()
+}
+
+final class U extends T {
+ // this match should also not be deemed impossible
+ def foo(a: U#A) = a match {
+ case _: A => true; case _ => false
+ }
+
+ // this match should also not be deemed impossible
+ def bar(a: T#A) = a match {
+ case _: A => true; case _ => false
+ }
+}
diff --git a/test/files/neg/t7185.check b/test/files/neg/t7185.check
new file mode 100644
index 0000000000..46f2cc797e
--- /dev/null
+++ b/test/files/neg/t7185.check
@@ -0,0 +1,7 @@
+t7185.scala:2: error: overloaded method value apply with alternatives:
+ (f: scala.xml.Node => Boolean)scala.xml.NodeSeq <and>
+ (i: Int)scala.xml.Node
+ cannot be applied to ()
+ <e></e>()
+ ^
+one error found
diff --git a/test/files/neg/t7185.scala b/test/files/neg/t7185.scala
new file mode 100644
index 0000000000..2f9284bc5f
--- /dev/null
+++ b/test/files/neg/t7185.scala
@@ -0,0 +1,3 @@
+object Test {
+ <e></e>()
+}
diff --git a/test/files/neg/t7235.check b/test/files/neg/t7235.check
new file mode 100644
index 0000000000..357a3dfd83
--- /dev/null
+++ b/test/files/neg/t7235.check
@@ -0,0 +1,4 @@
+t7235.scala:9: error: implementation restriction: cannot reify refinement type trees with non-empty bodies
+ val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C { def x: Int } = ??? }.tree
+ ^
+one error found
diff --git a/test/files/neg/t7235.scala b/test/files/neg/t7235.scala
new file mode 100644
index 0000000000..cfebad3fae
--- /dev/null
+++ b/test/files/neg/t7235.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class C
+
+object Test extends App {
+ val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C { def x: Int } = ??? }.tree
+ println(tpt)
+ println(tpt.templ.parents)
+ println(tpt.templ.self)
+ println(tpt.templ.body)
+}
diff --git a/test/files/neg/t7238.check b/test/files/neg/t7238.check
new file mode 100644
index 0000000000..b87f83ff65
--- /dev/null
+++ b/test/files/neg/t7238.check
@@ -0,0 +1,6 @@
+t7238.scala:6: error: type mismatch;
+ found : Seq[Any]
+ required: Seq[String]
+ c.c()(Seq[Any](): _*)
+ ^
+one error found
diff --git a/test/files/neg/t7238.scala b/test/files/neg/t7238.scala
new file mode 100644
index 0000000000..d42dc8d385
--- /dev/null
+++ b/test/files/neg/t7238.scala
@@ -0,0 +1,7 @@
+trait Main {
+ trait C {
+ def c(x: Any = 0)(bs: String*)
+ }
+ def c: C
+ c.c()(Seq[Any](): _*)
+}
diff --git a/test/files/neg/t7251.check b/test/files/neg/t7251.check
new file mode 100644
index 0000000000..8df8984d63
--- /dev/null
+++ b/test/files/neg/t7251.check
@@ -0,0 +1,4 @@
+B_2.scala:5: error: object s.Outer$Triple$ is not a value
+ println( s.Outer$Triple$ )
+ ^
+one error found
diff --git a/test/files/neg/t7251/A_1.scala b/test/files/neg/t7251/A_1.scala
new file mode 100644
index 0000000000..d05373ed28
--- /dev/null
+++ b/test/files/neg/t7251/A_1.scala
@@ -0,0 +1,10 @@
+package s
+
+object Outer {
+ type Triple[+A, +B, +C] = Tuple3[A, B, C]
+ object Triple {
+ def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z)
+ def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x)
+ }
+}
+
diff --git a/test/files/neg/t7251/B_2.scala b/test/files/neg/t7251/B_2.scala
new file mode 100644
index 0000000000..eb59b30902
--- /dev/null
+++ b/test/files/neg/t7251/B_2.scala
@@ -0,0 +1,7 @@
+package s
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ println( s.Outer$Triple$ )
+ }
+}
diff --git a/test/files/neg/t7259.check b/test/files/neg/t7259.check
new file mode 100644
index 0000000000..0ad627fc3b
--- /dev/null
+++ b/test/files/neg/t7259.check
@@ -0,0 +1,7 @@
+t7259.scala:1: error: not found: type xxxxx
+@xxxxx // error: not found: type xxxx
+ ^
+t7259.scala:8: error: type xxxxx is not a member of package annotation
+@annotation.xxxxx // error: not found: type scala
+ ^
+two errors found
diff --git a/test/files/neg/t7259.scala b/test/files/neg/t7259.scala
new file mode 100644
index 0000000000..0fdfe18822
--- /dev/null
+++ b/test/files/neg/t7259.scala
@@ -0,0 +1,9 @@
+@xxxxx // error: not found: type xxxx
+class Ok
+
+//
+// This had the wrong error message in 2.9 and 2.10.
+//
+
+@annotation.xxxxx // error: not found: type scala
+class WrongErrorMessage
diff --git a/test/files/neg/t7285.check b/test/files/neg/t7285.check
new file mode 100644
index 0000000000..108f4292a8
--- /dev/null
+++ b/test/files/neg/t7285.check
@@ -0,0 +1,13 @@
+t7285.scala:15: error: match may not be exhaustive.
+It would fail on the following input: (Up, Down)
+ (d1, d2) match {
+ ^
+t7285.scala:33: error: match may not be exhaustive.
+It would fail on the following input: Down
+ (d1) match {
+ ^
+t7285.scala:51: error: match may not be exhaustive.
+It would fail on the following input: (Up, Down)
+ (d1, d2) match {
+ ^
+three errors found
diff --git a/test/files/neg/t7285.flags b/test/files/neg/t7285.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7285.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7285.scala b/test/files/neg/t7285.scala
new file mode 100644
index 0000000000..14121d92b1
--- /dev/null
+++ b/test/files/neg/t7285.scala
@@ -0,0 +1,55 @@
+sealed abstract class Base
+
+
+object Test1 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Up) => true
+ }
+ }
+}
+
+object Test2 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1) match {
+ case Test2.Base.Up => false
+ }
+ }
+}
+
+
+object Test4 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base
+
+ case object Up extends Base
+ }
+
+ import Test4.Base._
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Test4.Base.Up) => true
+ }
+}
diff --git a/test/files/neg/t7289.check b/test/files/neg/t7289.check
new file mode 100644
index 0000000000..e4aeebbc6c
--- /dev/null
+++ b/test/files/neg/t7289.check
@@ -0,0 +1,4 @@
+t7289.scala:8: error: could not find implicit value for parameter e: Test.Schtroumpf[scala.collection.immutable.Nil.type]
+ implicitly[Schtroumpf[Nil.type]]
+ ^
+one error found
diff --git a/test/files/neg/t7289.scala b/test/files/neg/t7289.scala
new file mode 100644
index 0000000000..f4ed3daf76
--- /dev/null
+++ b/test/files/neg/t7289.scala
@@ -0,0 +1,39 @@
+object Test extends App {
+ trait Schtroumpf[T]
+
+ implicit def schtroumpf[T, U <: Coll[T], Coll[X] <: Traversable[X]]
+ (implicit minorSchtroumpf: Schtroumpf[T]): Schtroumpf[U] = ???
+
+ implicit val qoo: Schtroumpf[Int] = new Schtroumpf[Int]{}
+ implicitly[Schtroumpf[Nil.type]]
+}
+
+/*
+info1 = {scala.tools.nsc.typechecker.Implicits$ImplicitInfo@3468}"qoo: => Test.Schtroumpf[Int]"
+info2 = {scala.tools.nsc.typechecker.Implicits$ImplicitInfo@3469}"schtroumpf: [T, U <: Coll[T], Coll[_] <: Traversable[_]](implicit minorSchtroumpf: Test.Schtroumpf[T])Test.Schtroumpf[U]"
+isStrictlyMoreSpecific(info1, info2)
+ isSubType(Test.Schtroumpf[Int], Test.Schtroumpf[U] forSome { T; U <: Coll[T]; Coll[_] <: Traversable[_] })
+ isAsSpecificValueType(Test.Schtroumpf[Int], Test.Schtroumpf[U], undef2 = List(type T, type U, type Coll))
+
+ val et: ExistentialType = Test.Schtroumpf[U] forSome { T; U <: Coll[T]; Coll[_] <: Traversable[_] }
+ val tp1 = Test.Schtroumpf[Int]
+ et.withTypeVars(isSubType(tp1, _, depth))
+ solve()
+ tvars = tList(=?Nothing, =?Int, =?=?Int)
+
+
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?U ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?Coll ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing )
+[ setInst] scala.collection.immutable.Nil.type( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=scala.collection.immutable.Nil.type )
+[ setInst] =?scala.collection.immutable.Nil.type( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?scala.collection.immutable.Nil.type )
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Int )
+[ create] ?T ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?U ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ create] ?Coll ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]] )
+[ setInst] Nothing ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], T=Nothing )
+[ setInst] Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], U=Int )
+[ setInst] =?Int ( In Test#schtroumpf[T,U <: Coll[T],Coll[_] <: Traversable[_]], Coll==?Int )
+*/ \ No newline at end of file
diff --git a/test/files/neg/t7289_status_quo.check b/test/files/neg/t7289_status_quo.check
new file mode 100644
index 0000000000..31c072e969
--- /dev/null
+++ b/test/files/neg/t7289_status_quo.check
@@ -0,0 +1,22 @@
+t7289_status_quo.scala:9: error: could not find implicit value for parameter e: Test1.Ext[List[Int]]
+ implicitly[Ext[List[Int]]] // fails - not found
+ ^
+t7289_status_quo.scala:11: error: could not find implicit value for parameter e: Test1.Ext[List[List[List[Int]]]]
+ implicitly[Ext[List[List[List[Int]]]]] // fails - not found
+ ^
+t7289_status_quo.scala:15: error: ambiguous implicit values:
+ both method f in object Test1 of type [A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Test1.Ext[A])Test1.Ext[Coll]
+ and value m in object Test1 of type => Test1.Ext[List[List[Int]]]
+ match expected type Test1.Ext[_ <: List[List[Int]]]
+ implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous
+ ^
+t7289_status_quo.scala:20: error: could not find implicit value for parameter e: Test1.ExtCov[List[Int]]
+ implicitly[ExtCov[List[Int]]] // fails - not found
+ ^
+t7289_status_quo.scala:21: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[Int]]]
+ implicitly[ExtCov[List[List[Int]]]] // fails - not found
+ ^
+t7289_status_quo.scala:22: error: could not find implicit value for parameter e: Test1.ExtCov[List[List[List[Int]]]]
+ implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found
+ ^
+6 errors found
diff --git a/test/files/neg/t7289_status_quo.scala b/test/files/neg/t7289_status_quo.scala
new file mode 100644
index 0000000000..39621429a1
--- /dev/null
+++ b/test/files/neg/t7289_status_quo.scala
@@ -0,0 +1,23 @@
+// record the status quo after this fix
+// not clear to @adriaanm why an upper-bounded existential in an invariant position
+// is different from putting that upper bound in a covariant position
+object Test1 {
+ trait Ext[T]
+ implicit def f[A, Coll <: CC[A], CC[X] <: Traversable[X]](implicit xi: Ext[A]): Ext[Coll] = ???
+ implicit val m: Ext[List[List[Int]]] = new Ext[List[List[Int]]]{}
+
+ implicitly[Ext[List[Int]]] // fails - not found
+ implicitly[Ext[List[List[Int]]]] // compiles
+ implicitly[Ext[List[List[List[Int]]]]] // fails - not found
+
+ // Making Ext[+T] should incur the same behavior as these. (so says @paulp)
+ implicitly[Ext[_ <: List[Int]]] // compiles
+ implicitly[Ext[_ <: List[List[Int]]]] // fails - ambiguous
+ implicitly[Ext[_ <: List[List[List[Int]]]]] // compiles
+
+ // But, we currently get:
+ trait ExtCov[+T]
+ implicitly[ExtCov[List[Int]]] // fails - not found
+ implicitly[ExtCov[List[List[Int]]]] // fails - not found
+ implicitly[ExtCov[List[List[List[Int]]]]] // fails - not found
+} \ No newline at end of file
diff --git a/test/files/neg/t7290.check b/test/files/neg/t7290.check
new file mode 100644
index 0000000000..85bedbac95
--- /dev/null
+++ b/test/files/neg/t7290.check
@@ -0,0 +1,10 @@
+t7290.scala:4: error: Pattern contains duplicate alternatives: 0
+ case 0 | 0 => 0
+ ^
+t7290.scala:5: error: Pattern contains duplicate alternatives: 2, 3
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ ^
+t7290.scala:6: error: Pattern contains duplicate alternatives: 4
+ case 4 | (_ @ 4) => 0
+ ^
+three errors found
diff --git a/test/files/neg/t7290.flags b/test/files/neg/t7290.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7290.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7290.scala b/test/files/neg/t7290.scala
new file mode 100644
index 0000000000..b9db7f7e8a
--- /dev/null
+++ b/test/files/neg/t7290.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ val y = (0: Int) match {
+ case 1 => 1
+ case 0 | 0 => 0
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ case 4 | (_ @ 4) => 0
+ case _ => -1
+ }
+ assert(y == 0, y)
+}
diff --git a/test/files/neg/t7299.check b/test/files/neg/t7299.check
new file mode 100644
index 0000000000..74340c4841
--- /dev/null
+++ b/test/files/neg/t7299.check
@@ -0,0 +1,7 @@
+t7299.scala:4: error: implementation restricts functions to 22 parameters
+ val eta1 = f _
+ ^
+t7299.scala:5: error: implementation restricts functions to 22 parameters
+ val eta2 = g[Any] _
+ ^
+two errors found
diff --git a/test/files/neg/t7299.scala b/test/files/neg/t7299.scala
new file mode 100644
index 0000000000..f3aae5ce5d
--- /dev/null
+++ b/test/files/neg/t7299.scala
@@ -0,0 +1,6 @@
+object Test {
+ def f(a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, a8: Int, a9: Int, a10: Int, a11: Int, a12: Int, a13: Int, a14: Int, a15: Int, a16: Int, a17: Int, a18: Int, a19: Int, a20: Int, a21: Int, a22: Int, a23: Int) = 0
+ def g[A](a1: Int, a2: Int, a3: Int, a4: Int, a5: Int, a6: Int, a7: Int, a8: Int, a9: Int, a10: Int, a11: Int, a12: Int, a13: Int, a14: Int, a15: Int, a16: Int, a17: Int, a18: Int, a19: Int, a20: Int, a21: Int, a22: Int, a23: Int) = 0
+ val eta1 = f _
+ val eta2 = g[Any] _
+}
diff --git a/test/files/neg/t7325.check b/test/files/neg/t7325.check
new file mode 100644
index 0000000000..709ab6db3e
--- /dev/null
+++ b/test/files/neg/t7325.check
@@ -0,0 +1,19 @@
+t7325.scala:2: error: percent signs not directly following splicees must be escaped
+ println(f"%")
+ ^
+t7325.scala:4: error: percent signs not directly following splicees must be escaped
+ println(f"%%%")
+ ^
+t7325.scala:6: error: percent signs not directly following splicees must be escaped
+ println(f"%%%%%")
+ ^
+t7325.scala:16: error: wrong conversion string
+ println(f"${0}%")
+ ^
+t7325.scala:19: error: percent signs not directly following splicees must be escaped
+ println(f"${0}%%%d")
+ ^
+t7325.scala:21: error: percent signs not directly following splicees must be escaped
+ println(f"${0}%%%%%d")
+ ^
+6 errors found
diff --git a/test/files/neg/t7325.scala b/test/files/neg/t7325.scala
new file mode 100644
index 0000000000..adfd8dd47a
--- /dev/null
+++ b/test/files/neg/t7325.scala
@@ -0,0 +1,25 @@
+object Test extends App {
+ println(f"%")
+ println(f"%%")
+ println(f"%%%")
+ println(f"%%%%")
+ println(f"%%%%%")
+ println(f"%%%%%%")
+
+ println(f"%%n")
+ println(f"%%%n")
+ println(f"%%%%n")
+ println(f"%%%%%n")
+ println(f"%%%%%%n")
+ println(f"%%%%%%%n")
+
+ println(f"${0}%")
+ println(f"${0}%d")
+ println(f"${0}%%d")
+ println(f"${0}%%%d")
+ println(f"${0}%%%%d")
+ println(f"${0}%%%%%d")
+
+ println(f"${0}%n")
+ println(f"${0}%d%n")
+} \ No newline at end of file
diff --git a/test/files/neg/t7330.check b/test/files/neg/t7330.check
new file mode 100644
index 0000000000..b96d656d2b
--- /dev/null
+++ b/test/files/neg/t7330.check
@@ -0,0 +1,5 @@
+t7330.scala:4: error: pattern must be a value: Y[_]
+Note: if you intended to match against the class, try `case _: Y[_]`
+ 0 match { case Y[_] => }
+ ^
+one error found
diff --git a/test/files/neg/t7330.scala b/test/files/neg/t7330.scala
new file mode 100644
index 0000000000..13a943a02b
--- /dev/null
+++ b/test/files/neg/t7330.scala
@@ -0,0 +1,5 @@
+class Y[T]
+class Test {
+ // TypeTree is not a valid tree for a pattern
+ 0 match { case Y[_] => }
+} \ No newline at end of file
diff --git a/test/files/neg/t7369.check b/test/files/neg/t7369.check
new file mode 100644
index 0000000000..4f101e145a
--- /dev/null
+++ b/test/files/neg/t7369.check
@@ -0,0 +1,13 @@
+t7369.scala:6: error: unreachable code
+ case Tuple1(X) => // unreachable
+ ^
+t7369.scala:13: error: unreachable code
+ case Tuple1(true) => // unreachable
+ ^
+t7369.scala:31: error: unreachable code
+ case Tuple1(X) => // unreachable
+ ^
+t7369.scala:40: error: unreachable code
+ case Tuple1(null) => // unreachable
+ ^
+four errors found
diff --git a/test/files/neg/t7369.flags b/test/files/neg/t7369.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/neg/t7369.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7369.scala b/test/files/neg/t7369.scala
new file mode 100644
index 0000000000..87ddfe98b7
--- /dev/null
+++ b/test/files/neg/t7369.scala
@@ -0,0 +1,43 @@
+object Test {
+ val X, Y = true
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) =>
+ case Tuple1(X) => // unreachable
+ case _ =>
+ }
+
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(true) =>
+ case Tuple1(false) =>
+ case Tuple1(true) => // unreachable
+ case _ =>
+ }
+}
+
+
+sealed abstract class B;
+case object True extends B;
+case object False extends B;
+
+object Test2 {
+
+ val X: B = True
+ val Y: B = False
+
+ (null: Tuple1[B]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) =>
+ case Tuple1(X) => // unreachable
+ case _ =>
+ }
+}
+
+object Test3 {
+ (null: Tuple1[B]) match {
+ case Tuple1(null) =>
+ case Tuple1(True) =>
+ case Tuple1(null) => // unreachable
+ case _ =>
+ }
+}
diff --git a/test/files/neg/t7385.check b/test/files/neg/t7385.check
new file mode 100644
index 0000000000..70d3c3fb61
--- /dev/null
+++ b/test/files/neg/t7385.check
@@ -0,0 +1,10 @@
+t7385.scala:2: error: '(' expected but identifier found.
+ do { println("bippy") } while i<10
+ ^
+t7385.scala:6: error: '(' expected but identifier found.
+ while i<10 { () }
+ ^
+t7385.scala:7: error: illegal start of simple expression
+}
+^
+three errors found
diff --git a/test/files/neg/t7385.scala b/test/files/neg/t7385.scala
new file mode 100644
index 0000000000..a7f801098b
--- /dev/null
+++ b/test/files/neg/t7385.scala
@@ -0,0 +1,7 @@
+object Bar {
+ do { println("bippy") } while i<10
+}
+
+object Foo {
+ while i<10 { () }
+}
diff --git a/test/files/neg/t7388.check b/test/files/neg/t7388.check
new file mode 100644
index 0000000000..0a29e04896
--- /dev/null
+++ b/test/files/neg/t7388.check
@@ -0,0 +1,4 @@
+t7388.scala:1: error: doesnotexist is not an enclosing class
+class Test private[doesnotexist]()
+ ^
+one error found
diff --git a/test/files/neg/t7388.scala b/test/files/neg/t7388.scala
new file mode 100644
index 0000000000..9ce9ea11b3
--- /dev/null
+++ b/test/files/neg/t7388.scala
@@ -0,0 +1 @@
+class Test private[doesnotexist]()
diff --git a/test/files/neg/t7441.check b/test/files/neg/t7441.check
new file mode 100644
index 0000000000..f259457197
--- /dev/null
+++ b/test/files/neg/t7441.check
@@ -0,0 +1,6 @@
+t7441.scala:4: error: type mismatch;
+ found : Int(1)
+ required: List[Any]
+ def test = apply(1)
+ ^
+one error found
diff --git a/test/files/neg/t7441.scala b/test/files/neg/t7441.scala
new file mode 100644
index 0000000000..dad7421e3f
--- /dev/null
+++ b/test/files/neg/t7441.scala
@@ -0,0 +1,7 @@
+object Test {
+ object Bar {
+ def apply(xs: List[Any]): Int = 0
+ def test = apply(1)
+ }
+ implicit def foo = 1
+}
diff --git a/test/files/pos/spec-t6286.scala b/test/files/pos/spec-t6286.scala
new file mode 100755
index 0000000000..4d87998ec6
--- /dev/null
+++ b/test/files/pos/spec-t6286.scala
@@ -0,0 +1,10 @@
+trait Foo[@specialized(Int) A] {
+ def fun[@specialized(Int) B](init: B)(f: (B, A) => B): B
+}
+
+class Bar(values: Array[Int]) extends Foo[Int] {
+ def fun[@specialized(Int) C](init: C)(f: (C, Int) => C): C = {
+ val arr = values
+ f(init, arr(0))
+ }
+}
diff --git a/test/files/pos/t3120/J1.java b/test/files/pos/t3120/J1.java
new file mode 100644
index 0000000000..12b23c1c98
--- /dev/null
+++ b/test/files/pos/t3120/J1.java
@@ -0,0 +1,4 @@
+class J1 {
+ public class Inner1 { }
+ public static class Inner2 { }
+}
diff --git a/test/files/pos/t3120/J2.java b/test/files/pos/t3120/J2.java
new file mode 100644
index 0000000000..db6e859020
--- /dev/null
+++ b/test/files/pos/t3120/J2.java
@@ -0,0 +1,4 @@
+public class J2 {
+ public void f1(J1.Inner1 p) { }
+ public void f2(J1.Inner2 p) { }
+}
diff --git a/test/files/pos/t3120/Q.java b/test/files/pos/t3120/Q.java
new file mode 100644
index 0000000000..fe2269308a
--- /dev/null
+++ b/test/files/pos/t3120/Q.java
@@ -0,0 +1,3 @@
+public class Q {
+ public static void passInner(J1.Inner1 myInner) {}
+}
diff --git a/test/files/pos/t3120/Test.scala b/test/files/pos/t3120/Test.scala
new file mode 100644
index 0000000000..c02146fba1
--- /dev/null
+++ b/test/files/pos/t3120/Test.scala
@@ -0,0 +1,3 @@
+object Test {
+ Q.passInner(null)
+}
diff --git a/test/files/pos/t5744/Macros_1.scala b/test/files/pos/t5744/Macros_1.scala
new file mode 100644
index 0000000000..288a88653d
--- /dev/null
+++ b/test/files/pos/t5744/Macros_1.scala
@@ -0,0 +1,22 @@
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+object Macros {
+ def foo[U: Numeric](x: U) = macro foo_impl[U]
+ def bar[U: Numeric : Equiv, Y <% String](x: U)(implicit s: String) = macro bar_impl[U, Y]
+
+ def foo_impl[U](c: Context)(x: c.Expr[U])(numeric: c.Expr[Numeric[U]]) = {
+ import c.universe._
+ val plusOne = Apply(Select(numeric.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusOne))
+ c.Expr[Unit](body)
+ }
+
+ def bar_impl[U, Y](c: Context)(x: c.Expr[U])(numeric: c.Expr[Numeric[U]], equiv: c.Expr[Equiv[U]], viewAsString: c.Expr[Y => String], s: c.Expr[String]) = {
+ import c.universe._
+ val plusOne = Apply(Select(numeric.tree, newTermName("plus")), List(x.tree, Literal(Constant(1))))
+ val plusLen = Apply(Select(numeric.tree, newTermName("plus")), List(plusOne, Select(s.tree, newTermName("length"))))
+ val body = Apply(Select(Ident(definitions.PredefModule), newTermName("println")), List(plusLen))
+ c.Expr[Unit](body)
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t5744/Test_2.scala b/test/files/pos/t5744/Test_2.scala
new file mode 100644
index 0000000000..64b57e6032
--- /dev/null
+++ b/test/files/pos/t5744/Test_2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ import Macros._
+ foo(42)
+ implicit val s = ""
+ bar(43)
+} \ No newline at end of file
diff --git a/test/files/pos/t6091.flags b/test/files/pos/t6091.flags
new file mode 100644
index 0000000000..954eaba352
--- /dev/null
+++ b/test/files/pos/t6091.flags
@@ -0,0 +1 @@
+-Xfatal-warnings -Xlint
diff --git a/test/files/pos/t6091.scala b/test/files/pos/t6091.scala
new file mode 100644
index 0000000000..72e663ec3b
--- /dev/null
+++ b/test/files/pos/t6091.scala
@@ -0,0 +1,10 @@
+object Foo { def eq(x:Int) = x }
+
+class X { def ==(other: String) = true }
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ Foo eq 1
+ new X == null
+ }
+}
diff --git a/test/files/pos/t6210.flags b/test/files/pos/t6210.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6210.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6210.scala b/test/files/pos/t6210.scala
new file mode 100644
index 0000000000..1ce8493872
--- /dev/null
+++ b/test/files/pos/t6210.scala
@@ -0,0 +1,21 @@
+abstract sealed trait AST
+abstract sealed trait AExpr extends AST
+case class AAssign(name: String, v: AExpr) extends AExpr
+case class AConstBool(v: Boolean) extends AExpr
+
+trait Ty {}
+case class TInt() extends Ty
+case class TBool() extends Ty
+
+object Foo {
+ def checkExpr(ast: AExpr): Ty = {
+ var astTy:Ty = ast match {
+ case AAssign(nm: String, v:AExpr) => TBool()
+
+ case AConstBool(v: Boolean) => TBool()
+
+ case _ => throw new Exception(s"Unhandled case check(ast: ${ast.getClass})")
+ }
+ astTy
+ }
+}
diff --git a/test/files/pos/t6225.scala b/test/files/pos/t6225.scala
new file mode 100644
index 0000000000..d3d30d9e16
--- /dev/null
+++ b/test/files/pos/t6225.scala
@@ -0,0 +1,20 @@
+
+package library.x {
+ class X {
+ class Foo
+ implicit val foo: Foo = new Foo
+ }
+}
+package library {
+ package object y extends library.x.X
+}
+
+object ko {
+ import library.y.{Foo, foo}
+ implicitly[Foo]
+}
+
+object ko2 {
+ import library.y._
+ implicitly[Foo]
+}
diff --git a/test/files/pos/t6386.scala b/test/files/pos/t6386.scala
new file mode 100644
index 0000000000..85098a78f0
--- /dev/null
+++ b/test/files/pos/t6386.scala
@@ -0,0 +1,5 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ reify(manifest[Some[_]])
+} \ No newline at end of file
diff --git a/test/files/pos/t6514.scala b/test/files/pos/t6514.scala
new file mode 100644
index 0000000000..7c58605d39
--- /dev/null
+++ b/test/files/pos/t6514.scala
@@ -0,0 +1,11 @@
+object Test {
+ def e(msg: String) = new Exception(msg)
+
+ // this code ain't dead.
+ def a(b: Boolean) = {
+ b match {
+ case true => throw e("true")
+ case false => throw e("false")
+ }
+ }
+}
diff --git a/test/files/pos/t6675.flags b/test/files/pos/t6675.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6675.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6675.scala b/test/files/pos/t6675.scala
new file mode 100644
index 0000000000..f3bebea5be
--- /dev/null
+++ b/test/files/pos/t6675.scala
@@ -0,0 +1,20 @@
+object LeftOrRight {
+ def unapply[A](value: Either[A, A]): Option[A] = value match {
+ case scala.Left(x) => Some(x)
+ case scala.Right(x) => Some(x)
+ }
+}
+
+object Test {
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight(pair @ (a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight((a, b)) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+
+ (Left((0, 0)): Either[(Int, Int), (Int, Int)]) match {
+ case LeftOrRight(a, b) => a // false -Xlint warning: "extractor pattern binds a single value to a Product2 of type (Int, Int)"
+ }
+}
diff --git a/test/files/pos/t6771.flags b/test/files/pos/t6771.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t6771.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t6771.scala b/test/files/pos/t6771.scala
new file mode 100644
index 0000000000..0f0bd4e4a0
--- /dev/null
+++ b/test/files/pos/t6771.scala
@@ -0,0 +1,9 @@
+object Test {
+ type Id[X] = X
+ val a: Id[Option[Int]] = None
+
+ a match {
+ case Some(x) => println(x)
+ case None =>
+ }
+}
diff --git a/test/files/pos/t6921.scala b/test/files/pos/t6921.scala
new file mode 100644
index 0000000000..36e70e5d2c
--- /dev/null
+++ b/test/files/pos/t6921.scala
@@ -0,0 +1,11 @@
+class Message(messageType: String, reason: Option[String])
+
+class ReproForSI6921 {
+
+ private[this] var reason = ""
+
+ def decideElection = {
+ val explanation = None
+ new Message("", reason = explanation)
+ }
+}
diff --git a/test/files/pos/t7091.scala b/test/files/pos/t7091.scala
new file mode 100644
index 0000000000..72e81a2ea8
--- /dev/null
+++ b/test/files/pos/t7091.scala
@@ -0,0 +1,7 @@
+package p1.p2
+
+protected[p2] class C(var x: Int = 0)
+
+protected[p2] trait T {
+ new C()
+}
diff --git a/test/files/pos/t7190.scala b/test/files/pos/t7190.scala
new file mode 100644
index 0000000000..f7ccded1b4
--- /dev/null
+++ b/test/files/pos/t7190.scala
@@ -0,0 +1,26 @@
+import scala.language.experimental.macros
+import scala.reflect.macros._
+
+trait A[T] {
+ def min[U >: T](implicit ord: Numeric[U]): T = macro A.min[T, U]
+}
+
+object A {
+ def min[T: c.WeakTypeTag, U >: T: c.WeakTypeTag](c: Context)(ord: c.Expr[Numeric[U]]): c.Expr[T] = {
+ c.universe.reify {
+ ord.splice.zero.asInstanceOf[T]
+ }
+ }
+}
+
+class B extends A[Int] {
+ override def min[U >: Int](implicit ord: Numeric[U]): Int = macro B.min[U]
+}
+
+object B {
+ def min[U >: Int: c.WeakTypeTag](c: Context)(ord: c.Expr[Numeric[U]]): c.Expr[Int] = {
+ c.universe.reify {
+ ord.splice.zero.asInstanceOf[Int]
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7200b.scala b/test/files/pos/t7200b.scala
new file mode 100644
index 0000000000..9d579c6ef9
--- /dev/null
+++ b/test/files/pos/t7200b.scala
@@ -0,0 +1,50 @@
+import language.higherKinds
+
+trait T {
+ def t = 0
+}
+trait Foo {
+ def coflatMap[A <: T](f: A): A
+}
+
+object O extends Foo {
+ def coflatMap[A <: T](f: A) = {
+ val f2 = coflatMap(f) // inferred in 2.9.2 / 2.10.0 as [Nothing]
+ f2.t // so this does't type check.
+ f2
+ }
+}
+
+// Why? When a return type is inherited, the derived method
+// symbol first gets a preliminary type assigned, based on the
+// 1) method type of a unique matching super member
+// 2) viewed as a member type of the inheritor (to substitute,
+// e.g. class type parameters)
+// 3) substituted to replace the super-method's type parameters
+// with those of the inheritor
+// 4) dissected to take just the return type wrapped in thisMethodType().
+//
+// In Scala 2.10.0 and earlier, this preliminary method type
+//
+// 1) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329
+// 2) [A#11329 <: <empty>#3.this.T#7068](<param> f#11333: A#11329)A#11329
+// 3) (<param> f#12556: A#11336)A#11336
+// 4) [A#11336 <: <empty>#3.this.T#7068](<param> f#12552: A#11337&0)A#11336
+//
+// The type #4 from the old version is problematic: the parameter is typed with
+// a skolem for the type parameter `A`. It won't be considered to match the
+// method it overrides, instead they are seen as being overloaded, and type inference
+// goes awry (Nothing is inferred as the type argument for the recursive call
+// to coflatMap.
+//
+// The Namers patch adds one step here: it subsitutes the type parameter symbols
+// for the skolems:
+//
+// https://github.com/scala/scala/commit/b74c33eb#L2R1014
+//
+// So we end up with a method symbol info:
+//
+// 5) [A#11336 <: <empty>#3.this.T#7068](<param> f#12505: A#11336)A#11336
+//
+// This *does* match the method in the super class, and type inference
+// chooses the correct type argument. \ No newline at end of file
diff --git a/test/files/pos/t7226.scala b/test/files/pos/t7226.scala
new file mode 100644
index 0000000000..06f0c95dc4
--- /dev/null
+++ b/test/files/pos/t7226.scala
@@ -0,0 +1,26 @@
+trait HK {
+ type Rep[X]
+
+ // okay
+ def unzip2[A, B](ps: Rep[List[(A, B)]])
+ unzip2(null.asInstanceOf[Rep[List[(Int, String)]]])
+
+ // okay
+ def unzipHK[A, B, C[_]](ps: Rep[C[(A, B)]])
+ unzipHK(null.asInstanceOf[Rep[List[(Int, String)]]])
+
+ def unzipHKRet0[A, C[_]](ps: C[A]): C[Int]
+ def ls: List[String]
+ unzipHKRet0(ls)
+
+ // fail
+ def unzipHKRet[A, C[_]](ps: Rep[C[A]]): Rep[C[Int]]
+ def rls: Rep[List[String]]
+ unzipHKRet(rls)
+}
+
+trait HK1 {
+ type Rep[A]
+ def unzip1[A, B, C[_]](ps: Rep[C[(A, B)]]): (Rep[C[A]], Rep[C[B]])
+ def doUnzip1[A, B](ps: Rep[List[(A, B)]]) = unzip1(ps)
+}
diff --git a/test/files/pos/t7232.flags b/test/files/pos/t7232.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t7232.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7232/Foo.java b/test/files/pos/t7232/Foo.java
new file mode 100644
index 0000000000..3478301b30
--- /dev/null
+++ b/test/files/pos/t7232/Foo.java
@@ -0,0 +1,9 @@
+package pack;
+
+import java.util.List;
+
+public class Foo {
+ public static java.util.List okay() { throw new Error(); }
+
+ public static List wrong() { throw new Error(); }
+}
diff --git a/test/files/pos/t7232/List.java b/test/files/pos/t7232/List.java
new file mode 100644
index 0000000000..e42c63aa67
--- /dev/null
+++ b/test/files/pos/t7232/List.java
@@ -0,0 +1,4 @@
+package pack;
+
+public class List {
+}
diff --git a/test/files/pos/t7232/Test.scala b/test/files/pos/t7232/Test.scala
new file mode 100644
index 0000000000..49c3c12aed
--- /dev/null
+++ b/test/files/pos/t7232/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ import pack._
+ Foo.okay().size()
+ Foo.wrong().size()
+}
diff --git a/test/files/pos/t7232b.flags b/test/files/pos/t7232b.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t7232b.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7232b/Foo.java b/test/files/pos/t7232b/Foo.java
new file mode 100644
index 0000000000..94f08d545e
--- /dev/null
+++ b/test/files/pos/t7232b/Foo.java
@@ -0,0 +1,8 @@
+package pack;
+
+import java.util.*;
+
+public class Foo {
+ // should be pack.List.
+ public static List list() { throw new Error(); }
+}
diff --git a/test/files/pos/t7232b/List.java b/test/files/pos/t7232b/List.java
new file mode 100644
index 0000000000..ce977152b9
--- /dev/null
+++ b/test/files/pos/t7232b/List.java
@@ -0,0 +1,5 @@
+package pack;
+
+public class List {
+ public void packList() {}
+}
diff --git a/test/files/pos/t7232b/Test.scala b/test/files/pos/t7232b/Test.scala
new file mode 100644
index 0000000000..6377e26bec
--- /dev/null
+++ b/test/files/pos/t7232b/Test.scala
@@ -0,0 +1,5 @@
+object Test {
+ import pack._
+
+ Foo.list().packList()
+}
diff --git a/test/files/pos/t7232c.flags b/test/files/pos/t7232c.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t7232c.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7232c/Foo.java b/test/files/pos/t7232c/Foo.java
new file mode 100644
index 0000000000..bbda09a2da
--- /dev/null
+++ b/test/files/pos/t7232c/Foo.java
@@ -0,0 +1,10 @@
+package pack;
+
+import java.util.List;
+
+public class Foo {
+ public static class List {
+ public void isInnerList() {}
+ }
+ public static List innerList() { throw new Error(); }
+}
diff --git a/test/files/pos/t7232c/Test.scala b/test/files/pos/t7232c/Test.scala
new file mode 100644
index 0000000000..aa7c710948
--- /dev/null
+++ b/test/files/pos/t7232c/Test.scala
@@ -0,0 +1,4 @@
+object Test {
+ import pack._
+ Foo.innerList().isInnerList()
+}
diff --git a/test/files/pos/t7232d.flags b/test/files/pos/t7232d.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t7232d.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7232d/Entry.java b/test/files/pos/t7232d/Entry.java
new file mode 100644
index 0000000000..0cfb6fb25b
--- /dev/null
+++ b/test/files/pos/t7232d/Entry.java
@@ -0,0 +1,4 @@
+package pack;
+
+public class Entry {
+}
diff --git a/test/files/pos/t7232d/Foo.java b/test/files/pos/t7232d/Foo.java
new file mode 100644
index 0000000000..df7114a0f0
--- /dev/null
+++ b/test/files/pos/t7232d/Foo.java
@@ -0,0 +1,8 @@
+package pack;
+
+import java.util.Map.Entry;
+
+public class Foo {
+ public static Entry mapEntry() { throw new Error(); }
+ public static void javaTest() { mapEntry().getKey(); }
+}
diff --git a/test/files/pos/t7232d/Test.scala b/test/files/pos/t7232d/Test.scala
new file mode 100644
index 0000000000..89a8063b3c
--- /dev/null
+++ b/test/files/pos/t7232d/Test.scala
@@ -0,0 +1,4 @@
+object Test {
+ import pack._
+ Foo.mapEntry().getKey()
+}
diff --git a/test/files/pos/t7233.scala b/test/files/pos/t7233.scala
new file mode 100644
index 0000000000..ae15c08c35
--- /dev/null
+++ b/test/files/pos/t7233.scala
@@ -0,0 +1,14 @@
+object Foo {
+ def bar(i: Int) = i
+
+ def ol(i: Int) = i
+ def ol(i: String) = i
+}
+object Test {
+ import Foo.{ bar => quux, toString => bar, ol => olRenamed}
+
+ val f1 = quux _
+ val f1Typed: (Int => Int) = f1
+
+ val f2: String => String = olRenamed _
+}
diff --git a/test/files/pos/t7233b.scala b/test/files/pos/t7233b.scala
new file mode 100644
index 0000000000..927c7fcfd1
--- /dev/null
+++ b/test/files/pos/t7233b.scala
@@ -0,0 +1,8 @@
+object Test {
+ // crash
+ def foo(a: Any) = { import a.{toString => toS}; toS }
+
+ // okay
+ def ok1(a: String) = { import a.{isInstanceOf => iio}; iio[String] }
+ def ok2(a: Int) = { import a.{toInt => ti}; ti }
+}
diff --git a/test/files/pos/t7234.scala b/test/files/pos/t7234.scala
new file mode 100644
index 0000000000..59a233d835
--- /dev/null
+++ b/test/files/pos/t7234.scala
@@ -0,0 +1,15 @@
+trait Main {
+ trait A {
+ type B
+ }
+ trait C {
+ def c(a: A, x: Int = 0)(b: a.B)
+ }
+ def c: C
+ def d(a: A, x: Int = 0)(b: a.B)
+
+ def ok1(a: A)(b: a.B) = c.c(a, 42)(b)
+ def ok2(a: A)(b: a.B) = d(a)(b)
+
+ def fail(a: A)(b: a.B) = c.c(a)(b)
+}
diff --git a/test/files/pos/t7234b.scala b/test/files/pos/t7234b.scala
new file mode 100644
index 0000000000..fee98e87a8
--- /dev/null
+++ b/test/files/pos/t7234b.scala
@@ -0,0 +1,20 @@
+trait Main {
+ trait A {
+ type B
+ def b: B
+ }
+ trait C {
+ def c(a: A, x: Int = 0)(b: => a.B, bs: a.B*)
+ def d(a: A = null, x: Int = 0)(b1: => a.B = a.b, b2: a.B = a.b)
+ }
+ def c: C
+ def ok(a: A)(b: a.B) = c.c(a, 42)(b)
+ def fail(a: A)(b: a.B) = c.c(a)(b)
+ def fail2(a: A)(b: a.B) = c.c(a)(b, b)
+ def fail3(a: A)(b: a.B) = c.c(a)(b, Seq[a.B](b): _*)
+
+ def fail4(a: A)(b: a.B) = c.d(a)()
+ def fail5(a: A)(b: a.B) = c.d(a)(b1 = a.b)
+ def fail6(a: A)(b: a.B) = c.d(a)(b2 = a.b)
+ def fail7(a: A)(b: a.B) = c.d()()
+}
diff --git a/test/files/pos/t7239.scala b/test/files/pos/t7239.scala
new file mode 100644
index 0000000000..16e9d00f17
--- /dev/null
+++ b/test/files/pos/t7239.scala
@@ -0,0 +1,38 @@
+object Test {
+ def BrokenMethod(): HasFilter[(Int, String)] = ???
+
+ trait HasFilter[B] {
+ def filter(p: B => Boolean) = ???
+ }
+
+ trait HasWithFilter {
+ def withFilter = ???
+ }
+
+ object addWithFilter {
+ trait NoImplicit
+ implicit def enrich(v: Any)
+ (implicit F0: NoImplicit): HasWithFilter = ???
+ }
+
+ BrokenMethod().withFilter(_ => true) // okay
+ BrokenMethod().filter(_ => true) // okay
+
+ locally {
+ import addWithFilter._
+ BrokenMethod().withFilter((_: (Int, String)) => true) // okay
+ }
+
+ locally {
+ import addWithFilter._
+ // adaptToMemberWithArgs sets the type of the tree `x`
+ // to ErrorType (while in silent mode, so the error is not
+ // reported. Later, when the fallback from `withFilter`
+ // to `filter` is attempted, the closure is taken to have
+ // have the type `<error> => Boolean`, which conforms to
+ // `(B => Boolean)`. Only later during pickling does the
+ // defensive check for erroneous types in the tree pick up
+ // the problem.
+ BrokenMethod().withFilter(x => true) // erroneous or inaccessible type.
+ }
+}
diff --git a/test/files/pos/t7285a.flags b/test/files/pos/t7285a.flags
new file mode 100644
index 0000000000..e8fb65d50c
--- /dev/null
+++ b/test/files/pos/t7285a.flags
@@ -0,0 +1 @@
+-Xfatal-warnings \ No newline at end of file
diff --git a/test/files/pos/t7285a.scala b/test/files/pos/t7285a.scala
new file mode 100644
index 0000000000..34e79c741b
--- /dev/null
+++ b/test/files/pos/t7285a.scala
@@ -0,0 +1,83 @@
+sealed abstract class Base
+
+object Test {
+ case object Up extends Base
+
+ def foo(d1: Base) =
+ d1 match {
+ case Up =>
+ }
+
+ // Sealed subtype: ModuleTypeRef <empty>.this.Test.Up.type
+ // Pattern: UniqueThisType Test.this.type
+}
+
+
+object Test1 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Up) => true
+ case (Up, Down) => false
+ }
+ }
+}
+
+object Test2 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ (d1: Base, d2: Base) =>
+ (d1) match {
+ case Up | Down => false
+ }
+ }
+}
+
+object Test3 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base
+
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Down, Down) => false
+ }
+ }
+}
+
+object Test4 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ }
+ import Test4.Base._
+ (d1: Base, d2: Base) =>
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Test4.Base.Up) => true
+ case (Up, Down) => false
+ }
+}
diff --git a/test/files/pos/t7329.scala b/test/files/pos/t7329.scala
new file mode 100644
index 0000000000..76bf1fb9f5
--- /dev/null
+++ b/test/files/pos/t7329.scala
@@ -0,0 +1 @@
+class TwoParamSpecializedWithDefault[@specialized A, @specialized B](a: A, b: B = (??? : B)) \ No newline at end of file
diff --git a/test/files/pos/t7369.flags b/test/files/pos/t7369.flags
new file mode 100644
index 0000000000..85d8eb2ba2
--- /dev/null
+++ b/test/files/pos/t7369.flags
@@ -0,0 +1 @@
+-Xfatal-warnings
diff --git a/test/files/pos/t7369.scala b/test/files/pos/t7369.scala
new file mode 100644
index 0000000000..2f31c93d29
--- /dev/null
+++ b/test/files/pos/t7369.scala
@@ -0,0 +1,37 @@
+object Test {
+ val X, Y = true
+ (null: Tuple1[Boolean]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) => // unreachable
+ case _ =>
+ }
+}
+
+
+sealed abstract class B;
+case object True extends B;
+case object False extends B;
+
+object Test2 {
+
+ val X: B = True
+ val Y: B = False
+
+ (null: Tuple1[B]) match {
+ case Tuple1(X) =>
+ case Tuple1(Y) => // no warning
+ case _ =>
+ }
+}
+
+object Test3 {
+ val X, O = true
+ def classify(neighbourhood: (Boolean, Boolean, Boolean)): String = {
+ neighbourhood match {
+ case (X, X, X) => "middle"
+ case (X, X, O) => "right"
+ case (O, X, X) => "left"
+ case _ => throw new IllegalArgumentException("Invalid")
+ }
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t7377/Client_2.scala b/test/files/pos/t7377/Client_2.scala
new file mode 100644
index 0000000000..5728956cca
--- /dev/null
+++ b/test/files/pos/t7377/Client_2.scala
@@ -0,0 +1,11 @@
+object Test {
+ M.noop(List(1) match { case Nil => 0; case (x::xs) => x })
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ M.noop(Foo(0) match { case FooAlias(_) => 0 })
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ M.noop(Bar() match { case BarAlias() => 0 })
+}
diff --git a/test/files/pos/t7377/Macro_1.scala b/test/files/pos/t7377/Macro_1.scala
new file mode 100644
index 0000000000..a0ec1d84af
--- /dev/null
+++ b/test/files/pos/t7377/Macro_1.scala
@@ -0,0 +1,7 @@
+import language.experimental._
+import reflect.macros.Context
+
+object M {
+ def noopImpl[A](c: Context)(expr: c.Expr[A]): c.Expr[A] = c.Expr(c.typeCheck(c.resetLocalAttrs(expr.tree)))
+ def noop[A](expr: A): A = macro noopImpl[A]
+}
diff --git a/test/files/pos/t7377b.flags b/test/files/pos/t7377b.flags
new file mode 100644
index 0000000000..cb8324a345
--- /dev/null
+++ b/test/files/pos/t7377b.flags
@@ -0,0 +1 @@
+-Xoldpatmat \ No newline at end of file
diff --git a/test/files/pos/t7377b.scala b/test/files/pos/t7377b.scala
new file mode 100644
index 0000000000..aeee800d57
--- /dev/null
+++ b/test/files/pos/t7377b.scala
@@ -0,0 +1,13 @@
+object Test {
+ List(1) match { case Nil => 0; case (x::xs) => x }
+
+ case class Foo(a: Int)
+ val FooAlias: Foo.type = Foo
+ Foo(0) match { case FooAlias(_) => 0 }
+ Foo(0) match { case Foo(_) => 0 }
+
+ case class Bar()
+ val BarAlias: Bar.type = Bar
+ Bar() match { case BarAlias() => 0 }
+ Bar() match { case Bar() => 0 }
+}
diff --git a/test/files/pos/t7426.scala b/test/files/pos/t7426.scala
new file mode 100644
index 0000000000..8e42ad1812
--- /dev/null
+++ b/test/files/pos/t7426.scala
@@ -0,0 +1,3 @@
+class foo(x: Any) extends annotation.StaticAnnotation
+
+@foo(new AnyRef { }) trait A
diff --git a/test/files/pos/xlint1.flags b/test/files/pos/xlint1.flags
new file mode 100644
index 0000000000..7949c2afa2
--- /dev/null
+++ b/test/files/pos/xlint1.flags
@@ -0,0 +1 @@
+-Xlint -Xfatal-warnings
diff --git a/test/files/pos/xlint1.scala b/test/files/pos/xlint1.scala
new file mode 100644
index 0000000000..27936d8b14
--- /dev/null
+++ b/test/files/pos/xlint1.scala
@@ -0,0 +1,13 @@
+package object foo {
+ implicit class Bar[T](val x: T) extends AnyVal {
+ def bippy = 1
+ }
+}
+
+package foo {
+ object Baz {
+ def main(args: Array[String]): Unit = {
+ "abc".bippy
+ }
+ }
+}
diff --git a/test/files/presentation/doc.check b/test/files/presentation/doc.check
index e33756773d..5a3ff13151 100644
--- a/test/files/presentation/doc.check
+++ b/test/files/presentation/doc.check
@@ -1,49 +1 @@
-reload: Test.scala
-body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
-))))))
-@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
-@version:
-@since:
-@todo:
-@note:
-@see:
-body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
-))))))
-@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
-@version:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(1), Text(.)))), Text(0, 09/07/2012))))))
-@since:
-@todo:
-@note:
-@see:
-body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
-))))))
-@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
-@version:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(1), Text(.)))), Text(0, 09/07/2012))))))
-@since:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(2), Text(.)))), Text(10))))))
-@todo:
-@note:
-@see:
-body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
-))))))
-@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
-@version:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(1), Text(.)))), Text(0, 09/07/2012))))))
-@since:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(2), Text(.)))), Text(10))))))
-@todo:Body(List(Paragraph(Chain(List(Summary(Text(this method is unsafe)))))))
-@note:
-@see:
-body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
-))))))
-@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
-@version:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(1), Text(.)))), Text(0, 09/07/2012))))))
-@since:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(2), Text(.)))), Text(10))))))
-@todo:Body(List(Paragraph(Chain(List(Summary(Text(this method is unsafe)))))))
-@note:Body(List(Paragraph(Chain(List(Summary(Text(Don't inherit!)))))))
-@see:
-body:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(This is a test comment), Text(.)))), Text(
-))))))
-@example:Body(List(Paragraph(Chain(List(Summary(Monospace(Text("abb".permutations = Iterator(abb, bab, bba)))))))))
-@version:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(1), Text(.)))), Text(0, 09/07/2012))))))
-@since:Body(List(Paragraph(Chain(List(Summary(Chain(List(Text(2), Text(.)))), Text(10))))))
-@todo:Body(List(Paragraph(Chain(List(Summary(Text(this method is unsafe)))))))
-@note:Body(List(Paragraph(Chain(List(Summary(Text(Don't inherit!)))))))
-@see:Body(List(Paragraph(Chain(List(Summary(Text(some other method)))))))
+reload: Base.scala, Class.scala, Derived.scala
diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala
index 475d92b861..371b825026 100755
--- a/test/files/presentation/doc/doc.scala
+++ b/test/files/presentation/doc/doc.scala
@@ -4,45 +4,56 @@ import scala.tools.nsc.doc.base.comment._
import scala.tools.nsc.interactive._
import scala.tools.nsc.interactive.tests._
import scala.tools.nsc.util._
-import scala.tools.nsc.io._
object Test extends InteractiveTest {
- override val settings: doc.Settings = docSettings
-
val tags = Seq(
"@example `\"abb\".permutations = Iterator(abb, bab, bba)`",
"@version 1.0, 09/07/2012",
"@since 2.10",
- "@todo this method is unsafe",
+ "@todo this is unsafe!",
"@note Don't inherit!",
- "@see some other method"
+ "@see something else"
)
- val comment = "This is a test comment."
- val caret = "<caret>"
+ val names = Seq("Class", "Def", "Val", "Var", "AbstracType", "TypeAlias", "Trait", "InnerClass")
+ val bareText =
+ """abstract class %s {
+ | def %s = ""
+ | val %s = ""
+ | var %s: String = _
+ | type %s
+ | type %s = String
+ | class %s
+ |}
+ |trait %s""".stripMargin.format(names: _*)
+
+ def docComment(nTags: Int) = "/**\n%s*/".format(tags.take(nTags).mkString("\n"))
+
+ def text(name: String, nTags: Int) = {
+ val nameIndex = bareText.indexOf(name)
+ val (pre, post) = bareText.splitAt(nameIndex)
+ val crIndex = pre.lastIndexOf("\n")
+ val (prepre, prepost) = pre.splitAt(crIndex)
+ prepre + docComment(nTags) + prepost + post
+ }
+
- def text(nTags: Int) =
- """|/** %s
- |
- | * %s */
- |trait Commented {}
- |class User(c: %sCommented)""".stripMargin.format(comment, tags take nTags mkString "\n", caret)
override lazy val compiler = {
- new {
- override val settings = {
- prepareSettings(Test.this.settings)
- Test.this.settings
- }
- } with Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
+ prepareSettings(settings)
+ new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase {
val global: this.type = this
def chooseLink(links: List[LinkTo]): LinkTo = links.head
def internalLink(sym: Symbol, site: Symbol) = None
def toString(link: LinkTo) = link.toString
+ def warnNoLink = false
+ def findExternalLink(sym: Symbol, name: String) = None
+
+ override def forScaladoc = true
- def getComment(sym: Symbol, source: SourceFile) = {
+ def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = {
val docResponse = new Response[(String, String, Position)]
- askDocComment(sym, sym.owner, source, docResponse)
+ askDocComment(sym, source, sym.owner, fragments, docResponse)
docResponse.get.left.toOption flatMap {
case (expanded, raw, pos) =>
if (expanded.isEmpty)
@@ -55,37 +66,74 @@ object Test extends InteractiveTest {
}
override def runDefaultTests() {
- for (i <- 1 to tags.length) {
- val markedText = text(i)
- val idx = markedText.indexOf(caret)
- val fileText = markedText.substring(0, idx) + markedText.substring(idx + caret.length)
- val source = sourceFiles(0)
- val batch = new BatchSourceFile(source.file, fileText.toCharArray)
+ import compiler._
+ def findSource(name: String) = sourceFiles.find(_.file.name == name).get
+
+ val className = names.head
+ for (name <- names;
+ i <- 1 to tags.length) {
+ val newText = text(name, i)
+ val source = findSource("Class.scala")
+ val batch = new BatchSourceFile(source.file, newText.toCharArray)
val reloadResponse = new Response[Unit]
compiler.askReload(List(batch), reloadResponse)
reloadResponse.get.left.toOption match {
case None =>
- reporter.println("Couldn't reload")
+ println("Couldn't reload")
case Some(_) =>
- val treeResponse = new compiler.Response[compiler.Tree]
- val pos = compiler.rangePos(batch, idx, idx, idx)
- compiler.askTypeAt(pos, treeResponse)
- treeResponse.get.left.toOption match {
- case Some(tree) =>
- val sym = tree.tpe.typeSymbol
- compiler.getComment(sym, batch) match {
- case None => println("Got no doc comment")
+ val parseResponse = new Response[Tree]
+ askParsedEntered(batch, true, parseResponse)
+ parseResponse.get.left.toOption match {
+ case None =>
+ println("Couldn't parse")
+ case Some(_) =>
+ val sym = compiler.ask { () =>
+ val toplevel = definitions.EmptyPackage.info.decl(newTypeName(name))
+ if (toplevel eq NoSymbol) {
+ val clazz = definitions.EmptyPackage.info.decl(newTypeName(className))
+
+ val term = clazz.info.decl(newTermName(name))
+ if (term eq NoSymbol) clazz.info.decl(newTypeName(name)) else
+ if (term.isAccessor) term.accessed else term
+ } else toplevel
+ }
+
+ getComment(sym, batch, (sym,batch)::Nil) match {
+ case None => println(s"Got no doc comment for $name")
case Some(comment) =>
import comment._
- val tags: List[(String, Iterable[Body])] =
- List(("@example", example), ("@version", version), ("@since", since.toList), ("@todo", todo), ("@note", note), ("@see", see))
- val str = ("body:" + body + "\n") +
- tags.map{ case (name, bodies) => name + ":" + bodies.mkString("\n") }.mkString("\n")
- println(str)
+ def cnt(bodies: Iterable[Body]) = bodies.size
+ val actual = cnt(example) + cnt(version) + cnt(since) + cnt(todo) + cnt(note) + cnt(see)
+ if (actual != i)
+ println(s"Got docComment with $actual tags instead of $i, file text:\n$newText")
}
- case None => println("Couldn't find a typedTree")
}
}
}
+
+ // Check that cross-class documentation is correctly retrieved in a single pass.
+ val baseSource = findSource("Base.scala")
+ val derivedSource = findSource("Derived.scala")
+ def existsText(where: Any, text: String): Boolean = where match {
+ case `text` => true
+ case s: Seq[_] => s exists (existsText(_, text))
+ case p: Product => p.productIterator exists (existsText(_, text))
+ }
+ val (derived, base) = compiler.ask { () =>
+ val derived = definitions.RootPackage.info.decl(newTermName("p")).info.decl(newTypeName("Derived"))
+ (derived, derived.ancestors(0))
+ }
+ val cmt1 = getComment(derived, derivedSource, (base, baseSource)::(derived, derivedSource)::Nil)
+ if (!existsText(cmt1, "Derived comment."))
+ println("Unexpected Derived class comment:"+cmt1)
+
+ val (fooDerived, fooBase) = compiler.ask { () =>
+ val decl = derived.tpe.decl(newTermName("foo"))
+ (decl, decl.allOverriddenSymbols(0))
+ }
+
+ val cmt2 = getComment(fooDerived, derivedSource, (fooBase, baseSource)::(fooDerived, derivedSource)::Nil)
+ if (!existsText(cmt2, "Base method has documentation."))
+ println("Unexpected foo method comment:"+cmt2)
}
}
diff --git a/test/files/presentation/doc/src/Class.scala b/test/files/presentation/doc/src/Class.scala
new file mode 100755
index 0000000000..a974bd6f5c
--- /dev/null
+++ b/test/files/presentation/doc/src/Class.scala
@@ -0,0 +1 @@
+object Class \ No newline at end of file
diff --git a/test/files/presentation/doc/src/Test.scala b/test/files/presentation/doc/src/Test.scala
deleted file mode 100755
index fcc1554994..0000000000
--- a/test/files/presentation/doc/src/Test.scala
+++ /dev/null
@@ -1 +0,0 @@
-object Test \ No newline at end of file
diff --git a/test/files/presentation/doc/src/p/Base.scala b/test/files/presentation/doc/src/p/Base.scala
new file mode 100755
index 0000000000..9031de3e3e
--- /dev/null
+++ b/test/files/presentation/doc/src/p/Base.scala
@@ -0,0 +1,11 @@
+package p
+
+/**
+ * @define BaseComment $BaseVar comment.
+ */
+trait Base {
+ /**
+ * Base method has documentation.
+ */
+ def foo: String
+}
diff --git a/test/files/presentation/doc/src/p/Derived.scala b/test/files/presentation/doc/src/p/Derived.scala
new file mode 100755
index 0000000000..1a9c9a26d1
--- /dev/null
+++ b/test/files/presentation/doc/src/p/Derived.scala
@@ -0,0 +1,9 @@
+package p
+
+/**
+ * $BaseComment
+ * @define BaseVar Derived
+ */
+class Derived extends Base {
+ def foo = ""
+}
diff --git a/test/files/presentation/hyperlinks.check b/test/files/presentation/hyperlinks.check
index 85d295dd7d..1051b67e85 100644
--- a/test/files/presentation/hyperlinks.check
+++ b/test/files/presentation/hyperlinks.check
@@ -1,4 +1,4 @@
-reload: NameDefaultTests.scala, PatMatTests.scala
+reload: NameDefaultTests.scala, PatMatTests.scala, SuperTypes.scala
askHyperlinkPos for `someOtherInt` at (14,24) NameDefaultTests.scala
================================================================================
@@ -44,3 +44,138 @@ askHyperlinkPos for `y` at (25,21) PatMatTests.scala
================================================================================
[response] found askHyperlinkPos for `y` at (23,13) PatMatTests.scala
================================================================================
+
+askHyperlinkPos for `BadPos` at (10,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `BadPos` at (2,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `BadPos` at (11,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `BadPos` at (2,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Trait` at (12,25) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Trait` at (6,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `SubTrait` at (13,28) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `SubTrait` at (7,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Trait` at (14,25) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Trait` at (6,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `LateralTrait` at (14,48) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `LateralTrait` at (8,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Base` at (15,24) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Base` at (4,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `Trait` at (15,40) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `Trait` at (6,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `LateralTrait` at (15,63) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `LateralTrait` at (8,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (19,29) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (20,33) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (21,36) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (23,27) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PSubTrait` at (24,30) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PSubTrait` at (20,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (25,27) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (25,56) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (26,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (26,48) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (26,77) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `BadPos` at (28,23) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `BadPos` at (2,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (29,23) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PSubTrait` at (30,26) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PSubTrait` at (20,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (31,23) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (31,52) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PBase` at (32,22) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PBase` at (17,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PTrait` at (32,44) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PTrait` at (19,7) SuperTypes.scala
+================================================================================
+
+askHyperlinkPos for `PLateralTrait` at (32,73) SuperTypes.scala
+================================================================================
+[response] found askHyperlinkPos for `PLateralTrait` at (21,7) SuperTypes.scala
+================================================================================
diff --git a/test/files/presentation/hyperlinks/src/SuperTypes.scala b/test/files/presentation/hyperlinks/src/SuperTypes.scala
new file mode 100644
index 0000000000..15d16069fd
--- /dev/null
+++ b/test/files/presentation/hyperlinks/src/SuperTypes.scala
@@ -0,0 +1,32 @@
+/** This tests that hyperlinking works for super types. See SI-7224 */
+class BadPos[A](a: A)
+
+class Base
+
+trait Trait extends Base
+trait SubTrait extends Trait
+trait LateralTrait extends Base
+
+object obj1 extends BadPos/*#*/(new Object)
+object obj2 extends BadPos/*#*/[AnyRef](new Object)
+object obj3 extends Trait/*#*/
+object obj4 extends SubTrait/*#*/
+object obj5 extends Trait/*#*/ with LateralTrait/*#*/
+object obj6 extends Base/*#*/ with Trait/*#*/ with LateralTrait/*#*/
+
+class PBase[A]
+
+trait PTrait[A] extends PBase/*#*/[A]
+trait PSubTrait[A] extends PTrait/*#*/[A]
+trait PLateralTrait[A] extends PBase/*#*/[A]
+
+object pobj2 extends PTrait/*#*/[Int]
+object pobj3 extends PSubTrait/*#*/[Int]
+object pobj4 extends PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
+object pobj5 extends PBase/*#*/[Int] with PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
+
+class c1 extends BadPos/*#*/(new Object)
+class c2 extends PTrait/*#*/[Int]
+class c3 extends PSubTrait/*#*/[Int]
+class c4 extends PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
+class c5 extends PBase/*#*/[Int] with PTrait/*#*/[Int] with PLateralTrait/*#*/[Int]
diff --git a/test/files/presentation/ide-t1000567.check b/test/files/presentation/ide-t1000567.check
new file mode 100644
index 0000000000..6d62cb3e13
--- /dev/null
+++ b/test/files/presentation/ide-t1000567.check
@@ -0,0 +1 @@
+reload: a.scala, b.scala
diff --git a/test/files/presentation/ide-t1000567/Runner.scala b/test/files/presentation/ide-t1000567/Runner.scala
new file mode 100644
index 0000000000..77a618b60a
--- /dev/null
+++ b/test/files/presentation/ide-t1000567/Runner.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.interactive.tests.InteractiveTest
+
+// also known as SI-5013
+
+object Test extends InteractiveTest {
+
+ override def runDefaultTests(): Unit = {
+ val a = sourceFiles.find(_.file.name == "a.scala").head
+ val b = sourceFiles.find(_.file.name == "b.scala").head
+ askLoadedTyped(a).get
+ askLoadedTyped(b).get
+ super.runDefaultTests()
+ }
+
+}
diff --git a/test/files/presentation/ide-t1000567/src/a/a.scala b/test/files/presentation/ide-t1000567/src/a/a.scala
new file mode 100644
index 0000000000..ee21112a3e
--- /dev/null
+++ b/test/files/presentation/ide-t1000567/src/a/a.scala
@@ -0,0 +1,5 @@
+package a
+
+class Foo {
+ protected[Foo] var x = 0
+}
diff --git a/test/files/presentation/ide-t1000567/src/b/b.scala b/test/files/presentation/ide-t1000567/src/b/b.scala
new file mode 100644
index 0000000000..a0185b15cf
--- /dev/null
+++ b/test/files/presentation/ide-t1000567/src/b/b.scala
@@ -0,0 +1,5 @@
+package b
+
+class Bar extends a.Foo {
+ println(x)
+}
diff --git a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
index 159097cc10..950569c880 100644
--- a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
+++ b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala
@@ -25,7 +25,7 @@ import scala.tools.nsc.doc
object Test extends InteractiveTest {
final val mega = 1024 * 1024
- override val settings: doc.Settings = docSettings
+ override val withDocComments = true
override def execute(): Unit = memoryConsumptionTest()
diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala
new file mode 100644
index 0000000000..9b1e612f4f
--- /dev/null
+++ b/test/files/run/classfile-format-51.scala
@@ -0,0 +1,126 @@
+import java.io.{File, FileOutputStream}
+
+import scala.tools.nsc.settings.ScalaVersion
+import scala.tools.partest._
+import scala.tools.asm
+import asm.{AnnotationVisitor, ClassWriter, FieldVisitor, Handle, MethodVisitor, Opcodes}
+import Opcodes._
+
+// This test ensures that we can read JDK 7 (classfile format 51) files, including those
+// with invokeDynamic instructions and associated constant pool entries
+// to do that it first uses ASM to generate a class called DynamicInvoker. Then
+// it runs a normal compile on the source in the 'code' field that refers to
+// DynamicInvoker. Any failure will be dumped to std out.
+//
+// By its nature the test can only work on JDK 7+ because under JDK 6 some of the
+// classes referred to by DynamicInvoker won't be available and DynamicInvoker won't
+// verify. So the test includes a version check that short-circuits the whole test
+// on JDK 6
+object Test extends DirectTest {
+ override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path
+
+ def generateClass() {
+ val invokerClassName = "DynamicInvoker"
+ val bootstrapMethodName = "bootstrap"
+ val bootStrapMethodType = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;"
+ val targetMethodName = "target"
+ val targetMethodType = "()Ljava/lang/String;"
+
+ val cw = new ClassWriter(0)
+ cw.visit(V1_7, ACC_PUBLIC + ACC_SUPER, invokerClassName, null, "java/lang/Object", null)
+
+ val constructor = cw.visitMethod(ACC_PUBLIC, "<init>", "()V", null, null)
+ constructor.visitCode()
+ constructor.visitVarInsn(ALOAD, 0)
+ constructor.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V")
+ constructor.visitInsn(RETURN)
+ constructor.visitMaxs(1, 1)
+ constructor.visitEnd()
+
+ val target = cw.visitMethod(ACC_PUBLIC + ACC_STATIC, targetMethodName, targetMethodType, null, null)
+ target.visitCode()
+ target.visitLdcInsn("hello")
+ target.visitInsn(ARETURN)
+ target.visitMaxs(1, 1)
+ target.visitEnd()
+
+ val bootstrap = cw.visitMethod(ACC_PUBLIC + ACC_STATIC, bootstrapMethodName, bootStrapMethodType, null, null)
+ bootstrap.visitCode()
+// val lookup = MethodHandles.lookup();
+ bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodHandles", "lookup", "()Ljava/lang/invoke/MethodHandles$Lookup;")
+ bootstrap.visitVarInsn(ASTORE, 3) // lookup
+
+// val clazz = lookup.lookupClass();
+ bootstrap.visitVarInsn(ALOAD, 3) // lookup
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;")
+ bootstrap.visitVarInsn(ASTORE, 4) // clazz
+
+// val methodType = MethodType.fromMethodDescriptorString("()Ljava/lang/String, clazz.getClassLoader()")
+ bootstrap.visitLdcInsn("()Ljava/lang/String;")
+ bootstrap.visitVarInsn(ALOAD, 4) // clazz
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/Class", "getClassLoader", "()Ljava/lang/ClassLoader;")
+ bootstrap.visitMethodInsn(INVOKESTATIC, "java/lang/invoke/MethodType", "fromMethodDescriptorString", "(Ljava/lang/String;Ljava/lang/ClassLoader;)Ljava/lang/invoke/MethodType;")
+ bootstrap.visitVarInsn(ASTORE, 5) // methodType
+
+// val methodHandle = lookup.findStatic(thisClass, "target", methodType)
+ bootstrap.visitVarInsn(ALOAD, 3) // lookup
+ bootstrap.visitVarInsn(ALOAD, 4) // clazz
+ bootstrap.visitLdcInsn("target")
+ bootstrap.visitVarInsn(ALOAD, 5) // methodType
+ bootstrap.visitMethodInsn(INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;")
+ bootstrap.visitVarInsn(ASTORE, 6) // methodHandle
+
+// new ConstantCallSite(methodHandle)
+ bootstrap.visitTypeInsn(NEW, "java/lang/invoke/ConstantCallSite")
+ bootstrap.visitInsn(DUP)
+ bootstrap.visitVarInsn(ALOAD, 6) // methodHandle
+ bootstrap.visitMethodInsn(INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V")
+ bootstrap.visitInsn(ARETURN)
+ bootstrap.visitMaxs(4,7)
+ bootstrap.visitEnd()
+
+ val test = cw.visitMethod(ACC_PUBLIC + ACC_FINAL, "test", s"()Ljava/lang/String;", null, null)
+ test.visitCode()
+ val bootstrapHandle = new Handle(H_INVOKESTATIC, invokerClassName, bootstrapMethodName, bootStrapMethodType)
+ test.visitInvokeDynamicInsn("invoke", targetMethodType, bootstrapHandle)
+ test.visitInsn(ARETURN)
+ test.visitMaxs(1, 1)
+ test.visitEnd()
+
+ cw.visitEnd()
+ val bytes = cw.toByteArray()
+
+ val fos = new FileOutputStream(new File(s"${testOutput.path}/$invokerClassName.class"))
+ try
+ fos write bytes
+ finally
+ fos.close()
+
+ }
+
+ def code =
+"""
+object Driver {
+ val invoker = new DynamicInvoker()
+ println(invoker.test())
+}
+"""
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ try {
+ // this test is only valid under JDK 1.7+
+ // cheat a little by using 'ScalaVersion' because it can parse java versions just as well
+ val requiredJavaVersion = ScalaVersion("1.7")
+ val executingJavaVersion = ScalaVersion(System.getProperty("java.specification.version"))
+ if (executingJavaVersion >= requiredJavaVersion) {
+ generateClass()
+ compile()
+ }
+ }
+ finally
+ System.setErr(prevErr)
+ }
+}
diff --git a/test/files/run/inline-ex-handlers.check b/test/files/run/inline-ex-handlers.check
index f2f0b60687..50a9d87557 100644
--- a/test/files/run/inline-ex-handlers.check
+++ b/test/files/run/inline-ex-handlers.check
@@ -1,425 +1,490 @@
-172c172
-< locals: value x$1, value x1
----
-> locals: value x$1, value x1, variable boxed1
-174c174
-< blocks: [1,2,3,4]
----
-> blocks: [1,3,4]
-186a187,188
-> 92 STORE_LOCAL(variable boxed1)
-> 92 LOAD_LOCAL(variable boxed1)
-195,197d196
-< 92 JUMP 2
-<
-< 2:
-247c246
-< blocks: [1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18]
----
-> blocks: [1,2,3,4,5,6,8,10,11,12,13,14,15,16,17,18]
-258,260d256
-< 92 JUMP 7
-<
-< 7:
-391c387
-< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
----
-> locals: value args, variable result, value ex6, value x4, value x5, value x
-393c389
-< blocks: [1,2,3,4,5,8,10,11,13]
----
-> blocks: [1,2,3,5,8,10,11,13,14]
-417c413,422
-< 103 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 14
->
-> 14:
-> 101 LOAD_LOCAL(value ex6)
-> 101 STORE_LOCAL(value x4)
-> 101 SCOPE_ENTER value x4
-> 106 LOAD_LOCAL(value x4)
-> 106 IS_INSTANCE REF(class MyException)
-> 106 CZJUMP (BOOL)NE ? 5 : 8
-430,432d434
-< 101 JUMP 4
-<
-< 4:
-442,445d443
-< 106 LOAD_LOCAL(value x5)
-< 106 CALL_METHOD MyException.message (dynamic)
-< 106 STORE_LOCAL(value message)
-< 106 SCOPE_ENTER value message
-447c445,446
-< 106 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 106 CALL_METHOD MyException.message (dynamic)
-519c518
-< blocks: [1,2,3,4,6,7,8,9,10]
----
-> blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
-548c547
-< 306 THROW(MyException)
----
-> ? JUMP 11
-549a549,553
-> 11:
-> ? LOAD_LOCAL(variable monitor4)
-> 305 MONITOR_EXIT
-> ? JUMP 12
->
-554c558
-< ? THROW(Throwable)
----
-> ? JUMP 12
-560c564,571
-< ? THROW(Throwable)
----
-> ? STORE_LOCAL(value t)
-> ? JUMP 13
->
-> 12:
-> ? LOAD_LOCAL(variable monitor3)
-> 304 MONITOR_EXIT
-> ? STORE_LOCAL(value t)
-> ? JUMP 13
-575a587,598
-> 13:
-> 310 LOAD_MODULE object Predef
-> 310 CALL_PRIMITIVE(StartConcat)
-> 310 CONSTANT("Caught crash: ")
-> 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
-> 310 LOAD_LOCAL(value t)
-> 310 CALL_METHOD java.lang.Throwable.toString (dynamic)
-> 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
-> 310 CALL_PRIMITIVE(EndConcat)
-> 310 CALL_METHOD scala.Predef.println (dynamic)
-> 310 JUMP 2
->
-584c607
-< catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
----
-> catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
-587c610
-< catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
----
-> catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
-619c642
-< blocks: [1,2,3,4,5,6,7,9,10]
----
-> blocks: [1,2,3,4,5,6,7,9,10,11,12]
-643c666,667
-< 78 THROW(IllegalArgumentException)
----
-> ? STORE_LOCAL(value e)
-> ? JUMP 11
-644a669,673
-> 11:
-> 81 LOAD_LOCAL(value e)
-> ? STORE_LOCAL(variable exc1)
-> ? JUMP 12
->
-672c701,702
-< 81 THROW(Exception)
----
-> ? STORE_LOCAL(variable exc1)
-> ? JUMP 12
-688a719,731
-> 12:
-> 83 LOAD_MODULE object Predef
-> 83 CONSTANT("finally")
-> 83 CALL_METHOD scala.Predef.println (dynamic)
-> 84 LOAD_LOCAL(variable result)
-> 84 CONSTANT(1)
-> 84 CALL_PRIMITIVE(Arithmetic(SUB,INT))
-> 84 CONSTANT(2)
-> 84 CALL_PRIMITIVE(Arithmetic(DIV,INT))
-> 84 STORE_LOCAL(variable result)
-> 84 LOAD_LOCAL(variable exc1)
-> 84 THROW(Throwable)
->
-694c737
-< catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
----
-> catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
-718c761
-< locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
----
-> locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
-720c763
-< blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
----
-> blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
-744c787,794
-< 172 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 26
->
-> 26:
-> 170 LOAD_LOCAL(value ex6)
-> 170 STORE_LOCAL(value x4)
-> 170 SCOPE_ENTER value x4
-> 170 JUMP 15
-787,790d836
-< 175 LOAD_LOCAL(value x5)
-< 175 CALL_METHOD MyException.message (dynamic)
-< 175 STORE_LOCAL(value message)
-< 175 SCOPE_ENTER value message
-792c838,839
-< 176 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 176 CALL_METHOD MyException.message (dynamic)
-796c843,844
-< 177 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 177 CALL_METHOD MyException.message (dynamic)
-798c846,847
-< 177 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 27
-802c851,852
-< 170 THROW(Throwable)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 27
-811a862,867
-> 27:
-> 169 LOAD_LOCAL(value ex6)
-> 169 STORE_LOCAL(value x4)
-> 169 SCOPE_ENTER value x4
-> 169 JUMP 5
->
-822,825d877
-< 180 LOAD_LOCAL(value x5)
-< 180 CALL_METHOD MyException.message (dynamic)
-< 180 STORE_LOCAL(value message)
-< 180 SCOPE_ENTER value message
-827c879,880
-< 181 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 181 CALL_METHOD MyException.message (dynamic)
-831c884,885
-< 182 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 182 CALL_METHOD MyException.message (dynamic)
-833c887,888
-< 182 THROW(MyException)
----
-> ? STORE_LOCAL(variable exc2)
-> ? JUMP 28
-837c892,893
-< 169 THROW(Throwable)
----
-> ? STORE_LOCAL(variable exc2)
-> ? JUMP 28
-853a910,922
-> 28:
-> 184 LOAD_MODULE object Predef
-> 184 CONSTANT("finally")
-> 184 CALL_METHOD scala.Predef.println (dynamic)
-> 185 LOAD_LOCAL(variable result)
-> 185 CONSTANT(1)
-> 185 CALL_PRIMITIVE(Arithmetic(SUB,INT))
-> 185 CONSTANT(2)
-> 185 CALL_PRIMITIVE(Arithmetic(DIV,INT))
-> 185 STORE_LOCAL(variable result)
-> 185 LOAD_LOCAL(variable exc2)
-> 185 THROW(Throwable)
->
-859c928
-< catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
----
-> catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
-862c931
-< catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
----
-> catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
-886c955
-< locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
----
-> locals: value args, variable result, value e, value ex6, value x4, value x5, value x
-888c957
-< blocks: [1,2,3,6,7,8,11,13,14,16]
----
-> blocks: [1,2,3,6,7,8,11,13,14,16,17]
-912c981,988
-< 124 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 17
->
-> 17:
-> 122 LOAD_LOCAL(value ex6)
-> 122 STORE_LOCAL(value x4)
-> 122 SCOPE_ENTER value x4
-> 122 JUMP 7
-937,940d1012
-< 127 LOAD_LOCAL(value x5)
-< 127 CALL_METHOD MyException.message (dynamic)
-< 127 STORE_LOCAL(value message)
-< 127 SCOPE_ENTER value message
-942c1014,1015
-< 127 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 127 CALL_METHOD MyException.message (dynamic)
-971c1044
-< catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
----
-> catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
-995c1068
-< locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
----
-> locals: value args, variable result, value ex6, value x4, value x5, value x, value e
-997c1070
-< blocks: [1,2,3,4,5,8,12,13,14,16]
----
-> blocks: [1,2,3,5,8,12,13,14,16,17]
-1021c1094,1103
-< 148 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 17
->
-> 17:
-> 145 LOAD_LOCAL(value ex6)
-> 145 STORE_LOCAL(value x4)
-> 145 SCOPE_ENTER value x4
-> 154 LOAD_LOCAL(value x4)
-> 154 IS_INSTANCE REF(class MyException)
-> 154 CZJUMP (BOOL)NE ? 5 : 8
-1042,1044d1123
-< 145 JUMP 4
-<
-< 4:
-1054,1057d1132
-< 154 LOAD_LOCAL(value x5)
-< 154 CALL_METHOD MyException.message (dynamic)
-< 154 STORE_LOCAL(value message)
-< 154 SCOPE_ENTER value message
-1059c1134,1135
-< 154 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 154 CALL_METHOD MyException.message (dynamic)
-1276c1352
-< blocks: [1,2,3,4,5,7]
----
-> blocks: [1,2,3,4,5,7,8]
-1300c1376,1383
-< 38 THROW(IllegalArgumentException)
----
-> ? STORE_LOCAL(value e)
-> ? JUMP 8
->
-> 8:
-> 42 LOAD_MODULE object Predef
-> 42 CONSTANT("IllegalArgumentException")
-> 42 CALL_METHOD scala.Predef.println (dynamic)
-> 42 JUMP 2
-1347c1430
-< locals: value args, variable result, value ex6, value x4, value x5, value message, value x
----
-> locals: value args, variable result, value ex6, value x4, value x5, value x
-1349c1432
-< blocks: [1,2,3,4,5,8,10,11,13,14,16]
----
-> blocks: [1,2,3,5,8,10,11,13,14,16,17]
-1373c1456,1457
-< 203 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 17
-1393c1477,1486
-< 209 THROW(MyException)
----
-> ? STORE_LOCAL(value ex6)
-> ? JUMP 17
->
-> 17:
-> 200 LOAD_LOCAL(value ex6)
-> 200 STORE_LOCAL(value x4)
-> 200 SCOPE_ENTER value x4
-> 212 LOAD_LOCAL(value x4)
-> 212 IS_INSTANCE REF(class MyException)
-> 212 CZJUMP (BOOL)NE ? 5 : 8
-1406,1408d1498
-< 200 JUMP 4
-<
-< 4:
-1418,1421d1507
-< 212 LOAD_LOCAL(value x5)
-< 212 CALL_METHOD MyException.message (dynamic)
-< 212 STORE_LOCAL(value message)
-< 212 SCOPE_ENTER value message
-1423c1509,1510
-< 213 LOAD_LOCAL(value message)
----
-> ? LOAD_LOCAL(value x5)
-> 213 CALL_METHOD MyException.message (dynamic)
-1467c1554
-< blocks: [1,2,3,4,5,7]
----
-> blocks: [1,2,3,4,5,7,8]
-1491c1578,1579
-< 58 THROW(IllegalArgumentException)
----
-> ? STORE_LOCAL(value e)
-> ? JUMP 8
-1492a1581,1586
-> 8:
-> 62 LOAD_MODULE object Predef
-> 62 CONSTANT("RuntimeException")
-> 62 CALL_METHOD scala.Predef.println (dynamic)
-> 62 JUMP 2
->
-1540c1634
-< blocks: [1,2,3,4]
----
-> blocks: [1,2,3,4,5]
-1560c1654,1659
-< 229 THROW(MyException)
----
-> ? JUMP 5
->
-> 5:
-> ? LOAD_LOCAL(variable monitor1)
-> 228 MONITOR_EXIT
-> 228 THROW(Throwable)
-1566c1665
-< ? THROW(Throwable)
----
-> 228 THROW(Throwable)
-1594c1693
-< locals: value args, variable result, variable monitor2, variable monitorResult1
----
-> locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
-1596c1695
-< blocks: [1,2,3,4]
----
-> blocks: [1,2,3,4,5]
-1619c1718,1726
-< 245 THROW(MyException)
----
-> ? STORE_LOCAL(value exception$1)
-> ? DROP ConcatClass
-> ? LOAD_LOCAL(value exception$1)
-> ? JUMP 5
->
-> 5:
-> ? LOAD_LOCAL(variable monitor2)
-> 244 MONITOR_EXIT
-> 244 THROW(Throwable)
-1625c1732
-< ? THROW(Throwable)
----
-> 244 THROW(Throwable)
-
+--- a
++++ b
+@@ -171,5 +171,5 @@
+ def productElement(x$1: Int (INT)): Object {
+- locals: value x$1, value x1
++ locals: value x$1, value x1, variable boxed1
+ startBlock: 1
+- blocks: [1,2,3,4]
++ blocks: [1,3,4]
+
+@@ -186,2 +186,4 @@
+ 92 LOAD_LOCAL(value x$1)
++ 92 STORE_LOCAL(variable boxed1)
++ 92 LOAD_LOCAL(variable boxed1)
+ 92 BOX INT
+@@ -194,5 +196,2 @@
+ 92 CALL_METHOD MyException.message (dynamic)
+- 92 JUMP 2
+-
+- 2:
+ 92 RETURN(REF(class Object))
+@@ -246,3 +245,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,6,7,8,10,11,12,13,14,15,16,17,18]
++ blocks: [1,2,3,4,5,6,8,10,11,12,13,14,15,16,17,18]
+
+@@ -257,5 +256,2 @@
+ 92 SCOPE_ENTER value x1
+- 92 JUMP 7
+-
+- 7:
+ 92 LOAD_LOCAL(value x1)
+@@ -390,5 +386,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,4,5,8,10,11,13]
++ blocks: [1,2,3,5,8,10,11,13,14]
+
+@@ -416,4 +412,13 @@
+ 103 CALL_METHOD MyException.<init> (static-instance)
+- 103 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 14
+
++ 14:
++ 101 LOAD_LOCAL(value ex6)
++ 101 STORE_LOCAL(value x4)
++ 101 SCOPE_ENTER value x4
++ 106 LOAD_LOCAL(value x4)
++ 106 IS_INSTANCE REF(class MyException)
++ 106 CZJUMP (BOOL)NE ? 5 : 8
++
+ 13:
+@@ -429,5 +434,2 @@
+ 101 SCOPE_ENTER value x4
+- 101 JUMP 4
+-
+- 4:
+ 106 LOAD_LOCAL(value x4)
+@@ -441,8 +443,5 @@
+ 106 SCOPE_ENTER value x5
+- 106 LOAD_LOCAL(value x5)
+- 106 CALL_METHOD MyException.message (dynamic)
+- 106 STORE_LOCAL(value message)
+- 106 SCOPE_ENTER value message
+ 106 LOAD_MODULE object Predef
+- 106 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 106 CALL_METHOD MyException.message (dynamic)
+ 106 CALL_METHOD scala.Predef.println (dynamic)
+@@ -518,3 +517,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,6,7,8,9,10]
++ blocks: [1,2,3,4,6,7,8,9,10,11,12,13]
+
+@@ -547,4 +546,9 @@
+ 306 CALL_METHOD MyException.<init> (static-instance)
+- 306 THROW(MyException)
++ ? JUMP 11
+
++ 11:
++ ? LOAD_LOCAL(variable monitor4)
++ 305 MONITOR_EXIT
++ ? JUMP 12
++
+ 9:
+@@ -553,3 +557,3 @@
+ 305 MONITOR_EXIT
+- ? THROW(Throwable)
++ ? JUMP 12
+
+@@ -559,4 +563,11 @@
+ 304 MONITOR_EXIT
+- ? THROW(Throwable)
++ ? STORE_LOCAL(value t)
++ ? JUMP 13
+
++ 12:
++ ? LOAD_LOCAL(variable monitor3)
++ 304 MONITOR_EXIT
++ ? STORE_LOCAL(value t)
++ ? JUMP 13
++
+ 3:
+@@ -575,2 +586,14 @@
+
++ 13:
++ 310 LOAD_MODULE object Predef
++ 310 CALL_PRIMITIVE(StartConcat)
++ 310 CONSTANT("Caught crash: ")
++ 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
++ 310 LOAD_LOCAL(value t)
++ 310 CALL_METHOD java.lang.Throwable.toString (dynamic)
++ 310 CALL_PRIMITIVE(StringConcat(REF(class String)))
++ 310 CALL_PRIMITIVE(EndConcat)
++ 310 CALL_METHOD scala.Predef.println (dynamic)
++ 310 JUMP 2
++
+ 2:
+@@ -583,6 +606,6 @@
+ with finalizer: null
+- catch (Throwable) in ArrayBuffer(7, 8, 9, 10) starting at: 6
++ catch (Throwable) in ArrayBuffer(7, 8, 9, 10, 11) starting at: 6
+ consisting of blocks: List(6)
+ with finalizer: null
+- catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10) starting at: 3
++ catch (Throwable) in ArrayBuffer(4, 6, 7, 8, 9, 10, 11, 12) starting at: 3
+ consisting of blocks: List(3)
+@@ -618,3 +641,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,6,7,9,10]
++ blocks: [1,2,3,4,5,6,7,9,10,11,12]
+
+@@ -642,4 +665,10 @@
+ 78 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
+- 78 THROW(IllegalArgumentException)
++ ? STORE_LOCAL(value e)
++ ? JUMP 11
+
++ 11:
++ 81 LOAD_LOCAL(value e)
++ ? STORE_LOCAL(variable exc1)
++ ? JUMP 12
++
+ 9:
+@@ -671,3 +700,4 @@
+ 81 LOAD_LOCAL(value e)
+- 81 THROW(Exception)
++ ? STORE_LOCAL(variable exc1)
++ ? JUMP 12
+
+@@ -688,2 +718,15 @@
+
++ 12:
++ 83 LOAD_MODULE object Predef
++ 83 CONSTANT("finally")
++ 83 CALL_METHOD scala.Predef.println (dynamic)
++ 84 LOAD_LOCAL(variable result)
++ 84 CONSTANT(1)
++ 84 CALL_PRIMITIVE(Arithmetic(SUB,INT))
++ 84 CONSTANT(2)
++ 84 CALL_PRIMITIVE(Arithmetic(DIV,INT))
++ 84 STORE_LOCAL(variable result)
++ 84 LOAD_LOCAL(variable exc1)
++ 84 THROW(Throwable)
++
+ }
+@@ -693,3 +736,3 @@
+ with finalizer: null
+- catch (<none>) in ArrayBuffer(4, 6, 7, 9) starting at: 3
++ catch (<none>) in ArrayBuffer(4, 6, 7, 9, 11) starting at: 3
+ consisting of blocks: List(3)
+@@ -717,5 +760,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value message, value x, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value ex6, variable exc2, value x4, value x5, value x, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25]
++ blocks: [1,2,3,4,5,6,9,11,14,15,16,19,21,22,24,25,26,27,28]
+
+@@ -743,4 +786,11 @@
+ 172 CALL_METHOD MyException.<init> (static-instance)
+- 172 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 26
+
++ 26:
++ 170 LOAD_LOCAL(value ex6)
++ 170 STORE_LOCAL(value x4)
++ 170 SCOPE_ENTER value x4
++ 170 JUMP 15
++
+ 24:
+@@ -786,8 +836,5 @@
+ 175 SCOPE_ENTER value x5
+- 175 LOAD_LOCAL(value x5)
+- 175 CALL_METHOD MyException.message (dynamic)
+- 175 STORE_LOCAL(value message)
+- 175 SCOPE_ENTER value message
+ 176 LOAD_MODULE object Predef
+- 176 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 176 CALL_METHOD MyException.message (dynamic)
+ 176 CALL_METHOD scala.Predef.println (dynamic)
+@@ -795,5 +842,7 @@
+ 177 DUP(REF(class MyException))
+- 177 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 177 CALL_METHOD MyException.message (dynamic)
+ 177 CALL_METHOD MyException.<init> (static-instance)
+- 177 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 27
+
+@@ -801,3 +850,4 @@
+ 170 LOAD_LOCAL(value ex6)
+- 170 THROW(Throwable)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 27
+
+@@ -811,2 +861,8 @@
+
++ 27:
++ 169 LOAD_LOCAL(value ex6)
++ 169 STORE_LOCAL(value x4)
++ 169 SCOPE_ENTER value x4
++ 169 JUMP 5
++
+ 5:
+@@ -821,8 +877,5 @@
+ 180 SCOPE_ENTER value x5
+- 180 LOAD_LOCAL(value x5)
+- 180 CALL_METHOD MyException.message (dynamic)
+- 180 STORE_LOCAL(value message)
+- 180 SCOPE_ENTER value message
+ 181 LOAD_MODULE object Predef
+- 181 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 181 CALL_METHOD MyException.message (dynamic)
+ 181 CALL_METHOD scala.Predef.println (dynamic)
+@@ -830,5 +883,7 @@
+ 182 DUP(REF(class MyException))
+- 182 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 182 CALL_METHOD MyException.message (dynamic)
+ 182 CALL_METHOD MyException.<init> (static-instance)
+- 182 THROW(MyException)
++ ? STORE_LOCAL(variable exc2)
++ ? JUMP 28
+
+@@ -836,3 +891,4 @@
+ 169 LOAD_LOCAL(value ex6)
+- 169 THROW(Throwable)
++ ? STORE_LOCAL(variable exc2)
++ ? JUMP 28
+
+@@ -853,2 +909,15 @@
+
++ 28:
++ 184 LOAD_MODULE object Predef
++ 184 CONSTANT("finally")
++ 184 CALL_METHOD scala.Predef.println (dynamic)
++ 185 LOAD_LOCAL(variable result)
++ 185 CONSTANT(1)
++ 185 CALL_PRIMITIVE(Arithmetic(SUB,INT))
++ 185 CONSTANT(2)
++ 185 CALL_PRIMITIVE(Arithmetic(DIV,INT))
++ 185 STORE_LOCAL(variable result)
++ 185 LOAD_LOCAL(variable exc2)
++ 185 THROW(Throwable)
++
+ }
+@@ -858,6 +927,6 @@
+ with finalizer: null
+- catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24) starting at: 4
++ catch (Throwable) in ArrayBuffer(14, 15, 16, 19, 21, 22, 24, 26) starting at: 4
+ consisting of blocks: List(9, 6, 5, 4)
+ with finalizer: null
+- catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24) starting at: 3
++ catch (<none>) in ArrayBuffer(4, 5, 6, 9, 14, 15, 16, 19, 21, 22, 24, 26, 27) starting at: 3
+ consisting of blocks: List(3)
+@@ -885,5 +954,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value e, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value e, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,6,7,8,11,13,14,16]
++ blocks: [1,2,3,6,7,8,11,13,14,16,17]
+
+@@ -911,4 +980,11 @@
+ 124 CALL_METHOD MyException.<init> (static-instance)
+- 124 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
++ 17:
++ 122 LOAD_LOCAL(value ex6)
++ 122 STORE_LOCAL(value x4)
++ 122 SCOPE_ENTER value x4
++ 122 JUMP 7
++
+ 16:
+@@ -936,8 +1012,5 @@
+ 127 SCOPE_ENTER value x5
+- 127 LOAD_LOCAL(value x5)
+- 127 CALL_METHOD MyException.message (dynamic)
+- 127 STORE_LOCAL(value message)
+- 127 SCOPE_ENTER value message
+ 127 LOAD_MODULE object Predef
+- 127 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 127 CALL_METHOD MyException.message (dynamic)
+ 127 CALL_METHOD scala.Predef.println (dynamic)
+@@ -970,3 +1043,3 @@
+ with finalizer: null
+- catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16) starting at: 3
++ catch (IllegalArgumentException) in ArrayBuffer(6, 7, 8, 11, 13, 14, 16, 17) starting at: 3
+ consisting of blocks: List(3)
+@@ -994,5 +1067,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, value x4, value x5, value message, value x, value e
++ locals: value args, variable result, value ex6, value x4, value x5, value x, value e
+ startBlock: 1
+- blocks: [1,2,3,4,5,8,12,13,14,16]
++ blocks: [1,2,3,5,8,12,13,14,16,17]
+
+@@ -1020,4 +1093,13 @@
+ 148 CALL_METHOD MyException.<init> (static-instance)
+- 148 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
++ 17:
++ 145 LOAD_LOCAL(value ex6)
++ 145 STORE_LOCAL(value x4)
++ 145 SCOPE_ENTER value x4
++ 154 LOAD_LOCAL(value x4)
++ 154 IS_INSTANCE REF(class MyException)
++ 154 CZJUMP (BOOL)NE ? 5 : 8
++
+ 16:
+@@ -1041,5 +1123,2 @@
+ 145 SCOPE_ENTER value x4
+- 145 JUMP 4
+-
+- 4:
+ 154 LOAD_LOCAL(value x4)
+@@ -1053,8 +1132,5 @@
+ 154 SCOPE_ENTER value x5
+- 154 LOAD_LOCAL(value x5)
+- 154 CALL_METHOD MyException.message (dynamic)
+- 154 STORE_LOCAL(value message)
+- 154 SCOPE_ENTER value message
+ 154 LOAD_MODULE object Predef
+- 154 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 154 CALL_METHOD MyException.message (dynamic)
+ 154 CALL_METHOD scala.Predef.println (dynamic)
+@@ -1275,3 +1351,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,7]
++ blocks: [1,2,3,4,5,7,8]
+
+@@ -1299,4 +1375,11 @@
+ 38 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
+- 38 THROW(IllegalArgumentException)
++ ? STORE_LOCAL(value e)
++ ? JUMP 8
+
++ 8:
++ 42 LOAD_MODULE object Predef
++ 42 CONSTANT("IllegalArgumentException")
++ 42 CALL_METHOD scala.Predef.println (dynamic)
++ 42 JUMP 2
++
+ 7:
+@@ -1346,5 +1429,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, value ex6, value x4, value x5, value message, value x
++ locals: value args, variable result, value ex6, value x4, value x5, value x
+ startBlock: 1
+- blocks: [1,2,3,4,5,8,10,11,13,14,16]
++ blocks: [1,2,3,5,8,10,11,13,14,16,17]
+
+@@ -1372,3 +1455,4 @@
+ 203 CALL_METHOD MyException.<init> (static-instance)
+- 203 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
+@@ -1392,4 +1476,13 @@
+ 209 CALL_METHOD MyException.<init> (static-instance)
+- 209 THROW(MyException)
++ ? STORE_LOCAL(value ex6)
++ ? JUMP 17
+
++ 17:
++ 200 LOAD_LOCAL(value ex6)
++ 200 STORE_LOCAL(value x4)
++ 200 SCOPE_ENTER value x4
++ 212 LOAD_LOCAL(value x4)
++ 212 IS_INSTANCE REF(class MyException)
++ 212 CZJUMP (BOOL)NE ? 5 : 8
++
+ 16:
+@@ -1405,5 +1498,2 @@
+ 200 SCOPE_ENTER value x4
+- 200 JUMP 4
+-
+- 4:
+ 212 LOAD_LOCAL(value x4)
+@@ -1417,8 +1507,5 @@
+ 212 SCOPE_ENTER value x5
+- 212 LOAD_LOCAL(value x5)
+- 212 CALL_METHOD MyException.message (dynamic)
+- 212 STORE_LOCAL(value message)
+- 212 SCOPE_ENTER value message
+ 213 LOAD_MODULE object Predef
+- 213 LOAD_LOCAL(value message)
++ ? LOAD_LOCAL(value x5)
++ 213 CALL_METHOD MyException.message (dynamic)
+ 213 CALL_METHOD scala.Predef.println (dynamic)
+@@ -1466,3 +1553,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4,5,7]
++ blocks: [1,2,3,4,5,7,8]
+
+@@ -1490,4 +1577,11 @@
+ 58 CALL_METHOD java.lang.IllegalArgumentException.<init> (static-instance)
+- 58 THROW(IllegalArgumentException)
++ ? STORE_LOCAL(value e)
++ ? JUMP 8
+
++ 8:
++ 62 LOAD_MODULE object Predef
++ 62 CONSTANT("RuntimeException")
++ 62 CALL_METHOD scala.Predef.println (dynamic)
++ 62 JUMP 2
++
+ 7:
+@@ -1539,3 +1633,3 @@
+ startBlock: 1
+- blocks: [1,2,3,4]
++ blocks: [1,2,3,4,5]
+
+@@ -1559,4 +1653,9 @@
+ 229 CALL_METHOD MyException.<init> (static-instance)
+- 229 THROW(MyException)
++ ? JUMP 5
+
++ 5:
++ ? LOAD_LOCAL(variable monitor1)
++ 228 MONITOR_EXIT
++ 228 THROW(Throwable)
++
+ 3:
+@@ -1565,3 +1664,3 @@
+ 228 MONITOR_EXIT
+- ? THROW(Throwable)
++ 228 THROW(Throwable)
+
+@@ -1593,5 +1692,5 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, variable result, variable monitor2, variable monitorResult1
++ locals: value exception$1, value args, variable result, variable monitor2, variable monitorResult1
+ startBlock: 1
+- blocks: [1,2,3,4]
++ blocks: [1,2,3,4,5]
+
+@@ -1618,4 +1717,12 @@
+ 245 CALL_METHOD MyException.<init> (static-instance)
+- 245 THROW(MyException)
++ ? STORE_LOCAL(value exception$1)
++ ? DROP ConcatClass
++ ? LOAD_LOCAL(value exception$1)
++ ? JUMP 5
+
++ 5:
++ ? LOAD_LOCAL(variable monitor2)
++ 244 MONITOR_EXIT
++ 244 THROW(Throwable)
++
+ 3:
+@@ -1624,3 +1731,3 @@
+ 244 MONITOR_EXIT
+- ? THROW(Throwable)
++ 244 THROW(Throwable)
+
diff --git a/test/files/run/interpolation.scala b/test/files/run/interpolation.scala
index f443bd5feb..14d9819348 100644
--- a/test/files/run/interpolation.scala
+++ b/test/files/run/interpolation.scala
@@ -13,7 +13,7 @@ object Test extends App {
println(s"Best price: $f")
println(f"Best price: $f%.2f")
println(s"$f% discount included")
- println(f"$f%3.2f% discount included")
+ println(f"$f%3.2f%% discount included")
}
test1(1)
diff --git a/test/files/run/interpolationMultiline1.scala b/test/files/run/interpolationMultiline1.scala
index 437aed44b0..db634e7775 100644
--- a/test/files/run/interpolationMultiline1.scala
+++ b/test/files/run/interpolationMultiline1.scala
@@ -13,7 +13,7 @@ object Test extends App {
println(s"""Best price: $f""")
println(f"""Best price: $f%.2f""")
println(s"""$f% discount included""")
- println(f"""$f%3.2f% discount included""")
+ println(f"""$f%3.2f%% discount included""")
}
test1(1)
diff --git a/test/files/run/macro-expand-nullary-generic.check b/test/files/run/macro-expand-nullary-generic.check
index 133840c469..42976f4baf 100644
--- a/test/files/run/macro-expand-nullary-generic.check
+++ b/test/files/run/macro-expand-nullary-generic.check
@@ -1,6 +1,6 @@
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
-it works TypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
+it works WeakTypeTag[Int]
kkthxbai
diff --git a/test/files/run/macro-expand-tparams-explicit.check b/test/files/run/macro-expand-tparams-explicit.check
index e7e6718406..b6b4f6fa3a 100644
--- a/test/files/run/macro-expand-tparams-explicit.check
+++ b/test/files/run/macro-expand-tparams-explicit.check
@@ -1 +1 @@
-TypeTag[Int]
+WeakTypeTag[Int]
diff --git a/test/files/run/macro-expand-tparams-implicit.check b/test/files/run/macro-expand-tparams-implicit.check
index fa6b335afb..a9bf55423e 100644
--- a/test/files/run/macro-expand-tparams-implicit.check
+++ b/test/files/run/macro-expand-tparams-implicit.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-prefix-a.check b/test/files/run/macro-expand-tparams-prefix-a.check
index 0bf3c55bbe..ca44a4f652 100644
--- a/test/files/run/macro-expand-tparams-prefix-a.check
+++ b/test/files/run/macro-expand-tparams-prefix-a.check
@@ -1,4 +1,4 @@
-TypeTag[Int]
-TypeTag[Int]
+WeakTypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
-TypeTag[Boolean]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-b.check b/test/files/run/macro-expand-tparams-prefix-b.check
index 77c2ee9051..2ff2ce435d 100644
--- a/test/files/run/macro-expand-tparams-prefix-b.check
+++ b/test/files/run/macro-expand-tparams-prefix-b.check
@@ -1,2 +1,2 @@
-TypeTag[Boolean] TypeTag[Int]
-TypeTag[Boolean] WeakTypeTag[String]
+WeakTypeTag[Boolean] WeakTypeTag[Int]
+WeakTypeTag[Boolean] WeakTypeTag[String]
diff --git a/test/files/run/macro-expand-tparams-prefix-c1.check b/test/files/run/macro-expand-tparams-prefix-c1.check
index f0dd5b9cd8..0f24f74db1 100644
--- a/test/files/run/macro-expand-tparams-prefix-c1.check
+++ b/test/files/run/macro-expand-tparams-prefix-c1.check
@@ -1,3 +1,3 @@
-TypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
-TypeTag[Boolean]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-c2.check b/test/files/run/macro-expand-tparams-prefix-c2.check
index f0dd5b9cd8..0f24f74db1 100644
--- a/test/files/run/macro-expand-tparams-prefix-c2.check
+++ b/test/files/run/macro-expand-tparams-prefix-c2.check
@@ -1,3 +1,3 @@
-TypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
-TypeTag[Boolean]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-expand-tparams-prefix-d1.check b/test/files/run/macro-expand-tparams-prefix-d1.check
index c5aaaf5a09..7832503256 100644
--- a/test/files/run/macro-expand-tparams-prefix-d1.check
+++ b/test/files/run/macro-expand-tparams-prefix-d1.check
@@ -1,3 +1,3 @@
WeakTypeTag[T]
WeakTypeTag[U]
-TypeTag[Boolean]
+WeakTypeTag[Boolean]
diff --git a/test/files/run/macro-undetparams-consfromsls.check b/test/files/run/macro-undetparams-consfromsls.check
index b10a90043e..3fee58d9c1 100644
--- a/test/files/run/macro-undetparams-consfromsls.check
+++ b/test/files/run/macro-undetparams-consfromsls.check
@@ -1,5 +1,5 @@
-A = TypeTag[Int]
-B = TypeTag[Nothing]
+A = WeakTypeTag[Int]
+B = WeakTypeTag[Nothing]
List(1)
-A = TypeTag[Any]
+A = WeakTypeTag[Any]
List(abc, 1)
diff --git a/test/files/run/macro-undetparams-macroitself.check b/test/files/run/macro-undetparams-macroitself.check
index fa6b335afb..a9bf55423e 100644
--- a/test/files/run/macro-undetparams-macroitself.check
+++ b/test/files/run/macro-undetparams-macroitself.check
@@ -1,2 +1,2 @@
-TypeTag[Int]
+WeakTypeTag[Int]
WeakTypeTag[String]
diff --git a/test/files/run/outertest.scala b/test/files/run/outertest.scala
index 3cc96afa5b..fa0443f669 100644
--- a/test/files/run/outertest.scala
+++ b/test/files/run/outertest.scala
@@ -1,26 +1,57 @@
// A test for the case where the outer field of class B#J should be eliminated.
-// You can verify this by running a javap on B.J
+
+import reflect.ClassTag
+
abstract class A {
+ abstract class I
- abstract class I {
+ val foo = this
+}
+class B extends A {
+ class J extends I {
+ val bar = foo
}
- val foo = "foo"
+ type II = I
+ class K extends II {
+ val bar = foo
+ }
+ class L extends (I @annotation.tailrec) {
+ val bar = foo
+ }
}
-class B extends A {
- class J extends I {
+class C extends A {
+ val c: C = this
+
+ class M extends c.I {
val bar = foo
}
-
}
-object Test extends App {
+object Test extends App {
val b = new B
- assert((new b.J).bar == b.foo)
+ val c0 = new C
+ val c = new { override val c = c0 } with C
+
+ assert((new b.J).bar eq b)
+ assert((new b.K).bar eq b)
+ assert((new b.L).bar eq b)
+ assert((new c.M).bar eq c)
+
+ def checkOuterFields[C: ClassTag](expected: Int) {
+ val cls = implicitly[ClassTag[C]].runtimeClass
+ val outerFields = cls.getDeclaredFields().filter(_.getName.contains("$outer"))
+ assert(outerFields.size == expected, outerFields.map(_.getName))
+ }
+ checkOuterFields[A#I](1) // the base class must have the $outer pointer
+ checkOuterFields[B#J](0) // reuse parent class' $outer pointer
+ checkOuterFields[B#K](0) // ... through an alias
+ checkOuterFields[B#L](0) // ... through the annotated type
+ checkOuterFields[C#M](1) // different prefix, can't share.
}
diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check
index 56ddd74375..0cb18e989a 100644
--- a/test/files/run/repl-colon-type.check
+++ b/test/files/run/repl-colon-type.check
@@ -4,12 +4,6 @@ Type :help for more information.
scala>
scala> :type List[1, 2, 3]
-<console>:2: error: identifier expected but integer literal found.
- List[1, 2, 3]
- ^
-<console>:3: error: ']' expected but '}' found.
- }
- ^
<console>:1: error: identifier expected but integer literal found.
List[1, 2, 3]
^
@@ -45,12 +39,9 @@ scala> :type lazy val f = 5
Int
scala> :type protected lazy val f = 5
-<console>:2: error: illegal start of statement (no modifiers allowed here)
- protected lazy val f = 5
- ^
<console>:5: error: lazy value f cannot be accessed in object $iw
Access to protected value f not permitted because
- enclosing object $eval in package $line19 is not a subclass of
+ enclosing object $eval in package $line13 is not a subclass of
object $iw where target is defined
lazy val $result = `f`
^
@@ -223,4 +214,14 @@ PolyType(
scala>
+scala> // SI-7132 - :type doesn't understand Unit
+
+scala> :type ()
+Unit
+
+scala> :type println("side effect!")
+Unit
+
+scala>
+
scala>
diff --git a/test/files/run/repl-colon-type.scala b/test/files/run/repl-colon-type.scala
index c055b215c2..8cf81a6afe 100644
--- a/test/files/run/repl-colon-type.scala
+++ b/test/files/run/repl-colon-type.scala
@@ -26,6 +26,10 @@ object Test extends ReplTest {
|:type -v Nil.combinations _
|:type -v def f[T <: AnyVal] = List[T]().combinations _
|:type -v def f[T, U >: T](x: T, y: List[U]) = x :: y
+ |
+ |// SI-7132 - :type doesn't understand Unit
+ |:type ()
+ |:type println("side effect!")
""".stripMargin
}
diff --git a/test/files/run/resetattrs-this.check b/test/files/run/resetattrs-this.check
new file mode 100644
index 0000000000..27ba77ddaf
--- /dev/null
+++ b/test/files/run/resetattrs-this.check
@@ -0,0 +1 @@
+true
diff --git a/test/files/run/resetattrs-this.scala b/test/files/run/resetattrs-this.scala
new file mode 100644
index 0000000000..12afa3d712
--- /dev/null
+++ b/test/files/run/resetattrs-this.scala
@@ -0,0 +1,11 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val tb = cm.mkToolBox()
+ val tree = Select(This(cm.staticPackage("scala").moduleClass), newTermName("Predef"))
+ val ttree = tb.typeCheck(tree)
+ val rttree = tb.resetAllAttrs(ttree)
+ println(tb.eval(rttree) == Predef)
+} \ No newline at end of file
diff --git a/test/files/run/t3994.scala b/test/files/run/t3994.scala
new file mode 100644
index 0000000000..0ee1d9d966
--- /dev/null
+++ b/test/files/run/t3994.scala
@@ -0,0 +1,20 @@
+trait T {
+ trait Default { def foo = this }
+ object Default extends Default
+}
+
+class Crash { // if you change this to a `trait` it keeps failing, though if it is an `object` it compiles just fine!
+ class Element
+
+ /* declare this as a class, and the crash goes away */
+ trait ElementOrdering extends Ordering[Element] {
+ def compare(a: Element, b: Element): Int = 0
+ }
+
+ implicit object ElementOrdering extends ElementOrdering
+}
+
+object Test extends App {
+ (new T {}).Default
+ (new Crash).ElementOrdering
+}
diff --git a/test/files/run/t5527.check b/test/files/run/t5527.check
index 1518168c51..36bee9bb55 100644
--- a/test/files/run/t5527.check
+++ b/test/files/run/t5527.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of parser]] // newSource1
+[[syntax trees at end of parser]] // newSource1.scala
package <empty> {
object UselessComments extends scala.AnyRef {
def <init>() = {
diff --git a/test/files/run/t5603.check b/test/files/run/t5603.check
index 5127d3c1c7..3f19a0a4b1 100644
--- a/test/files/run/t5603.check
+++ b/test/files/run/t5603.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of parser]] // newSource1
+[[syntax trees at end of parser]] // newSource1.scala
[0:241]package [0:0]<empty> {
[0:82]abstract trait Greeting extends [15:82][83]scala.AnyRef {
[15]def $init$() = [15]{
diff --git a/test/files/run/t5699.check b/test/files/run/t5699.check
new file mode 100755
index 0000000000..df19644ae6
--- /dev/null
+++ b/test/files/run/t5699.check
@@ -0,0 +1,11 @@
+[[syntax trees at end of parser]] // annodef.java
+package <empty> {
+ object MyAnnotation extends {
+ def <init>() = _
+ };
+ class MyAnnotation extends scala.annotation.Annotation with _root_.java.lang.annotation.Annotation with scala.annotation.ClassfileAnnotation {
+ def <init>() = _;
+ def value(): String
+ }
+}
+
diff --git a/test/files/run/t5699.scala b/test/files/run/t5699.scala
new file mode 100755
index 0000000000..5cef67e3b1
--- /dev/null
+++ b/test/files/run/t5699.scala
@@ -0,0 +1,24 @@
+import scala.tools.partest.DirectTest
+import scala.tools.nsc.util.BatchSourceFile
+
+object Test extends DirectTest {
+ // Java code
+ override def code = """
+ |public @interface MyAnnotation { String value(); }
+ """.stripMargin
+
+ override def extraSettings: String = "-usejavacp -Ystop-after:typer -Xprint:parser"
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newSources(sourceCodes: String*) = {
+ assert(sourceCodes.size == 1)
+ List(new BatchSourceFile("annodef.java", sourceCodes(0)))
+ }
+}
diff --git a/test/files/run/t5710-1.check b/test/files/run/t5710-1.check
new file mode 100644
index 0000000000..eac2025aeb
--- /dev/null
+++ b/test/files/run/t5710-1.check
@@ -0,0 +1 @@
+evaluated = (abc,abc)
diff --git a/test/files/run/t5710-1.scala b/test/files/run/t5710-1.scala
new file mode 100644
index 0000000000..12bd858e06
--- /dev/null
+++ b/test/files/run/t5710-1.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ val (x, y) = ("abc": Any) match { case x => (x, x) }
+ (x, y)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+} \ No newline at end of file
diff --git a/test/files/run/t5710-2.check b/test/files/run/t5710-2.check
new file mode 100644
index 0000000000..eac2025aeb
--- /dev/null
+++ b/test/files/run/t5710-2.check
@@ -0,0 +1 @@
+evaluated = (abc,abc)
diff --git a/test/files/run/t5710-2.scala b/test/files/run/t5710-2.scala
new file mode 100644
index 0000000000..6d2129cca2
--- /dev/null
+++ b/test/files/run/t5710-2.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val code = reify {
+ val (x, y) = "abc" match { case x => (x, x) }
+ (x, y)
+ };
+
+ val toolbox = cm.mkToolBox()
+ val evaluated = toolbox.eval(code.tree)
+ println("evaluated = " + evaluated)
+} \ No newline at end of file
diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check
index 94013efd36..613d25b075 100644
--- a/test/files/run/t6028.check
+++ b/test/files/run/t6028.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of lambdalift]] // newSource1
+[[syntax trees at end of lambdalift]] // newSource1.scala
package <empty> {
class T extends Object {
<paramaccessor> val T$$classParam: Int = _;
diff --git a/test/files/run/t6146b.check b/test/files/run/t6146b.check
index b664d1152a..49ff70697e 100644
--- a/test/files/run/t6146b.check
+++ b/test/files/run/t6146b.check
@@ -37,8 +37,15 @@ memType: (sub: u.Type, scrut: u.Type)u.Type
scala>
-scala> memType(S1, fTpe)
-res0: u.Type = O.X.S1.type
+scala> val mt1 = memType(S1, fTpe)
+mt1: u.Type = O.X.S1.type
+
+scala> global.typeDeconstruct.show(mt1)
+res0: String =
+TypeRef(
+ pre = SingleType(pre = ThisType(object O), object X)
+ TypeSymbol(class S1 extends C.this.F[T])
+)
scala> memType(S2, fTpe)
res1: u.Type = O.S2
diff --git a/test/files/run/t6146b.scala b/test/files/run/t6146b.scala
index adcd40d2ee..e63709aa9d 100644
--- a/test/files/run/t6146b.scala
+++ b/test/files/run/t6146b.scala
@@ -31,7 +31,8 @@ val fTpe = typeOf[O.type].decl(newTermName("foo")).paramss.head.head.tpe
def memType(sub: Type, scrut: Type): Type =
nestedMemberType(sub.typeSymbol, scrut.prefix, scrut.typeSymbol.owner)
-memType(S1, fTpe)
+val mt1 = memType(S1, fTpe)
+global.typeDeconstruct.show(mt1)
memType(S2, fTpe)
memType(S3, fTpe)
memType(S4, fTpe)
diff --git a/test/files/run/t6223.check b/test/files/run/t6223.check
index 90ec019407..f83799bab1 100644
--- a/test/files/run/t6223.check
+++ b/test/files/run/t6223.check
@@ -1,4 +1,4 @@
bar
-bar$mcI$sp
bar$mIc$sp
bar$mIcI$sp
+bar$mcI$sp \ No newline at end of file
diff --git a/test/files/run/t6223.scala b/test/files/run/t6223.scala
index 4ab7c832e6..0996ea1c45 100644
--- a/test/files/run/t6223.scala
+++ b/test/files/run/t6223.scala
@@ -5,7 +5,7 @@ class Foo[@specialized(Int) A](a:A) {
object Test {
def main(args:Array[String]) {
val f = new Foo(333)
- val ms = f.getClass().getDeclaredMethods()
- ms.foreach(m => println(m.getName))
+ val ms = f.getClass().getDeclaredMethods().map(_.getName).sorted
+ ms.foreach(println)
}
}
diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check
index e6467edc95..e940975e44 100644
--- a/test/files/run/t6288.check
+++ b/test/files/run/t6288.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of patmat]] // newSource1
+[[syntax trees at end of patmat]] // newSource1.scala
[7]package [7]<empty> {
[7]object Case3 extends [13][106]scala.AnyRef {
[13]def <init>(): [13]Case3.type = [13]{
diff --git a/test/files/run/t6370.scala b/test/files/run/t6370.scala
new file mode 100644
index 0000000000..c86b87dc8a
--- /dev/null
+++ b/test/files/run/t6370.scala
@@ -0,0 +1,12 @@
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ val m = collection.immutable.ListMap( "x" -> 1 )
+ try {
+ m("y")
+ } catch {
+ case e : NoSuchElementException => assert(e.getMessage() == "key not found: y")
+ }
+
+ }
+}
diff --git a/test/files/run/t6440.check b/test/files/run/t6440.check
index 69c253eab4..806279fb74 100644
--- a/test/files/run/t6440.check
+++ b/test/files/run/t6440.check
@@ -1,4 +1,4 @@
-pos: source-newSource1,line-9,offset=109 bad symbolic reference. A signature in U.class refers to term pack1
+pos: source-newSource1.scala,line-9,offset=109 bad symbolic reference. A signature in U.class refers to term pack1
in package <root> which is not available.
It may be completely missing from the current classpath, or the version on
the classpath might be incompatible with the version used when compiling U.class. ERROR
diff --git a/test/files/run/t6555.check b/test/files/run/t6555.check
index 04117b7c2f..a18a8e8023 100644
--- a/test/files/run/t6555.check
+++ b/test/files/run/t6555.check
@@ -1,4 +1,4 @@
-[[syntax trees at end of specialize]] // newSource1
+[[syntax trees at end of specialize]] // newSource1.scala
package <empty> {
class Foo extends Object {
def <init>(): Foo = {
diff --git a/test/files/run/t6715.scala b/test/files/run/t6715.scala
new file mode 100644
index 0000000000..07ff34218a
--- /dev/null
+++ b/test/files/run/t6715.scala
@@ -0,0 +1,15 @@
+import scala.reflect.runtime.universe._
+
+class A {
+ def $$ = 1
+ def $times = 1
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val memberSet: Set[String] = typeOf[A].members.map{ _.toString }.toSet
+ assert(memberSet contains "method *")
+ assert(memberSet contains "method $$")
+ assert(! (memberSet contains "method"))
+ }
+}
diff --git a/test/files/run/t6725-1.check b/test/files/run/t6725-1.check
new file mode 100644
index 0000000000..6ed281c757
--- /dev/null
+++ b/test/files/run/t6725-1.check
@@ -0,0 +1,2 @@
+1
+1
diff --git a/test/files/run/t6725-1.scala b/test/files/run/t6725-1.scala
new file mode 100644
index 0000000000..a167ef8aa3
--- /dev/null
+++ b/test/files/run/t6725-1.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ val a = 1
+ val s = f"$a%s%n$a%s"
+ println(s)
+} \ No newline at end of file
diff --git a/test/files/run/t6725-2.check b/test/files/run/t6725-2.check
new file mode 100644
index 0000000000..3496917ad5
--- /dev/null
+++ b/test/files/run/t6725-2.check
@@ -0,0 +1,8 @@
+
+
+aaaa
+
+
+aaaa
+aaaa
+aaaa
diff --git a/test/files/run/t6725-2.scala b/test/files/run/t6725-2.scala
new file mode 100644
index 0000000000..e033cf5ea8
--- /dev/null
+++ b/test/files/run/t6725-2.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ println(f"%n")
+ println(f"aaaa%n")
+ println(f"%naaaa")
+ println(f"aaaa%naaaa")
+} \ No newline at end of file
diff --git a/test/files/run/t6793.scala b/test/files/run/t6793.scala
new file mode 100644
index 0000000000..0b1f1619af
--- /dev/null
+++ b/test/files/run/t6793.scala
@@ -0,0 +1,9 @@
+package a { class C1(private[a] val v0: String) }
+package b { class C2(v1: String) extends a.C1(v1) { def foo = v1 } }
+
+object Test extends App {
+ new b.C2("x")
+
+ val c2Fields = classOf[b.C2].getDeclaredFields
+ assert(c2Fields.size == 1, c2Fields.map(_.getName).toList)
+}
diff --git a/test/files/run/t6793b.scala b/test/files/run/t6793b.scala
new file mode 100644
index 0000000000..cb3f2fb2fa
--- /dev/null
+++ b/test/files/run/t6793b.scala
@@ -0,0 +1,11 @@
+package a {
+ class C1(val v0: String)
+ class C2(v1: String) extends a.C1(v1) { def foo = v1 }
+}
+
+object Test extends App {
+ new a.C2("x")
+
+ val c2Fields = classOf[a.C2].getDeclaredFields
+ assert(c2Fields.isEmpty, c2Fields.map(_.getName).mkString(", "))
+}
diff --git a/test/files/run/t6793c.scala b/test/files/run/t6793c.scala
new file mode 100644
index 0000000000..e28c7c81a1
--- /dev/null
+++ b/test/files/run/t6793c.scala
@@ -0,0 +1,11 @@
+package a {
+ class C1(private[a] val v0: String)
+ class C2(v1: String) extends a.C1(v1) { def foo = v1 }
+}
+
+object Test extends App {
+ new a.C2("x").foo
+
+ val c2Fields = classOf[a.C2].getDeclaredFields
+ assert(c2Fields.isEmpty, c2Fields.map(_.getName).toList)
+}
diff --git a/test/files/run/t6900.scala b/test/files/run/t6900.scala
new file mode 100644
index 0000000000..a29d388129
--- /dev/null
+++ b/test/files/run/t6900.scala
@@ -0,0 +1,36 @@
+import annotation.tailrec
+
+trait Universe {
+ type T <: AnyRef
+}
+
+final class Bug {
+ var i = 1
+ def stop() = { i -= 1; i < 0 }
+ // the alias bypasses the fast path in erasures InfoTransformer
+ // predicated on `TypeMap.noChangeToSymbols`
+ type Alias = Any
+
+ @tailrec
+ // So we get two symbols for `universe`, the original on the ValDef
+ // and a clone in the MethodType of `f`.
+ def f(universe: Universe, l: Alias): universe.T = {
+ if (stop()) null.asInstanceOf[universe.T] else f(universe, null)
+ }
+
+ @tailrec
+ def g(universe: Universe)(l: Alias): universe.T = {
+ if (stop()) null.asInstanceOf[universe.T] else g(universe)(l)
+ }
+
+ @tailrec
+ def h(universe: Universe)(l: List[universe.T]): List[universe.T] = {
+ if (stop()) Nil else h(universe)(l)
+ }
+}
+
+object Test extends App {
+ assert(new Bug().f(null, null) == null)
+ assert(new Bug().g(null)(null) == null)
+ assert(new Bug().h(null)(null) == Nil)
+} \ No newline at end of file
diff --git a/test/files/run/t6935.scala b/test/files/run/t6935.scala
new file mode 100644
index 0000000000..dea2d7f2e2
--- /dev/null
+++ b/test/files/run/t6935.scala
@@ -0,0 +1,14 @@
+object Test {
+
+ def main(args: Array[String]): Unit = {
+ import java.io._
+ val bytes = new ByteArrayOutputStream()
+ val out = new ObjectOutputStream(bytes)
+ out.writeObject(())
+ out.close()
+ val buf = bytes.toByteArray
+ val in = new ObjectInputStream(new ByteArrayInputStream(buf))
+ val unit = in.readObject()
+ assert(unit == ())
+ }
+}
diff --git a/test/files/run/t6937.check b/test/files/run/t6937.check
new file mode 100644
index 0000000000..9a1fa4cfaf
--- /dev/null
+++ b/test/files/run/t6937.check
@@ -0,0 +1,26 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{universe=>ru}
+
+scala> import scala.reflect.runtime.{currentMirror => cm}
+import scala.reflect.runtime.{currentMirror=>cm}
+
+scala> import scala.reflect.api.{Universe => ApiUniverse}
+import scala.reflect.api.{Universe=>ApiUniverse}
+
+scala> class A
+defined class A
+
+scala> lazy val apiru = ru: ApiUniverse
+apiru: scala.reflect.api.Universe = <lazy>
+
+scala> apiru.typeTag[A].in(cm)
+res0: reflect.runtime.universe.TypeTag[A] = TypeTag[A]
+
+scala>
+
+scala>
diff --git a/test/files/run/t6937.scala b/test/files/run/t6937.scala
new file mode 100644
index 0000000000..4b30894bf3
--- /dev/null
+++ b/test/files/run/t6937.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ def code = """
+ import scala.reflect.runtime.{universe => ru}
+ import scala.reflect.runtime.{currentMirror => cm}
+ import scala.reflect.api.{Universe => ApiUniverse}
+ class A
+ lazy val apiru = ru: ApiUniverse
+ apiru.typeTag[A].in(cm)
+ """
+} \ No newline at end of file
diff --git a/test/files/run/t7074.check b/test/files/run/t7074.check
new file mode 100644
index 0000000000..ab9cf11f16
--- /dev/null
+++ b/test/files/run/t7074.check
@@ -0,0 +1,9 @@
+<a/>
+<a b="2" c="3" d="1"/>
+<a b="2" c="4" d="1" e="3" f="5"/>
+<a b="5" c="4" d="3" e="2" f="1"/>
+<a b="1" c="2" d="3" e="4" f="5"/>
+<a a:b="2" a:c="3" a:d="1"/>
+<a a:b="2" a:c="4" a:d="1" a:e="3" a:f="5"/>
+<a a:b="5" a:c="4" a:d="3" a:e="2" a:f="1"/>
+<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>
diff --git a/test/files/run/t7074.scala b/test/files/run/t7074.scala
new file mode 100644
index 0000000000..693a076a7a
--- /dev/null
+++ b/test/files/run/t7074.scala
@@ -0,0 +1,15 @@
+import scala.xml.Utility.sort
+
+object Test extends App {
+ println(sort(<a/>))
+ println(sort(<a d="1" b="2" c="3"/>))
+ println(sort(<a d="1" b="2" e="3" c="4" f="5"/>))
+ println(sort(<a f="1" e="2" d="3" c="4" b="5"/>))
+ println(sort(<a b="1" c="2" d="3" e="4" f="5"/>))
+
+ println(sort(<a a:d="1" a:b="2" a:c="3"/>))
+ println(sort(<a a:d="1" a:b="2" a:e="3" a:c="4" a:f="5"/>))
+ println(sort(<a a:f="1" a:e="2" a:d="3" a:c="4" a:b="5"/>))
+ println(sort(<a a:b="1" a:c="2" a:d="3" a:e="4" a:f="5"/>))
+}
+
diff --git a/test/files/run/t7171.scala b/test/files/run/t7171.scala
new file mode 100644
index 0000000000..97585b9860
--- /dev/null
+++ b/test/files/run/t7171.scala
@@ -0,0 +1,22 @@
+trait T {
+ final case class A()
+
+ // Was:
+ // error: scrutinee is incompatible with pattern type;
+ // found : T.this.A
+ // required: T#A
+ def foo(a: T#A) = a match {
+ case _: A => true; case _ => false
+ }
+}
+
+object Test extends App {
+ val t1 = new T {}
+ val t2 = new T {}
+ val a1 = new t1.A()
+ val a2 = new t1.A()
+ assert(t1.foo(a1))
+ // as noted in the unchecked warning (tested in the corresponding neg test),
+ // the outer pointer isn't checked
+ assert(t1.foo(a2))
+}
diff --git a/test/files/run/t7185.check b/test/files/run/t7185.check
new file mode 100644
index 0000000000..455c1aa3b7
--- /dev/null
+++ b/test/files/run/t7185.check
@@ -0,0 +1,34 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBox
+
+scala> import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe._
+
+scala> object O { def apply() = 0 }
+defined module O
+
+scala> val ORef = reify { O }.tree
+ORef: reflect.runtime.universe.Tree = $read.O
+
+scala> val tree = Apply(Block(Nil, Block(Nil, ORef)), Nil)
+tree: reflect.runtime.universe.Apply =
+{
+ {
+ $read.O
+ }
+}()
+
+scala> {val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typeCheck(tree): Any}
+res0: Any =
+{
+ {
+ $read.O.apply()
+ }
+}
+
+scala>
diff --git a/test/files/run/t7185.scala b/test/files/run/t7185.scala
new file mode 100644
index 0000000000..d9d913e78a
--- /dev/null
+++ b/test/files/run/t7185.scala
@@ -0,0 +1,12 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ override def code = """
+import scala.tools.reflect.ToolBox
+import scala.reflect.runtime.universe._
+object O { def apply() = 0 }
+val ORef = reify { O }.tree
+val tree = Apply(Block(Nil, Block(Nil, ORef)), Nil)
+{val tb = reflect.runtime.currentMirror.mkToolBox(); tb.typeCheck(tree): Any}
+"""
+}
diff --git a/test/files/run/t7200.scala b/test/files/run/t7200.scala
new file mode 100644
index 0000000000..ba342df14d
--- /dev/null
+++ b/test/files/run/t7200.scala
@@ -0,0 +1,34 @@
+import language.higherKinds
+
+object Test extends App {
+
+ // Slice of comonad is where this came up
+ trait Foo[F[_]] {
+ def coflatMap[A, B](f: F[A] => B): F[A] => F[B]
+ }
+
+ // A non-empty list
+ case class Nel[A](head: A, tail: List[A])
+
+ object NelFoo extends Foo[Nel] {
+
+ // It appears that the return type for recursive calls is not inferred
+ // properly, yet no warning is issued. Providing a return type or
+ // type arguments for the recursive call fixes the problem.
+
+ def coflatMap[A, B](f: Nel[A] => B) = // ok w/ return type
+ l => Nel(f(l), l.tail match {
+ case Nil => Nil
+ case h :: t => {
+ val r = coflatMap(f)(Nel(h, t)) // ok w/ type args
+ r.head :: r.tail
+ }
+ })
+ }
+
+ // Without a recursive call all is well, but with recursion we get a
+ // ClassCastException from Integer to Nothing
+ NelFoo.coflatMap[Int, Int](_.head + 1)(Nel(1, Nil)) // Ok
+ NelFoo.coflatMap[Int, Int](_.head + 1)(Nel(1, List(2))) // CCE
+
+}
diff --git a/test/files/run/t7214.scala b/test/files/run/t7214.scala
new file mode 100644
index 0000000000..ff1ea8082d
--- /dev/null
+++ b/test/files/run/t7214.scala
@@ -0,0 +1,57 @@
+// pattern matcher crashes here trying to synthesize an uneeded outer test.
+// no-symbol does not have an owner
+// at scala.reflect.internal.SymbolTable.abort(SymbolTable.scala:49)
+// at scala.tools.nsc.Global.abort(Global.scala:253)
+// at scala.reflect.internal.Symbols$NoSymbol.owner(Symbols.scala:3248)
+// at scala.reflect.internal.Symbols$Symbol.effectiveOwner(Symbols.scala:678)
+// at scala.reflect.internal.Symbols$Symbol.isDefinedInPackage(Symbols.scala:664)
+// at scala.reflect.internal.TreeGen.mkAttributedSelect(TreeGen.scala:188)
+// at scala.reflect.internal.TreeGen.mkAttributedRef(TreeGen.scala:124)
+// at scala.tools.nsc.ast.TreeDSL$CODE$.REF(TreeDSL.scala:308)
+// at scala.tools.nsc.typechecker.PatternMatching$TreeMakers$TypeTestTreeMaker$treeCondStrategy$.outerTest(PatternMatching.scala:1209)
+class Crash {
+ type Alias = C#T
+
+ val c = new C
+ val t = new c.T
+
+ // Crash via a Typed Pattern...
+ (t: Any) match {
+ case e: Alias =>
+ }
+
+ // ... or via a Typed Extractor Pattern.
+ object Extractor {
+ def unapply(a: Alias): Option[Any] = None
+ }
+ (t: Any) match {
+ case Extractor() =>
+ case _ =>
+ }
+
+ // checking that correct outer tests are applied when
+ // aliases for path dependent types are involved.
+ val c2 = new C
+ type CdotT = c.T
+ type C2dotT = c2.T
+
+ val outerField = t.getClass.getDeclaredFields.find(_.getName contains ("outer")).get
+ outerField.setAccessible(true)
+
+ (t: Any) match {
+ case _: C2dotT =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c2 = $c2") // this matches on 2.10.0
+ case _: CdotT =>
+ case _ =>
+ println(s"!!! wrong match. t.outer=${outerField.get(t)} / c = $c")
+ }
+}
+
+class C {
+ class T
+}
+
+object Test extends App {
+ new Crash
+}
+
diff --git a/test/files/run/t7215.scala b/test/files/run/t7215.scala
new file mode 100644
index 0000000000..c93e97f9c8
--- /dev/null
+++ b/test/files/run/t7215.scala
@@ -0,0 +1,6 @@
+object Test extends App {
+ List[List[Any]]().transpose.isEmpty
+ Array[Array[Any]]().transpose.isEmpty
+ Vector[Vector[Any]]().transpose.isEmpty
+ Stream[Stream[Any]]().transpose.isEmpty
+}
diff --git a/test/files/run/t7235.check b/test/files/run/t7235.check
new file mode 100644
index 0000000000..9cb9c55a0c
--- /dev/null
+++ b/test/files/run/t7235.check
@@ -0,0 +1,4 @@
+C
+List(C)
+private val _ = _
+List()
diff --git a/test/files/run/t7235.scala b/test/files/run/t7235.scala
new file mode 100644
index 0000000000..6039189716
--- /dev/null
+++ b/test/files/run/t7235.scala
@@ -0,0 +1,14 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+class C
+
+object Test extends App {
+ val Block(List(ValDef(_, _, tpt: CompoundTypeTree, _)), _) = reify{ val x: C{} = ??? }.tree
+ println(tpt)
+ println(tpt.templ.parents)
+ println(tpt.templ.self)
+ println(tpt.templ.body)
+}
diff --git a/test/files/run/t7240.check b/test/files/run/t7240.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t7240.check
diff --git a/test/files/run/t7240/Macros_1.scala b/test/files/run/t7240/Macros_1.scala
new file mode 100644
index 0000000000..6465e18760
--- /dev/null
+++ b/test/files/run/t7240/Macros_1.scala
@@ -0,0 +1,48 @@
+package bakery
+
+import scala.language.experimental.macros
+import scala.reflect.macros.Context
+
+trait FailureCake {
+ implicit def liftAnyFails[T: Manifest]: Any = ???
+
+ // This works
+ // implicit def liftAny[T]: Any = ???
+}
+
+object Bakery {
+
+ def failure: Any = macro failureImpl
+ def failureImpl(c: Context): c.Expr[Any] = {
+ import c.universe._
+
+ def dslTrait(dslName: String) = {
+ val names = dslName.split("\\.").toList.reverse
+ assert(names.length >= 1, "DSL trait name must be in the valid format. DSL trait name is " + dslName)
+
+ val tpeName = newTypeName(names.head)
+ names.tail.reverse match {
+ case head :: tail ⇒
+ Select(tail.foldLeft[Tree](Ident(newTermName(head)))((tree, name) ⇒ Select(tree, newTermName(name))), tpeName)
+ case Nil ⇒
+ Ident(tpeName)
+ }
+ }
+
+ def composeDSL(transformedBody: Tree) =
+ ClassDef(Modifiers(), newTypeName("eval"), List(), Template(
+ List(dslTrait("bakery.FailureCake")),
+ emptyValDef,
+ List(
+ DefDef(Modifiers(), nme.CONSTRUCTOR, List(), List(List()), TypeTree(),
+ Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(())))),
+ DefDef(Modifiers(), newTermName("main"), List(), List(List()), Ident(newTypeName("Any")), transformedBody))))
+
+ def constructor = Apply(Select(New(Ident(newTypeName("eval"))), nme.CONSTRUCTOR), List())
+
+ c.eval(c.Expr[Any](
+ c.resetAllAttrs(Block(composeDSL(Literal(Constant(1))), constructor))))
+
+ c.Expr[Any](Literal(Constant(1)))
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t7240/Test_2.scala b/test/files/run/t7240/Test_2.scala
new file mode 100644
index 0000000000..2450bdabf9
--- /dev/null
+++ b/test/files/run/t7240/Test_2.scala
@@ -0,0 +1,3 @@
+object Test extends App {
+ bakery.Bakery.failure
+} \ No newline at end of file
diff --git a/test/files/run/t7242.scala b/test/files/run/t7242.scala
new file mode 100644
index 0000000000..c995336144
--- /dev/null
+++ b/test/files/run/t7242.scala
@@ -0,0 +1,71 @@
+class CrashTest {
+ def foo = ()
+ trait CrashTestTable {
+ def cols = foo
+ }
+ // This was leading to a class between the mixed in
+ // outer accessor and the outer accessor of this object.
+ object CrashTestTable extends CrashTestTable {
+ foo
+ cols
+ }
+}
+
+class CrashTest1 {
+ def foo = ()
+ class CrashTestTable {
+ def cols = foo
+ }
+ object CrashTestTable extends CrashTestTable {
+ foo
+ cols
+ }
+}
+
+class CrashTest2 {
+ def foo = ()
+ trait CrashTestTable {
+ def cols = foo
+ }
+ object Obj extends CrashTestTable {
+ foo
+ cols
+ }
+}
+
+class CrashTest3 {
+ def foo = ()
+
+ def meth() {
+ trait CrashTestTable {
+ def cols = foo
+ }
+ object Obj extends CrashTestTable {
+ foo
+ cols
+ }
+ Obj
+ }
+}
+
+object Test extends App {
+ {
+ val c = new CrashTest
+ c.CrashTestTable
+ }
+
+ {
+ val c = new CrashTest1
+ c.CrashTestTable
+ }
+
+ {
+ val c = new CrashTest2
+ c.Obj
+ }
+
+ {
+ val c = new CrashTest3
+ c.meth()
+ }
+}
diff --git a/test/files/run/t7246.check b/test/files/run/t7246.check
new file mode 100755
index 0000000000..ce01362503
--- /dev/null
+++ b/test/files/run/t7246.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/t7246/Outer.java b/test/files/run/t7246/Outer.java
new file mode 100755
index 0000000000..163276fb3b
--- /dev/null
+++ b/test/files/run/t7246/Outer.java
@@ -0,0 +1,4 @@
+public class Outer {
+ public class Inner {
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t7246/Test.scala b/test/files/run/t7246/Test.scala
new file mode 100755
index 0000000000..9f23ca8f3a
--- /dev/null
+++ b/test/files/run/t7246/Test.scala
@@ -0,0 +1,16 @@
+object Test extends App {
+
+ val so = new SubOuter
+ val si = new so.SubInner
+ println(si.bar)
+}
+
+class SubOuter extends Outer {
+
+ val foo = "hello"
+
+ class SubInner extends Inner {
+ def bar = foo
+ }
+
+} \ No newline at end of file
diff --git a/test/files/run/t7246b.check b/test/files/run/t7246b.check
new file mode 100755
index 0000000000..5073bd8617
--- /dev/null
+++ b/test/files/run/t7246b.check
@@ -0,0 +1,2 @@
+base
+sub
diff --git a/test/files/run/t7246b/Base.scala b/test/files/run/t7246b/Base.scala
new file mode 100755
index 0000000000..4e71d3313d
--- /dev/null
+++ b/test/files/run/t7246b/Base.scala
@@ -0,0 +1,7 @@
+class Base {
+ val baseOuter = "base"
+
+ class BaseInner {
+ val baseInner = baseOuter
+ }
+}
diff --git a/test/files/run/t7246b/Outer.java b/test/files/run/t7246b/Outer.java
new file mode 100755
index 0000000000..53a79316ef
--- /dev/null
+++ b/test/files/run/t7246b/Outer.java
@@ -0,0 +1,4 @@
+public class Outer extends Base {
+ public class Inner extends BaseInner {
+ }
+} \ No newline at end of file
diff --git a/test/files/run/t7246b/Test.scala b/test/files/run/t7246b/Test.scala
new file mode 100755
index 0000000000..f0982ea8d0
--- /dev/null
+++ b/test/files/run/t7246b/Test.scala
@@ -0,0 +1,14 @@
+object Test extends App {
+
+ val so = new SubOuter
+ val si = new so.SubInner
+ println(si.baseInner)
+ println(si.subInner)
+}
+
+class SubOuter extends Outer {
+ val subOuter = "sub"
+ class SubInner extends Inner {
+ def subInner = subOuter
+ }
+}
diff --git a/test/files/run/t7249.check b/test/files/run/t7249.check
new file mode 100644
index 0000000000..7777e0a5a2
--- /dev/null
+++ b/test/files/run/t7249.check
@@ -0,0 +1 @@
+Yup!
diff --git a/test/files/run/t7249.scala b/test/files/run/t7249.scala
new file mode 100644
index 0000000000..375df5c3ad
--- /dev/null
+++ b/test/files/run/t7249.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ def bnToLambda(s: => String): () => String = () => s
+ var x: () => String = () => sys.error("Nope")
+ val y = bnToLambda { x() }
+ x = () => "Yup!"
+ println(y())
+}
diff --git a/test/files/run/t7271.check b/test/files/run/t7271.check
new file mode 100644
index 0000000000..01632eca2c
--- /dev/null
+++ b/test/files/run/t7271.check
@@ -0,0 +1,12 @@
+[[syntax trees at end of parser]] // newSource1
+[0:91]package [0:0]<empty> {
+ [0:91]class C extends [8:91][91]scala.AnyRef {
+ [8]def <init>() = [8]{
+ [8][8][8]super.<init>();
+ [8]()
+ };
+ [16:44]def quote = [28:44]<28:44><28:44>[28]StringContext([30:34]"foo", [40:44]"baz").s([35:39]this);
+ [51:85]def tripleQuote = [69:85]<69:85><69:85>[69]StringContext([71:75]"foo", [81:85]"baz").s([76:80]this)
+ }
+}
+
diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala
new file mode 100644
index 0000000000..6fccf14d20
--- /dev/null
+++ b/test/files/run/t7271.scala
@@ -0,0 +1,34 @@
+import scala.tools.partest._
+import java.io._
+import scala.tools.nsc._
+import scala.tools.nsc.util.CommandLineParser
+import scala.tools.nsc.{Global, Settings, CompilerCommand}
+import scala.tools.nsc.reporters.ConsoleReporter
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:parser -Ystop-after:parser -d " + testOutput.path
+
+ override def code = """
+ class C {
+ def quote = s"foo${this}baz"
+ def tripleQuote = s"foo${this}baz"
+ }
+ """.trim
+
+ override def show(): Unit = {
+ // redirect err to out, for logging
+ val prevErr = System.err
+ System.setErr(System.out)
+ compile()
+ System.setErr(prevErr)
+ }
+
+ override def newCompiler(args: String*): Global = {
+
+ val settings = new Settings()
+ settings.Xprintpos.value = true
+ val command = new CompilerCommand((CommandLineParser tokenize extraSettings) ++ args.toList, settings)
+ new Global(command.settings, new ConsoleReporter(settings)) with interactive.RangePositions
+ }
+}
diff --git a/test/files/run/t7290.scala b/test/files/run/t7290.scala
new file mode 100644
index 0000000000..01f7e8f68e
--- /dev/null
+++ b/test/files/run/t7290.scala
@@ -0,0 +1,9 @@
+object Test extends App {
+ val y = (0: Int) match {
+ case 1 => 1
+ case 0 | 0 => 0
+ case 2 | 2 | 2 | 3 | 2 | 3 => 0
+ case _ => -1
+ }
+ assert(y == 0, y)
+}
diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check
new file mode 100644
index 0000000000..966736915e
--- /dev/null
+++ b/test/files/run/t7319.check
@@ -0,0 +1,38 @@
+Type in expressions to have them evaluated.
+Type :help for more information.
+
+scala>
+
+scala> class M[A]
+defined class M
+
+scala> implicit def ma0[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma0: [A](a: A)M[A]
+
+scala> implicit def ma1[A](a: A): M[A] = null
+warning: there were 1 feature warning(s); re-run with -feature for details
+ma1: [A](a: A)M[A]
+
+scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+warning: there were 1 feature warning(s); re-run with -feature for details
+convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int
+
+scala> convert(Some[Int](0))
+<console>:12: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int])
+ --- because ---
+argument expression's type is not compatible with formal parameter type;
+ found : Some[Int]
+ required: ?F forSome { type _$1 <: ?F forSome { type _$2 } }
+ convert(Some[Int](0))
+ ^
+<console>:12: error: type mismatch;
+ found : Some[Int]
+ required: F[_ <: F[_]]
+ convert(Some[Int](0))
+ ^
+
+scala> 0
+res1: Int = 0
+
+scala>
diff --git a/test/files/run/t7319.scala b/test/files/run/t7319.scala
new file mode 100644
index 0000000000..23ffeb977d
--- /dev/null
+++ b/test/files/run/t7319.scala
@@ -0,0 +1,13 @@
+import scala.tools.partest.ReplTest
+
+object Test extends ReplTest {
+ // so we can provide the ambiguities, rather than relying in Predef implicits
+ override def extraSettings = "-Yno-predef"
+ override def code = """
+class M[A]
+implicit def ma0[A](a: A): M[A] = null
+implicit def ma1[A](a: A): M[A] = null
+def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0
+convert(Some[Int](0))
+0""" // before the fix, this line, and all that followed, re-issued the implicit ambiguity error.
+}
diff --git a/test/files/run/t7325.check b/test/files/run/t7325.check
new file mode 100644
index 0000000000..3c7652f42c
--- /dev/null
+++ b/test/files/run/t7325.check
@@ -0,0 +1,19 @@
+%
+%%
+%%%
+%n
+%
+
+%%n
+%%
+
+%%%n
+%%%
+
+0
+0%d
+0%%d
+0
+
+0
+
diff --git a/test/files/run/t7325.scala b/test/files/run/t7325.scala
new file mode 100644
index 0000000000..26f6bc6ef7
--- /dev/null
+++ b/test/files/run/t7325.scala
@@ -0,0 +1,25 @@
+object Test extends App {
+ // println(f"%")
+ println(f"%%")
+ // println(f"%%%")
+ println(f"%%%%")
+ // println(f"%%%%%")
+ println(f"%%%%%%")
+
+ println(f"%%n")
+ println(f"%%%n")
+ println(f"%%%%n")
+ println(f"%%%%%n")
+ println(f"%%%%%%n")
+ println(f"%%%%%%%n")
+
+ // println(f"${0}%")
+ println(f"${0}%d")
+ println(f"${0}%%d")
+ // println(f"${0}%%%d")
+ println(f"${0}%%%%d")
+ // println(f"${0}%%%%%d")
+
+ println(f"${0}%n")
+ println(f"${0}%d%n")
+} \ No newline at end of file
diff --git a/test/files/run/t7341.check b/test/files/run/t7341.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/run/t7341.check
diff --git a/test/files/run/t7341.flags b/test/files/run/t7341.flags
new file mode 100755
index 0000000000..ae08446055
--- /dev/null
+++ b/test/files/run/t7341.flags
@@ -0,0 +1 @@
+-Xcheckinit \ No newline at end of file
diff --git a/test/files/run/t7341.scala b/test/files/run/t7341.scala
new file mode 100755
index 0000000000..dc526c6c19
--- /dev/null
+++ b/test/files/run/t7341.scala
@@ -0,0 +1,15 @@
+object Obj {
+ private var cache: Any = ()
+ def returning(f: () => Unit) = ()
+ def foo {
+ returning(() => cache = ())
+ }
+
+ def apply(): Any = {
+ cache
+ }
+}
+
+object Test extends App {
+ Obj()
+}
diff --git a/test/files/run/t7398.scala b/test/files/run/t7398.scala
new file mode 100644
index 0000000000..e4090f7db3
--- /dev/null
+++ b/test/files/run/t7398.scala
@@ -0,0 +1,31 @@
+import scala.tools.partest._
+
+object Test extends CompilerTest {
+ import global._
+
+ def javaVersion = scala.util.Properties.javaVersion
+ def isJavaEight = javaVersion startsWith "1.8"
+ // This way we auto-pass on non-java8 since there's nothing to check
+ override lazy val units = {
+ val res: List[CompilationUnit] = if (isJavaEight) javaCompilationUnits(global)(defaultMethodSource) else Nil
+ val word = if (isJavaEight) "Attempting" else "Skipping"
+ log(s"$word java8-specific test under java version $javaVersion")
+ res
+ }
+
+ private def defaultMethodSource = """
+public interface Iterator<E> {
+ boolean hasNext();
+ E next();
+ default void remove() {
+ throw new UnsupportedOperationException("remove");
+ }
+ default void forEachRemaining(Consumer<? super E> action) {
+ throw new UnsupportedOperationException("forEachRemaining");
+ }
+}
+ """
+
+ // We're only checking we can parse it.
+ def check(source: String, unit: global.CompilationUnit): Unit = ()
+}
diff --git a/test/files/run/test-cpp.check b/test/files/run/test-cpp.check
index bfea438c60..13f4c64be3 100644
--- a/test/files/run/test-cpp.check
+++ b/test/files/run/test-cpp.check
@@ -1,65 +1,81 @@
-37c37
-< locals: value args, value x, value y
----
-> locals: value args
-42,43d41
-< 52 CONSTANT(2)
-< 52 STORE_LOCAL(value x)
-45,46d42
-< 53 LOAD_LOCAL(value x)
-< 53 STORE_LOCAL(value y)
-49c45
-< 54 LOAD_LOCAL(value y)
----
-> 54 CONSTANT(2)
-92c88
-< locals: value args, value x, value y
----
-> locals: value args, value x
-101,102d96
-< 82 LOAD_LOCAL(value x)
-< 82 STORE_LOCAL(value y)
-105c99
-< 83 LOAD_LOCAL(value y)
----
-> 83 LOAD_LOCAL(value x)
-135c129
-< locals: value args, value x, value y
----
-> locals: value args
-140,141d133
-< 66 THIS(TestAliasChainDerefThis)
-< 66 STORE_LOCAL(value x)
-143,144d134
-< 67 LOAD_LOCAL(value x)
-< 67 STORE_LOCAL(value y)
-147c137
-< 68 LOAD_LOCAL(value y)
----
-> 68 THIS(Object)
-176c166
-< locals: value x, value y
----
-> locals: value x
-181,182d170
-< 29 LOAD_LOCAL(value x)
-< 29 STORE_LOCAL(value y)
-185c173
-< 30 LOAD_LOCAL(value y)
----
-> 30 LOAD_LOCAL(value x)
-223,224d210
-< 97 LOAD_LOCAL(variable x)
-< 97 STORE_LOCAL(variable y)
-227c213
-< 98 LOAD_LOCAL(variable y)
----
-> 98 LOAD_LOCAL(variable x)
-233,234d218
-< 101 LOAD_LOCAL(variable y)
-< 101 STORE_LOCAL(variable x)
-236c220
-< 102 LOAD_LOCAL(variable x)
----
-> 102 LOAD_LOCAL(variable y)
-
+--- a
++++ b
+@@ -36,3 +36,3 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, value x, value y
++ locals: value args
+ startBlock: 1
+@@ -41,10 +41,6 @@
+ 1:
+- 52 CONSTANT(2)
+- 52 STORE_LOCAL(value x)
+ 52 SCOPE_ENTER value x
+- 53 LOAD_LOCAL(value x)
+- 53 STORE_LOCAL(value y)
+ 53 SCOPE_ENTER value y
+ 54 LOAD_MODULE object Predef
+- 54 LOAD_LOCAL(value y)
++ 54 CONSTANT(2)
+ 54 BOX INT
+@@ -91,3 +87,3 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, value x, value y
++ locals: value args, value x
+ startBlock: 1
+@@ -100,7 +96,5 @@
+ 81 SCOPE_ENTER value x
+- 82 LOAD_LOCAL(value x)
+- 82 STORE_LOCAL(value y)
+ 82 SCOPE_ENTER value y
+ 83 LOAD_MODULE object Predef
+- 83 LOAD_LOCAL(value y)
++ 83 LOAD_LOCAL(value x)
+ 83 BOX INT
+@@ -134,3 +128,3 @@
+ def main(args: Array[String] (ARRAY[REF(class String)])): Unit {
+- locals: value args, value x, value y
++ locals: value args
+ startBlock: 1
+@@ -139,10 +133,6 @@
+ 1:
+- 66 THIS(TestAliasChainDerefThis)
+- 66 STORE_LOCAL(value x)
+ 66 SCOPE_ENTER value x
+- 67 LOAD_LOCAL(value x)
+- 67 STORE_LOCAL(value y)
+ 67 SCOPE_ENTER value y
+ 68 LOAD_MODULE object Predef
+- 68 LOAD_LOCAL(value y)
++ 68 THIS(Object)
+ 68 CALL_METHOD scala.Predef.println (dynamic)
+@@ -175,3 +165,3 @@
+ def test(x: Int (INT)): Unit {
+- locals: value x, value y
++ locals: value x
+ startBlock: 1
+@@ -180,7 +170,5 @@
+ 1:
+- 29 LOAD_LOCAL(value x)
+- 29 STORE_LOCAL(value y)
+ 29 SCOPE_ENTER value y
+ 30 LOAD_MODULE object Predef
+- 30 LOAD_LOCAL(value y)
++ 30 LOAD_LOCAL(value x)
+ 30 BOX INT
+@@ -222,7 +210,5 @@
+ 96 SCOPE_ENTER variable x
+- 97 LOAD_LOCAL(variable x)
+- 97 STORE_LOCAL(variable y)
+ 97 SCOPE_ENTER variable y
+ 98 LOAD_MODULE object Predef
+- 98 LOAD_LOCAL(variable y)
++ 98 LOAD_LOCAL(variable x)
+ 98 BOX INT
+@@ -232,6 +218,4 @@
+ 100 STORE_LOCAL(variable y)
+- 101 LOAD_LOCAL(variable y)
+- 101 STORE_LOCAL(variable x)
+ 102 LOAD_MODULE object Predef
+- 102 LOAD_LOCAL(variable x)
++ 102 LOAD_LOCAL(variable y)
+ 102 BOX INT
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
index 8c558ced60..84e5435afe 100644
--- a/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
+++ b/test/files/run/typetags_without_scala_reflect_typetag_lookup.check
@@ -1,2 +1,2 @@
-pos: source-newSource1,line-9,offset=466 could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR
+pos: source-newSource1.scala,line-9,offset=466 could not find implicit value for evidence parameter of type reflect.runtime.package.universe.TypeTag[Int] ERROR
diff --git a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
index acfecce628..8c9d07d836 100644
--- a/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
+++ b/test/files/run/typetags_without_scala_reflect_typetag_manifest_interop.check
@@ -1,2 +1,2 @@
-pos: source-newSource1,line-9,offset=479 No Manifest available for App.this.T. ERROR
+pos: source-newSource1.scala,line-9,offset=479 No Manifest available for App.this.T. ERROR
diff --git a/test/partest b/test/partest
index dd57137b21..842d2903b7 100755
--- a/test/partest
+++ b/test/partest
@@ -53,8 +53,8 @@ if [ -z "$EXT_CLASSPATH" ] ; then
fi
done
elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
- for lib in `echo "partest library reflect compiler"`; do
- ext="$SCALA_HOME/build/pack/lib/scala-$lib.jar"
+ for lib in `echo "scala-partest scala-library scala-reflect scala-compiler diffutils"`; do
+ ext="$SCALA_HOME/build/pack/lib/$lib.jar"
if [ -z "$EXT_CLASSPATH" ] ; then
EXT_CLASSPATH="$ext"
else
diff --git a/test/pending/run/t6387.check b/test/pending/run/t6387.check
new file mode 100644
index 0000000000..83b33d238d
--- /dev/null
+++ b/test/pending/run/t6387.check
@@ -0,0 +1 @@
+1000
diff --git a/test/pending/run/t6387.scala b/test/pending/run/t6387.scala
new file mode 100644
index 0000000000..bbebb5f511
--- /dev/null
+++ b/test/pending/run/t6387.scala
@@ -0,0 +1,16 @@
+trait A {
+ def foo: Long
+}
+
+object Test {
+ def a(): A = new A {
+ var foo: Long = 1000L
+
+ val test = () => {
+ foo = 28
+ }
+ }
+ def main(args: Array[String]) {
+ println(a().foo)
+ }
+}
diff --git a/test/scaladoc/run/SI-6580.check b/test/scaladoc/run/SI-6580.check
new file mode 100644
index 0000000000..2fb6ec3258
--- /dev/null
+++ b/test/scaladoc/run/SI-6580.check
@@ -0,0 +1,11 @@
+Chain(List(Chain(List(Text(Here z(1) is defined as follows:), Text(
+), HtmlTag(<br>), Text(
+), Text( ), HtmlTag(<img src='http://example.com/fig1.png'>), Text(
+), HtmlTag(<br>), Text(
+), Text(plus z(1) times), Text(
+), HtmlTag(<br>), Text(
+), Text( ), HtmlTag(<img src='http://example.com/fig2.png'>), Text(
+), HtmlTag(<br>), Text(
+), Text(equals QL of something
+)))))
+Done.
diff --git a/test/scaladoc/run/SI-6580.scala b/test/scaladoc/run/SI-6580.scala
new file mode 100644
index 0000000000..c544138f44
--- /dev/null
+++ b/test/scaladoc/run/SI-6580.scala
@@ -0,0 +1,32 @@
+import scala.tools.nsc.doc
+import scala.tools.nsc.doc.model._
+import scala.tools.nsc.doc.html.page.{Index, ReferenceIndex}
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def scaladocSettings = ""
+ override def code = """
+
+ object Test {
+ /** Here z(1) is defined as follows:
+ * <br>
+ * <img src='http://example.com/fig1.png'>
+ * <br>
+ * plus z(1) times
+ * <br>
+ * <img src='http://example.com/fig2.png'>
+ * <br>
+ * equals QL of something
+ */
+ def f = 1
+ }
+
+ """
+
+ def testModel(rootPackage: Package) {
+ import access._
+
+ val f = rootPackage._object("Test")._method("f")
+ println(f.comment.get.short)
+ }
+}
diff --git a/test/scaladoc/run/SI-6715.check b/test/scaladoc/run/SI-6715.check
new file mode 100644
index 0000000000..619c56180b
--- /dev/null
+++ b/test/scaladoc/run/SI-6715.check
@@ -0,0 +1 @@
+Done.
diff --git a/test/scaladoc/run/SI-6715.scala b/test/scaladoc/run/SI-6715.scala
new file mode 100644
index 0000000000..92d3376234
--- /dev/null
+++ b/test/scaladoc/run/SI-6715.scala
@@ -0,0 +1,15 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ def scaladocSettings = ""
+
+ override def code = "object A { def $$ = 123 }"
+
+ def testModel(rootPackage: Package) = {
+ import access._
+
+ val method = rootPackage._object("A")._method("$$")
+ assert(method != null)
+ }
+}
diff --git a/test/scaladoc/run/SI-7367.check b/test/scaladoc/run/SI-7367.check
new file mode 100755
index 0000000000..3925a0d464
--- /dev/null
+++ b/test/scaladoc/run/SI-7367.check
@@ -0,0 +1 @@
+Done. \ No newline at end of file
diff --git a/test/scaladoc/run/SI-7367.scala b/test/scaladoc/run/SI-7367.scala
new file mode 100755
index 0000000000..6e5a317932
--- /dev/null
+++ b/test/scaladoc/run/SI-7367.scala
@@ -0,0 +1,25 @@
+import scala.tools.nsc.doc.model._
+import scala.tools.partest.ScaladocModelTest
+
+object Test extends ScaladocModelTest {
+ override def code = """
+ class annot() extends annotation.StaticAnnotation {
+ def this(a: Any) = this()
+ }
+
+ @annot(0)
+ class B
+ """
+
+ def scaladocSettings = ""
+
+ def testModel(root: Package) = {
+ import access._
+ val annotations = root._class("B").annotations
+ assert(annotations.size == 1)
+ assert(annotations(0).annotationClass == root._class("annot"))
+ val args = annotations(0).arguments
+ assert(args.size == 1)
+ assert(args(0).value.expression == "0")
+ }
+}
diff --git a/test/scaladoc/scalacheck/IndexTest.scala b/test/scaladoc/scalacheck/IndexTest.scala
index bf385898fc..e114767264 100644
--- a/test/scaladoc/scalacheck/IndexTest.scala
+++ b/test/scaladoc/scalacheck/IndexTest.scala
@@ -71,14 +71,7 @@ object Test extends Properties("Index") {
case None => false
}
}
- property("browser contants a script element") = {
- createIndex("src/compiler/scala/tools/nsc/doc/html/page/Index.scala") match {
- case Some(index) =>
- (index.browser \ "script").size == 1
- case None => false
- }
- }
property("package objects in index") = {
createIndex("test/scaladoc/resources/SI-5558.scala") match {
case Some(index) =>
diff --git a/tools/stability-test.sh b/tools/stability-test.sh
new file mode 100755
index 0000000000..f017ac0842
--- /dev/null
+++ b/tools/stability-test.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+
+declare failed
+
+echo "Comparing build/quick/classes and build/strap/classes"
+for dir in library reflect compiler; do
+ # feel free to replace by a more elegant approach -- don't know how
+ if diff -rw -x '*.css' \
+ -x '*.custom' \
+ -x '*.gif' \
+ -x '*.js' \
+ -x '*.layout' \
+ -x '*.png' \
+ -x '*.properties' \
+ -x '*.tmpl' \
+ -x '*.tooltip' \
+ -x '*.txt' \
+ -x '*.xml' \
+ build/{quick,strap}/classes/$dir
+ then
+ classes=$(find build/quick/classes/$dir -name '*.class' | wc -l)
+ printf "%8s: %5d classfiles verified identical\n" $dir $classes
+ else
+ failed=true
+ fi
+done
+
+[[ -z $failed ]] || exit 127