-rw-r--r--  .gitignore  2
-rw-r--r--  README.md  5
-rw-r--r--  bridge/src/main/scala/xsbt/ScaladocInterface.scala  72
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala  79
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java  63
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala  49
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala  34
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala  191
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala  27
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala  199
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala  32
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala  115
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala  94
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala  82
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala  28
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala  25
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala  344
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala  846
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala  84
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala  224
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/entities.scala  115
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/factories.scala  183
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/internal.scala  89
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/java.scala  223
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/json.scala  93
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/parsers.scala  98
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/model/references.scala  20
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala  92
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala  125
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala  25
-rw-r--r--  dottydoc/src/dotty/tools/dottydoc/util/mutate.scala  65
-rw-r--r--  dottydoc/test/BaseTest.scala  57
-rw-r--r--  dottydoc/test/ConstructorTest.scala  211
-rw-r--r--  dottydoc/test/PackageStructure.scala  89
-rw-r--r--  dottydoc/test/SimpleComments.scala  29
-rw-r--r--  dottydoc/test/WhitelistedStdLib.scala  45
-rw-r--r--  project/Build.scala  11
-rw-r--r--  project/plugins.sbt  2
-rw-r--r--  src/dotty/tools/dotc/Compiler.scala  4
-rw-r--r--  src/dotty/tools/dotc/ast/Desugar.scala  32
-rw-r--r--  src/dotty/tools/dotc/ast/TreeInfo.scala  14
-rw-r--r--  src/dotty/tools/dotc/ast/Trees.scala  7
-rw-r--r--  src/dotty/tools/dotc/config/ScalaSettings.scala  65
-rw-r--r--  src/dotty/tools/dotc/config/Settings.scala  4
-rw-r--r--  src/dotty/tools/dotc/core/Constraint.scala  3
-rw-r--r--  src/dotty/tools/dotc/core/Contexts.scala  28
-rw-r--r--  src/dotty/tools/dotc/core/OrderingConstraint.scala  44
-rw-r--r--  src/dotty/tools/dotc/core/TypeComparer.scala  27
-rw-r--r--  src/dotty/tools/dotc/core/TyperState.scala  21
-rw-r--r--  src/dotty/tools/dotc/core/Types.scala  22
-rw-r--r--  src/dotty/tools/dotc/core/classfile/ClassfileParser.scala  10
-rw-r--r--  src/dotty/tools/dotc/parsing/Parsers.scala  41
-rw-r--r--  src/dotty/tools/dotc/parsing/Scanners.scala  4
-rw-r--r--  src/dotty/tools/dotc/transform/ExpandSAMs.scala  3
-rw-r--r--  src/dotty/tools/dotc/transform/InterceptedMethods.scala  1
-rw-r--r--  src/dotty/tools/dotc/transform/PatternMatcher.scala  178
-rw-r--r--  src/dotty/tools/dotc/transform/PostTyper.scala  16
-rw-r--r--  src/dotty/tools/dotc/transform/TailRec.scala  8
-rw-r--r--  src/dotty/tools/dotc/transform/TreeChecker.scala  2
-rw-r--r--  src/dotty/tools/dotc/transform/TypeTestsCasts.scala  2
-rw-r--r--  src/dotty/tools/dotc/transform/patmat/Space.scala  619
-rw-r--r--  src/dotty/tools/dotc/typer/Applications.scala  94
-rw-r--r--  src/dotty/tools/dotc/typer/Checking.scala  2
-rw-r--r--  src/dotty/tools/dotc/typer/Dynamic.scala  85
-rw-r--r--  src/dotty/tools/dotc/typer/FrontEnd.scala  2
-rw-r--r--  src/dotty/tools/dotc/typer/Implicits.scala  9
-rw-r--r--  src/dotty/tools/dotc/typer/Inferencing.scala  3
-rw-r--r--  src/dotty/tools/dotc/typer/Namer.scala  57
-rw-r--r--  src/dotty/tools/dotc/typer/ProtoTypes.scala  47
-rw-r--r--  src/dotty/tools/dotc/typer/RefChecks.scala  2
-rw-r--r--  src/dotty/tools/dotc/typer/TypeAssigner.scala  38
-rw-r--r--  src/dotty/tools/dotc/typer/Typer.scala  59
-rw-r--r--  src/dotty/tools/dotc/util/DiffUtil.scala  130
-rw-r--r--  src/scala/compat/java8/JFunction.java  87
-rw-r--r--  src/scala/compat/java8/JFunction1.java  76
-rw-r--r--  src/scala/compat/java8/JFunction10.java  2
-rw-r--r--  src/scala/compat/java8/JFunction11.java  2
-rw-r--r--  src/scala/compat/java8/JFunction12.java  2
-rw-r--r--  src/scala/compat/java8/JFunction13.java  2
-rw-r--r--  src/scala/compat/java8/JFunction14.java  2
-rw-r--r--  src/scala/compat/java8/JFunction15.java  2
-rw-r--r--  src/scala/compat/java8/JFunction16.java  2
-rw-r--r--  src/scala/compat/java8/JFunction17.java  2
-rw-r--r--  src/scala/compat/java8/JFunction18.java  2
-rw-r--r--  src/scala/compat/java8/JFunction19.java  2
-rw-r--r--  src/scala/compat/java8/JFunction2.java  164
-rw-r--r--  src/scala/compat/java8/JFunction20.java  2
-rw-r--r--  src/scala/compat/java8/JFunction21.java  2
-rw-r--r--  src/scala/compat/java8/JFunction22.java  2
-rw-r--r--  src/scala/compat/java8/JFunction3.java  2
-rw-r--r--  src/scala/compat/java8/JFunction4.java  2
-rw-r--r--  src/scala/compat/java8/JFunction5.java  2
-rw-r--r--  src/scala/compat/java8/JFunction6.java  2
-rw-r--r--  src/scala/compat/java8/JFunction7.java  2
-rw-r--r--  src/scala/compat/java8/JFunction8.java  2
-rw-r--r--  src/scala/compat/java8/JFunction9.java  2
-rw-r--r--  test/dotc/build.scala  1
-rw-r--r--  test/dotc/tests.scala  1
-rw-r--r--  test/test/DottyDocParsingTests.scala  90
-rw-r--r--  tests/disabled/not-representable/pos/t3999b.scala (renamed from tests/pending/pos/t3999b.scala)  0
-rw-r--r--  tests/disabled/not-representable/pos/t5544/Api_1.scala (renamed from tests/run/t5544/Api_1.scala)  1
-rw-r--r--  tests/disabled/not-representable/pos/t5544/Test_2.scala (renamed from tests/run/t5544/Test_2.scala)  0
-rw-r--r--  tests/disabled/not-representable/pos/t7035.scala (renamed from tests/pending/pos/t7035.scala)  2
-rw-r--r--  tests/disabled/not-representable/pos/t7228.scala (renamed from tests/pending/pos/t7228.scala)  1
-rw-r--r--  tests/disabled/not-representable/pos/t8111.scala (renamed from tests/pending/pos/t8111.scala)  1
-rw-r--r--  tests/disabled/not-representable/t2337.scala (renamed from tests/pending/run/t2337.scala)  2
-rw-r--r--  tests/disabled/not-testable/t5604b/T_1.scala (renamed from tests/pending/pos/t5604b/T_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5604b/T_2.scala (renamed from tests/pending/pos/t5604b/T_2.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5604b/Test_1.scala (renamed from tests/pending/pos/t5604b/Test_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5604b/Test_2.scala (renamed from tests/pending/pos/t5604b/Test_2.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5604b/pack_1.scala (renamed from tests/pending/pos/t5604b/pack_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954a/A_1.scala (renamed from tests/pending/pos/t5954a/A_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954a/B_2.scala (renamed from tests/pending/pos/t5954a/B_2.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954b/A_1.scala (renamed from tests/pending/pos/t5954b/A_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954b/B_2.scala (renamed from tests/pending/pos/t5954b/B_2.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954c/A_1.scala (renamed from tests/pending/pos/t5954c/A_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954c/B_2.scala (renamed from tests/pending/pos/t5954c/B_2.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954d/A_1.scala (renamed from tests/pending/pos/t5954d/A_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t5954d/B_2.scala (renamed from tests/pending/pos/t5954d/B_2.scala)  0
-rw-r--r--  tests/disabled/not-testable/t8134/A_1.scala (renamed from tests/pending/pos/t8134/A_1.scala)  0
-rw-r--r--  tests/disabled/not-testable/t8134/B_2.scala (renamed from tests/pending/pos/t8134/B_2.scala)  0
-rw-r--r--  tests/disabled/structural-type/pos/depmet_implicit_oopsla_zipwith.scala (renamed from tests/pending/pos/depmet_implicit_oopsla_zipwith.scala)  0
-rw-r--r--  tests/disabled/typetags/pos/t8237b.scala (renamed from tests/pending/pos/t8237b.scala)  0
-rw-r--r--  tests/neg/applydynamic_sip.check  52
-rw-r--r--  tests/neg/applydynamic_sip.flags (renamed from tests/untried/neg/applydynamic_sip.flags)  0
-rw-r--r--  tests/neg/applydynamic_sip.scala  36
-rw-r--r--  tests/neg/emptyCatch.scala  3
-rw-r--r--  tests/neg/t6355b.check (renamed from tests/untried/neg/t6355b.check)  4
-rw-r--r--  tests/neg/t6355b.scala (renamed from tests/untried/neg/t6355b.scala)  4
-rw-r--r--  tests/neg/t6663.check (renamed from tests/untried/neg/t6663.check)  0
-rw-r--r--  tests/neg/t6663.scala (renamed from tests/untried/neg/t6663.scala)  2
-rw-r--r--  tests/neg/t6920.check (renamed from tests/untried/neg/t6920.check)  2
-rw-r--r--  tests/neg/t6920.scala (renamed from tests/untried/neg/t6920.scala)  2
-rw-r--r--  tests/neg/t7239.scala (renamed from tests/pending/pos/t7239.scala)  10
-rw-r--r--  tests/neg/t8002-nested-scope.scala (renamed from tests/pending/pos/t8002-nested-scope.scala)  2
-rw-r--r--  tests/neg/t8006.check (renamed from tests/untried/neg/t8006.check)  2
-rw-r--r--  tests/neg/t8006.scala (renamed from tests/untried/neg/t8006.scala)  2
-rw-r--r--  tests/patmat/NonAbstractSealed.check  5
-rw-r--r--  tests/patmat/NonAbstractSealed.scala  10
-rw-r--r--  tests/patmat/TwoTrait.scala  12
-rw-r--r--  tests/patmat/aladdin1055/A.scala  6
-rw-r--r--  tests/patmat/aladdin1055/Test_1.scala.ignore  5
-rw-r--r--  tests/patmat/aladdin1055/expected.check.ignore  5
-rw-r--r--  tests/patmat/enum/Day.java  4
-rw-r--r--  tests/patmat/enum/expected.check  9
-rw-r--r--  tests/patmat/enum/patmat-enum.scala  21
-rw-r--r--  tests/patmat/exhausting.check  25
-rw-r--r--  tests/patmat/exhausting.scala  58
-rw-r--r--  tests/patmat/exhaustive_heuristics.scala  26
-rw-r--r--  tests/patmat/for.scala  5
-rw-r--r--  tests/patmat/gadt.check  17
-rw-r--r--  tests/patmat/gadt.scala  58
-rw-r--r--  tests/patmat/gadt2.scala.ignore  14
-rw-r--r--  tests/patmat/gadt3.scala.ignore  10
-rw-r--r--  tests/patmat/i947.check  4
-rw-r--r--  tests/patmat/i947.scala  16
-rw-r--r--  tests/patmat/outer-ref-checks.scala  106
-rw-r--r--  tests/patmat/partial-function.scala  12
-rw-r--r--  tests/patmat/patmat-adt.check  21
-rw-r--r--  tests/patmat/patmat-adt.scala  58
-rw-r--r--  tests/patmat/patmat-extractor.scala  17
-rw-r--r--  tests/patmat/patmat-indent.check  13
-rw-r--r--  tests/patmat/patmat-indent.scala  30
-rw-r--r--  tests/patmat/patmat-ortype.check  13
-rw-r--r--  tests/patmat/patmat-ortype.scala  40
-rw-r--r--  tests/patmat/patmatexhaust-huge.check  5
-rw-r--r--  tests/patmat/patmatexhaust-huge.scala  806
-rw-r--r--  tests/patmat/patmatexhaust.check  33
-rw-r--r--  tests/patmat/patmatexhaust.scala  131
-rw-r--r--  tests/patmat/sealed-java-enums.check  5
-rw-r--r--  tests/patmat/sealed-java-enums.scala  10
-rw-r--r--  tests/patmat/t1056.scala  5
-rw-r--r--  tests/patmat/t2425.scala  15
-rw-r--r--  tests/patmat/t2442/MyEnum.java  3
-rw-r--r--  tests/patmat/t2442/MySecondEnum.java  6
-rw-r--r--  tests/patmat/t2442/expected.check  9
-rw-r--r--  tests/patmat/t2442/t2442.scala  15
-rw-r--r--  tests/patmat/t3097.scala  35
-rw-r--r--  tests/patmat/t3098/a.scala  6
-rw-r--r--  tests/patmat/t3098/b.scala  8
-rw-r--r--  tests/patmat/t3098/expected.check  5
-rw-r--r--  tests/patmat/t3111.check  8
-rw-r--r--  tests/patmat/t3111.scala  13
-rw-r--r--  tests/patmat/t3163.check  5
-rw-r--r--  tests/patmat/t3163.scala  3
-rw-r--r--  tests/patmat/t3683.scala  19
-rw-r--r--  tests/patmat/t3683a.check  5
-rw-r--r--  tests/patmat/t3683a.scala  20
-rw-r--r--  tests/patmat/t4020.scala  25
-rw-r--r--  tests/patmat/t4333.scala.ignore  7
-rw-r--r--  tests/patmat/t4408.check  5
-rw-r--r--  tests/patmat/t4408.scala  16
-rw-r--r--  tests/patmat/t4526.check  13
-rw-r--r--  tests/patmat/t4526.scala  16
-rw-r--r--  tests/patmat/t4691.check  5
-rw-r--r--  tests/patmat/t4691.scala  18
-rw-r--r--  tests/patmat/t4691_exhaust_extractor.check  13
-rw-r--r--  tests/patmat/t4691_exhaust_extractor.scala  33
-rw-r--r--  tests/patmat/t5440.check  5
-rw-r--r--  tests/patmat/t5440.scala  6
-rw-r--r--  tests/patmat/t5968.scala  7
-rw-r--r--  tests/patmat/t6008.scala  5
-rw-r--r--  tests/patmat/t6146.scala  60
-rw-r--r--  tests/patmat/t6420.check  5
-rw-r--r--  tests/patmat/t6420.scala  11
-rw-r--r--  tests/patmat/t6450.scala  9
-rw-r--r--  tests/patmat/t6582_exhaust_big.check  5
-rw-r--r--  tests/patmat/t6582_exhaust_big.scala  32
-rw-r--r--  tests/patmat/t6818.scala  11
-rw-r--r--  tests/patmat/t7020.check  17
-rw-r--r--  tests/patmat/t7020.scala  30
-rw-r--r--  tests/patmat/t7206.scala.ignore  19
-rw-r--r--  tests/patmat/t7285.check  13
-rw-r--r--  tests/patmat/t7285.scala  55
-rw-r--r--  tests/patmat/t7285a.scala  83
-rw-r--r--  tests/patmat/t7298.scala  11
-rw-r--r--  tests/patmat/t7353.scala  11
-rw-r--r--  tests/patmat/t7437.scala  17
-rw-r--r--  tests/patmat/t7466.check  5
-rw-r--r--  tests/patmat/t7466.scala  17
-rw-r--r--  tests/patmat/t7631.check  5
-rw-r--r--  tests/patmat/t7631.scala  11
-rw-r--r--  tests/patmat/t7669.check  5
-rw-r--r--  tests/patmat/t7669.scala  14
-rw-r--r--  tests/patmat/t7746.check  5
-rw-r--r--  tests/patmat/t7746.scala  5
-rw-r--r--  tests/patmat/t8068.scala  14
-rw-r--r--  tests/patmat/t8178.check  13
-rw-r--r--  tests/patmat/t8178.scala  33
-rw-r--r--  tests/patmat/t8412.check  5
-rw-r--r--  tests/patmat/t8412.scala  14
-rw-r--r--  tests/patmat/t8430.check  5
-rw-r--r--  tests/patmat/t8430.scala  19
-rw-r--r--  tests/patmat/t8511.check  5
-rw-r--r--  tests/patmat/t8511.scala  25
-rw-r--r--  tests/patmat/t8546.scala  49
-rw-r--r--  tests/patmat/t8606.scala  18
-rw-r--r--  tests/patmat/t8700a/Bar.scala  9
-rw-r--r--  tests/patmat/t8700a/Baz.java  11
-rw-r--r--  tests/patmat/t8700a/Foo.java  4
-rw-r--r--  tests/patmat/t8700a/expected.check  9
-rw-r--r--  tests/patmat/t9129.check  5
-rw-r--r--  tests/patmat/t9129.scala  29
-rw-r--r--  tests/patmat/t9232.check  5
-rw-r--r--  tests/patmat/t9232.scala  16
-rw-r--r--  tests/patmat/t9289.check  9
-rw-r--r--  tests/patmat/t9289.scala  28
-rw-r--r--  tests/patmat/t9351.check  13
-rw-r--r--  tests/patmat/t9351.scala  35
-rw-r--r--  tests/patmat/t9398.check  5
-rw-r--r--  tests/patmat/t9398.scala  13
-rw-r--r--  tests/patmat/t9399.scala  16
-rw-r--r--  tests/patmat/t9411a.scala  27
-rw-r--r--  tests/patmat/t9411b.scala  36
-rw-r--r--  tests/patmat/t9573.check  5
-rw-r--r--  tests/patmat/t9573.scala  13
-rw-r--r--  tests/patmat/t9630.scala  21
-rw-r--r--  tests/patmat/t9657.check  17
-rw-r--r--  tests/patmat/t9657.scala  62
-rw-r--r--  tests/patmat/t9672.check  5
-rw-r--r--  tests/patmat/t9672.scala  28
-rw-r--r--  tests/patmat/t9677.check  4
-rw-r--r--  tests/patmat/t9677.scala  23
-rw-r--r--  tests/patmat/t9779.check  5
-rw-r--r--  tests/patmat/t9779.scala  13
-rw-r--r--  tests/patmat/try.scala  5
-rw-r--r--  tests/patmat/tuple.scala  5
-rw-r--r--  tests/patmat/virtpatmat_apply.check  5
-rw-r--r--  tests/patmat/virtpatmat_apply.scala  7
-rw-r--r--  tests/patmat/virtpatmat_exhaust_compound.check  15
-rw-r--r--  tests/patmat/virtpatmat_exhaust_compound.scala.ignore  29
-rw-r--r--  tests/patmat/virtpatmat_reach_sealed_unsealed.check  11
-rw-r--r--  tests/patmat/virtpatmat_reach_sealed_unsealed.scala  21
-rw-r--r--  tests/pending/neg/i533/Compat.scala  7
-rw-r--r--  tests/pending/neg/i533/JA.java  5
-rw-r--r--  tests/pending/pos/contraImplicits.scala  18
-rw-r--r--  tests/pending/pos/depmet_implicit_norm_ret.scala  9
-rw-r--r--  tests/pending/pos/depsel.scala  14
-rw-r--r--  tests/pending/pos/exponential-spec.scala  4
-rw-r--r--  tests/pending/pos/generic-sigs.flags  1
-rw-r--r--  tests/pending/pos/infersingle.flags  1
-rw-r--r--  tests/pending/pos/isApplicableSafe.scala  8
-rw-r--r--  tests/pending/pos/setter-not-implicit.scala  3
-rw-r--r--  tests/pending/pos/stable.scala  11
-rw-r--r--  tests/pending/pos/t1756.scala  59
-rw-r--r--  tests/pending/pos/t3631.scala  1
-rw-r--r--  tests/pending/pos/t3960.flags  1
-rw-r--r--  tests/pending/pos/t4188.scala  1
-rw-r--r--  tests/pending/pos/t4579.flags  1
-rw-r--r--  tests/pending/pos/t4911.flags  1
-rw-r--r--  tests/pending/pos/t5029.flags  1
-rw-r--r--  tests/pending/pos/t5899.flags  1
-rw-r--r--  tests/pending/pos/t5932.flags  1
-rw-r--r--  tests/pending/pos/t6123-explaintypes-implicits.flags  1
-rw-r--r--  tests/pending/pos/t6994.flags  1
-rw-r--r--  tests/pending/pos/t7011.flags  1
-rw-r--r--  tests/pending/pos/t7285a.flags  1
-rw-r--r--  tests/pending/pos/t7296.scala  1
-rw-r--r--  tests/pending/pos/t762.scala  2
-rw-r--r--  tests/pending/pos/t8187.check  0
-rw-r--r--  tests/pending/pos/t8364.check  0
-rw-r--r--  tests/pending/pos/t8369a.check  0
-rw-r--r--  tests/pending/pos/trait-force-info.flags  1
-rw-r--r--  tests/pending/pos/virtpatmat_alts_subst.flags  1
-rw-r--r--  tests/pending/pos/virtpatmat_exist1.flags  1
-rw-r--r--  tests/pending/pos/virtpatmat_exist3.flags  1
-rw-r--r--  tests/pending/run/t2337.check  4
-rw-r--r--  tests/pending/run/t3150.scala  36
-rw-r--r--  tests/pending/run/unapply.check  3
-rw-r--r--  tests/pos-scala2/t3050.scala (renamed from tests/pending/run/t3050.scala)  0
-rw-r--r--  tests/pos-scala2/t7688.scala (renamed from tests/pending/pos/t7688.scala)  0
-rw-r--r--  tests/pos-special/t8146a.scala (renamed from tests/pending/pos/t8146a.scala)  0
-rw-r--r--  tests/pos/apply-equiv.scala (renamed from tests/pending/pos/apply-equiv.scala)  0
-rw-r--r--  tests/pos/dependent-implicits.scala  7
-rw-r--r--  tests/pos/extractor-types.scala (renamed from tests/pending/pos/extractor-types.scala)  0
-rw-r--r--  tests/pos/for-filter.scala  12
-rw-r--r--  tests/pos/gadts2.scala (renamed from tests/pending/pos/gadts2.scala)  2
-rw-r--r--  tests/pos/hk-infer.scala (renamed from tests/pending/pos/hk-infer.scala)  0
-rw-r--r--  tests/pos/i743.scala (renamed from tests/pending/pos/i743.scala)  0
-rw-r--r--  tests/pos/infersingle.scala (renamed from tests/pending/pos/infersingle.scala)  0
-rw-r--r--  tests/pos/isApplicableSafe.scala  54
-rw-r--r--  tests/pos/lazyvals.scala (renamed from tests/pending/pos/lazyvals.scala)  0
-rw-r--r--  tests/pos/matthias4.scala (renamed from tests/pending/pos/matthias4.scala)  0
-rw-r--r--  tests/pos/mixins.scala (renamed from tests/pending/pos/mixins.scala)  0
-rw-r--r--  tests/pos/return_thistype.scala (renamed from tests/pending/pos/return_thistype.scala)  0
-rw-r--r--  tests/pos/t1500a.scala  28
-rw-r--r--  tests/pos/t1513a.scala  36
-rw-r--r--  tests/pos/t1513b.scala  25
-rw-r--r--  tests/pos/t1756.scala  33
-rw-r--r--  tests/pos/t3494.scala (renamed from tests/pending/pos/t3494.scala)  0
-rw-r--r--  tests/pos/t3800.scala (renamed from tests/pending/pos/t3800.scala)  0
-rw-r--r--  tests/pos/t3862.scala (renamed from tests/pending/pos/t3862.scala)  0
-rw-r--r--  tests/pos/t3880.scala (renamed from tests/pending/pos/t3880.scala)  0
-rw-r--r--  tests/pos/t3999/a_1.scala (renamed from tests/pending/pos/t3999/a_1.scala)  2
-rw-r--r--  tests/pos/t3999/b_2.scala (renamed from tests/pending/pos/t3999/b_2.scala)  0
-rw-r--r--  tests/pos/t4269.scala (renamed from tests/pending/pos/t4269.scala)  0
-rw-r--r--  tests/pos/t5070.scala  15
-rw-r--r--  tests/pos/t5330.scala (renamed from tests/pending/pos/t5330.scala)  0
-rw-r--r--  tests/pos/t5604/ReplConfig.scala (renamed from tests/pending/pos/t5604/ReplConfig.scala)  0
-rw-r--r--  tests/pos/t5604/ReplReporter.scala (renamed from tests/pending/pos/t5604/ReplReporter.scala)  0
-rw-r--r--  tests/pos/t5726.scala (renamed from tests/pending/pos/t5726.scala)  0
-rw-r--r--  tests/pos/t5769.scala (renamed from tests/pending/pos/t5769.scala)  0
-rw-r--r--  tests/pos/t578.scala (renamed from tests/pending/pos/t578.scala)  0
-rw-r--r--  tests/pos/t5899.scala (renamed from tests/pending/pos/t5899.scala)  0
-rw-r--r--  tests/pos/t6084.scala (renamed from tests/pending/pos/t6084.scala)  0
-rw-r--r--  tests/pos/t6722.scala (renamed from tests/pending/pos/t6722.scala)  0
-rw-r--r--  tests/pos/t6815_import.scala (renamed from tests/pending/pos/t6815_import.scala)  0
-rw-r--r--  tests/pos/t6948.scala (renamed from tests/pending/pos/t6948.scala)  0
-rw-r--r--  tests/pos/t7294.scala (renamed from tests/pending/pos/t7294.scala)  0
-rw-r--r--  tests/pos/t7426.scala (renamed from tests/pending/pos/t7426.scala)  0
-rw-r--r--  tests/pos/t7517.scala (renamed from tests/pending/pos/t7517.scala)  0
-rw-r--r--  tests/pos/t762.scala  4
-rw-r--r--  tests/pos/t7668.scala (renamed from tests/pending/pos/t7668.scala)  2
-rw-r--r--  tests/pos/t7902.scala (renamed from tests/pending/pos/t7902.scala)  0
-rw-r--r--  tests/pos/t8046c.scala (renamed from tests/pending/pos/t8046c.scala)  0
-rw-r--r--  tests/pos/t807.scala (renamed from tests/pending/pos/t807.scala)  0
-rw-r--r--  tests/pos/t8300-patmat-a.scala (renamed from tests/pending/pos/t8300-patmat-a.scala)  0
-rw-r--r--  tests/pos/t8300-patmat-b.scala (renamed from tests/pending/pos/t8300-patmat-b.scala)  0
-rw-r--r--  tests/pos/t8301b.scala (renamed from tests/pending/pos/t8301b.scala)  0
-rw-r--r--  tests/pos/t8364.scala (renamed from tests/pending/pos/t8364.scala)  0
-rw-r--r--  tests/pos/trait-force-info.scala (renamed from tests/pending/pos/trait-force-info.scala)  0
-rw-r--r--  tests/pos/tryWithoutHandler.scala  7
-rw-r--r--  tests/run/applydynamic_sip.check (renamed from tests/pending/run/applydynamic_sip.check)  0
-rw-r--r--  tests/run/applydynamic_sip.flags (renamed from tests/pending/run/applydynamic_sip.flags)  0
-rw-r--r--  tests/run/applydynamic_sip.scala (renamed from tests/pending/run/applydynamic_sip.scala)  1
-rw-r--r--  tests/run/dynamic-anyval.check (renamed from tests/pending/run/dynamic-anyval.check)  0
-rw-r--r--  tests/run/dynamic-anyval.scala (renamed from tests/pending/run/dynamic-anyval.scala)  0
-rw-r--r--  tests/run/dynamicDynamicTests.scala  41
-rw-r--r--  tests/run/i1490.check  3
-rw-r--r--  tests/run/i1490.scala  13
-rw-r--r--  tests/run/t1335.scala  11
-rw-r--r--  tests/run/t1500b.scala  21
-rw-r--r--  tests/run/t1500c.scala  19
-rw-r--r--  tests/run/t298.check (renamed from tests/pending/run/t298.check)  0
-rw-r--r--  tests/run/t298.scala (renamed from tests/pending/run/t298.scala)  0
-rw-r--r--  tests/run/t3026.check (renamed from tests/pending/run/t3026.check)  0
-rwxr-xr-x  tests/run/t3026.scala (renamed from tests/pending/run/t3026.scala)  0
-rw-r--r--  tests/run/t3353.check (renamed from tests/pending/run/t3353.check)  0
-rw-r--r--  tests/run/t3353.scala (renamed from tests/pending/run/t3353.scala)  0
-rw-r--r--  tests/run/t4536.check (renamed from tests/pending/run/t4536.check)  0
-rw-r--r--  tests/run/t4536.flags (renamed from tests/pending/run/t4536.flags)  0
-rw-r--r--  tests/run/t4536.scala (renamed from tests/pending/run/t4536.scala)  8
-rw-r--r--  tests/run/t5040.check (renamed from tests/pending/run/t5040.check)  0
-rw-r--r--  tests/run/t5040.flags (renamed from tests/pending/run/t5040.flags)  0
-rw-r--r--  tests/run/t5040.scala (renamed from tests/pending/run/t5040.scala)  1
-rw-r--r--  tests/run/t5733.check (renamed from tests/pending/run/t5733.check)  0
-rw-r--r--  tests/run/t5733.scala (renamed from tests/pending/run/t5733.scala)  0
-rw-r--r--  tests/run/t6353.check (renamed from tests/pending/run/t6353.check)  0
-rw-r--r--  tests/run/t6353.scala (renamed from tests/pending/run/t6353.scala)  0
-rw-r--r--  tests/run/t6355.check (renamed from tests/pending/run/t6355.check)  0
-rw-r--r--  tests/run/t6355.scala (renamed from tests/pending/run/t6355.scala)  0
-rw-r--r--  tests/run/t6663.check (renamed from tests/pending/run/t6663.check)  0
-rw-r--r--  tests/run/t6663.flags (renamed from tests/pending/run/t6663.flags)  0
-rw-r--r--  tests/run/t6663.scala (renamed from tests/pending/run/t6663.scala)  0
-rw-r--r--  tests/run/unapply.scala (renamed from tests/pending/run/unapply.scala)  4
-rw-r--r--  tests/untried/neg/applydynamic_sip.check  73
-rw-r--r--  tests/untried/neg/applydynamic_sip.scala  33
397 files changed, 9614 insertions, 664 deletions
diff --git a/.gitignore b/.gitignore
index c9f12e986..17eba0468 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,7 @@
*.DS_Store
*.class
*.log
+*.swp
*~
*.swp
@@ -37,6 +38,7 @@ scala-scala
# Ignore output files but keep the directory
out/
+build/
!out/.keep
# Ignore build-file
diff --git a/README.md b/README.md
index bdac6376d..20fea6653 100644
--- a/README.md
+++ b/README.md
@@ -30,8 +30,10 @@ See [github contributors page](https://github.com/lampepfl/dotty/graphs/contribu
| Colored Repl | Implemented |
| Sbt incremental build | Implemented |
| Non-blocking lazy vals | Implemented |
+| Multiverse equality | Implemented |
| Option-less pattern matching(based on [name-based patmat](https://github.com/scala/scala/pull/2848)) | Implemented |
| Function arity adaptation | Implemented |
+| Exhaustivity checks in pattern matching | Implemented |
| | |
| Non-boxed arrays of value classes | In progress |
| Working contravariant implicits | In progress |
@@ -44,8 +46,7 @@ See [github contributors page](https://github.com/lampepfl/dotty/graphs/contribu
| Effects | Under consideration |
| Auto-completion in repl | Under consideration |
| Spec Option-less pattern matching | Under consideration |
-| Multiverse equality | Under consideration |
-| Exhaustivity checks in pattern matching | Under consideration |
+
There is also a plethora of small details such as [per-callsite @tailrec annotations](https://github.com/lampepfl/dotty/issues/1221)
#### What are the complications that I can have if I start using Dotty?
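
Note: this commit moves "Exhaustivity checks in pattern matching" to Implemented and adds src/dotty/tools/dotc/transform/patmat/Space.scala together with the tests/patmat suite. As a rough illustration (not taken from the test suite; the names are invented), the checker is meant to warn on matches like the following, which covers Circle but not Square:

    sealed trait Shape
    case class Circle(radius: Double) extends Shape
    case class Square(side: Double) extends Shape

    def area(s: Shape): Double = s match {
      case Circle(r) => math.Pi * r * r // expected warning: match may not be exhaustive (Square is missing)
    }
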
diff --git a/bridge/src/main/scala/xsbt/ScaladocInterface.scala b/bridge/src/main/scala/xsbt/ScaladocInterface.scala
new file mode 100644
index 000000000..3ad9c7941
--- /dev/null
+++ b/bridge/src/main/scala/xsbt/ScaladocInterface.scala
@@ -0,0 +1,72 @@
+/* sbt -- Simple Build Tool
+ * Copyright 2008, 2009 Mark Harrah
+ */
+package xsbt
+
+import xsbti.Logger
+import dotty.tools.dottydoc.api.scala.Dottydoc
+import java.net.URL
+
+class ScaladocInterface {
+ def run(args: Array[String], log: Logger, delegate: xsbti.Reporter) =
+ (new DottydocRunner(args, log, delegate)).run()
+}
+
+class DottydocRunner(args: Array[String], log: Logger, delegate: xsbti.Reporter) extends Dottydoc {
+ def run(): Unit = getOutputFolder(args).map { outputFolder =>
+ val index = createIndex(args)
+ val resources = getResources(args)
+ val template = getTemplate(resources)
+
+ template.fold(writeJson(index, outputFolder)) { tpl =>
+ buildDocs(outputFolder, tpl, resources, index)
+ }
+ } getOrElse {
+ delegate.log(
+ NoPosition,
+ "No output folder set for API documentation (\"-d\" parameter should be passed to the documentation tool)",
+ xsbti.Severity.Error
+ )
+ }
+
+ private[this] val NoPosition = new xsbti.Position {
+ val line = xsbti.Maybe.nothing[Integer]
+ val lineContent = ""
+ val offset = xsbti.Maybe.nothing[Integer]
+ val sourcePath = xsbti.Maybe.nothing[String]
+ val sourceFile = xsbti.Maybe.nothing[java.io.File]
+ val pointer = xsbti.Maybe.nothing[Integer]
+ val pointerSpace = xsbti.Maybe.nothing[String]
+ }
+
+ private def getStringSetting(name: String): Option[String] =
+ args find (_.startsWith(name)) map (_.drop(name.length))
+
+ private def getOutputFolder(args: Array[String]): Option[String] =
+ args sliding(2) find { case Array(x, _) => x == "-d" } map (_.tail.head.trim)
+
+ private def getTemplate(resources: List[URL]): Option[URL] =
+ resources.find(_.getFile.endsWith("template.html"))
+
+ private def getResources(args: Array[String]): List[URL] = {
+ val cp = args sliding (2) find { case Array(x, _) => x == "-classpath" } map (_.tail.head.trim) getOrElse ""
+
+ cp.split(":").find(_.endsWith("dottydoc-client.jar")).map { resourceJar =>
+ import java.util.jar.JarFile
+ val jarEntries = (new JarFile(resourceJar)).entries
+ var entries: List[URL] = Nil
+
+ while (jarEntries.hasMoreElements) {
+ val entry = jarEntries.nextElement()
+
+ if (!entry.isDirectory()) {
+ val path = s"jar:file:$resourceJar!/${entry.getName}"
+ val url = new URL(path)
+ entries = url :: entries
+ }
+ }
+
+ entries
+ } getOrElse (Nil)
+ }
+}
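
Note: the runner above finds its settings by scanning the raw argument array: "-d" supplies the output folder, and the "-classpath" value is searched for dottydoc-client.jar, whose entries become the documentation resources. A minimal sketch of an invocation, with placeholder paths and the Logger/Reporter assumed to be provided by sbt:

    // Hypothetical argument list; the paths are placeholders.
    val args = Array(
      "-d", "build/api",                                      // picked up by getOutputFolder
      "-classpath", "lib/dotty.jar:lib/dottydoc-client.jar",  // scanned for dottydoc-client.jar
      "src/library/Example.scala"
    )
    new DottydocRunner(args, sbtLogger, sbtReporter).run()
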
diff --git a/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala b/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala
new file mode 100644
index 000000000..2d4c7abcf
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/DottyDoc.scala
@@ -0,0 +1,79 @@
+package dotty.tools
+package dottydoc
+
+import core._
+import core.transform._
+import dotc.config.CompilerCommand
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts._
+import dotc.core.Phases.Phase
+import dotc.typer.FrontEnd
+import dotc.{ CompilationUnit, Compiler, Driver, Run }
+import io.PlainFile
+import model.Package
+import model.json._
+
+import _root_.java.util.{ Map => JMap }
+
+/** Custom Compiler with phases for the documentation tool
+ *
+ * The idea here is to structure `dottydoc` around the new infrastructure. As
+ * such, dottydoc will itself be a compiler. It will, however, produce a format
+ * that can be used by other tools or web-browsers.
+ *
+ * Example:
+ * 1. Use the existing FrontEnd to typecheck the code being fed to dottydoc
+ * 2. Create an AST that is serializable
+ * 3. Serialize to JS object
+ */
+class DocCompiler extends Compiler {
+ override def phases: List[List[Phase]] = List(
+ List(new DocFrontEnd),
+ List(new DocImplicitsPhase),
+ List(new DocASTPhase),
+ List(DocMiniTransformations(new LinkReturnTypes,
+ new LinkParamListTypes,
+ new LinkImplicitlyAddedTypes,
+ new LinkSuperTypes,
+ new AlternateConstructors,
+ new SortMembers))
+ )
+}
+
+class DocFrontEnd extends FrontEnd {
+ override protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
+ unit.isJava
+}
+
+abstract class DocDriver extends Driver {
+ import scala.collection.JavaConverters._
+
+ override def setup(args: Array[String], rootCtx: Context): (List[String], Context) = {
+ val ctx = rootCtx.fresh
+ val summary = CompilerCommand.distill(args)(ctx)
+
+ ctx.setSettings(summary.sstate)
+ ctx.setSetting(ctx.settings.YkeepComments, true)
+
+ val fileNames = CompilerCommand.checkUsage(summary, sourcesRequired)(ctx)
+ (fileNames, ctx)
+ }
+
+ override def newCompiler(implicit ctx: Context): Compiler = new DocCompiler
+
+ def compiledDocs(args: Array[String]): collection.Map[String, Package] = {
+ val (fileNames, ctx) = setup(args, initCtx.fresh)
+ doCompile(newCompiler(ctx), fileNames)(ctx)
+
+ ctx.docbase.packages[Package]
+ }
+
+ def compiledDocsJava(args: Array[String]): JMap[String, Package] =
+ compiledDocs(args).asJava
+
+ def indexToJson(index: collection.Map[String, Package]): String =
+ index.json
+
+ def indexToJsonJava(index: JMap[String, Package]): String =
+ indexToJson(index.asScala)
+}
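
Note: since dottydoc is itself structured as a compiler, its pipeline can be extended like any other phase list. A hedged sketch (class name invented, imports as in DottyDoc.scala above) of appending one more group of mini transformations to the DocCompiler defined here:

    class MyDocCompiler extends DocCompiler {
      // Reuses SortMembers just to show where an extra DocMiniPhase group would slot in.
      override def phases: List[List[Phase]] =
        super.phases :+ List(DocMiniTransformations(new SortMembers))
    }
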
diff --git a/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java b/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java
new file mode 100644
index 000000000..1bdfe0488
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/api/java/Dottydoc.java
@@ -0,0 +1,63 @@
+package dotty.tools.dottydoc.api.java;
+
+import dotty.tools.dottydoc.DocDriver;
+import dotty.tools.dottydoc.model.Package;
+import dotty.tools.dottydoc.util.OutputWriter;
+import java.util.Map;
+import java.util.List;
+import java.net.URL;
+
+/**
+ * The Dottydoc API is fairly simple. The tool creates an index by calling:
+ * "createIndex" with the same argument list as you would the compiler - e.g:
+ *
+ * {{{
+ * String[] array = {
+ * "-language:Scala2"
+ * };
+ *
+ * Map<String, Package> index = createIndex(array);
+ * }}}
+ *
+ * Once the index has been generated, the tool can also build a documentation
+ * API given a Mustache template and a flat resources structure (i.e. absolute
+ * paths to each resource, which will be put in the same directory).
+ *
+ * {{{
+ * buildDocs("path/to/output/dir", templateURL, resources, index);
+ * }}}
+ *
+ * The tool can also generate JSON from the created index using "toJson(index)"
+ * or directly using "createJsonIndex"
+ */
+public class Dottydoc extends DocDriver {
+
+ /** Creates index from compiler arguments */
+ public Map<String, Package> createIndex(String[] args) {
+ return compiledDocsJava(args);
+ }
+
+ /** Creates JSON from compiler arguments */
+ public String createJsonIndex(String[] args) {
+ return indexToJsonJava(createIndex(args));
+ }
+
+ public String toJson(Map<String, Package> index) {
+ return indexToJsonJava(index);
+ }
+
+ /** Creates a documentation from the given parameters */
+ public void buildDocs(
+ String outputDir,
+ URL template,
+ List<URL> resources,
+ Map<String, Package> index
+ ) {
+ new OutputWriter().writeJava(index, outputDir, template, resources);
+ }
+
+ /** Writes JSON to an output directory as "index.json" */
+ public void writeJson(Map<String, Package> index, String outputDir) {
+ new OutputWriter().writeJsonJava(index, outputDir);
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala b/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala
new file mode 100644
index 000000000..15db81a95
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/api/scala/Dottydoc.scala
@@ -0,0 +1,49 @@
+package dotty.tools.dottydoc.api.scala
+
+import dotty.tools.dottydoc.DocDriver
+import dotty.tools.dottydoc.model.Package
+import dotty.tools.dottydoc.util.OutputWriter
+
+import scala.collection.Map
+import java.net.URL
+
+/**
+ * The Dottydoc API is fairly simple. The tool creates an index by calling:
+ * "createIndex" with the same argument list as you would the compiler - e.g:
+ *
+ * {{{
+ * val array: Array[String] = Array(
+ * "-language:Scala2"
+ * )
+ *
+ * val index: Map[String, Package] = createIndex(array)
+ * }}}
+ *
+ * Once the index has been generated, the tool can also build a documentation
+ * API given a Mustache template and a flat resources structure (i.e. absolute
+ * paths to each resource, which will be put in the same directory).
+ *
+ * {{{
+ * buildDocs("path/to/output/dir", templateURL, resources, index)
+ * }}}
+ *
+ * The tool can also generate JSON from the created index using "indexToJson"
+ * or directly using "createJsonIndex"
+ */
+trait Dottydoc extends DocDriver {
+ /** Creates index from compiler arguments */
+ def createIndex(args: Array[String]): Map[String, Package] =
+ compiledDocs(args)
+
+ /** Creates JSON from compiler arguments */
+ def createJsonIndex(args: Array[String]): String =
+ indexToJson(compiledDocs(args))
+
+ /** Creates a documentation from the given parameters */
+ def buildDocs(outDir: String, template: URL, resources: List[URL], index: Map[String, Package]) =
+ new OutputWriter().write(index, outDir, template, resources)
+
+ /** Writes JSON to an output directory as "index.json" */
+ def writeJson(index: Map[String, Package], outputDir: String) =
+ new OutputWriter().writeJson(index, outputDir)
+}
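
Note: putting the calls documented above together, a hedged end-to-end example (the object name, source path and output directory are placeholders):

    object GenerateDocs extends dotty.tools.dottydoc.api.scala.Dottydoc {
      def main(args: Array[String]): Unit = {
        val index = createIndex(Array("-language:Scala2", "src/Example.scala"))
        writeJson(index, "build/docs") // writes build/docs/index.json
        // Given a Mustache template and its resources, a browsable site could be
        // generated instead:
        // buildDocs("build/docs", templateURL, resources, index)
      }
    }
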
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala
new file mode 100644
index 000000000..53c96fc87
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/AlternateConstructorsPhase.scala
@@ -0,0 +1,34 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.core.Contexts.Context
+
+import transform.DocMiniPhase
+import model._
+import model.internal._
+
+/** This DocMiniPhase adds the alternate constructors, currently defined as
+ * methods with the name `<init>`, to the Entity#constructors list
+ */
+class AlternateConstructors extends DocMiniPhase {
+ def partitionMembers(ent: Entity with Constructors with Members): (List[List[ParamList]], List[Entity]) = {
+ val (constructors, members) = ent.members.partition(x => x.name == "<init>")
+
+ val paramLists: List[List[ParamList]] = constructors.collect {
+ case df: Def => df.paramLists
+ }
+
+ (ent.constructors ++ paramLists, members)
+ }
+
+ override def transformClass(implicit ctx: Context) = { case cls: ClassImpl =>
+ val (constructors, members) = partitionMembers(cls)
+ cls.copy(members = members, constructors = constructors)
+ }
+
+ override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
+ val (constructors, members) = partitionMembers(cc)
+ cc.copy(members = members, constructors = constructors)
+ }
+}
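
Note: as a hedged illustration of what the phase above rearranges (example source invented), a class with a secondary constructor carries an extra member named <init>; after this phase that parameter list appears in the entity's constructors instead of among its members:

    class Interval(val lo: Int, val hi: Int) {
      def this(point: Int) = this(point, point) // secondary constructor, internally also named <init>
    }
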
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala
new file mode 100644
index 000000000..7744752ce
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/DocASTPhase.scala
@@ -0,0 +1,191 @@
+package dotty.tools
+package dottydoc
+package core
+
+/** Dotty and Dottydoc imports */
+import dotc.ast.Trees._
+import dotc.CompilationUnit
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Phases.Phase
+import dotc.core.Symbols.{ Symbol, NoSymbol }
+
+class DocASTPhase extends Phase {
+ import model._
+ import model.factories._
+ import model.internal._
+ import model.parsers.WikiParser
+ import model.comment.Comment
+ import dotty.tools.dotc.core.Flags
+ import dotty.tools.dotc.ast.tpd._
+ import util.traversing._
+ import util.internal.setters._
+
+ def phaseName = "docphase"
+
+ private[this] val commentParser = new WikiParser
+
+ /** Saves the commentParser function for later evaluation, for when the AST has been filled */
+ def track(symbol: Symbol, ctx: Context, parent: Symbol = NoSymbol)(op: => Entity) = {
+ val entity = op
+
+ if (entity != NonEntity)
+ commentParser += (entity, symbol, parent, ctx)
+
+ entity
+ }
+
+ /** Build documentation hierarchy from existing tree */
+ def collect(tree: Tree, prev: List[String] = Nil)(implicit ctx: Context): Entity = track(tree.symbol, ctx) {
+ val implicitConversions = ctx.docbase.defs(tree.symbol)
+
+ def collectList(xs: List[Tree], ps: List[String]): List[Entity] =
+ xs.map(collect(_, ps)).filter(_ != NonEntity)
+
+ def collectEntityMembers(xs: List[Tree], ps: List[String]) =
+ collectList(xs, ps).asInstanceOf[List[Entity with Members]]
+
+ def collectMembers(tree: Tree, ps: List[String] = prev)(implicit ctx: Context): List[Entity] = {
+ val defs = (tree match {
+ case t: Template => collectList(t.body, ps)
+ case _ => Nil
+ })
+
+ defs ++ implicitConversions.flatMap(membersFromSymbol)
+ }
+
+ def membersFromSymbol(sym: Symbol): List[Entity] = {
+ val defs = sym.info.bounds.hi.membersBasedOnFlags(Flags.Method, Flags.Synthetic | Flags.Private)
+ .filterNot(_.symbol.owner.name.show == "Any")
+ .map { meth =>
+ track(meth.symbol, ctx, tree.symbol) {
+ DefImpl(
+ meth.symbol.name.show,
+ Nil,
+ path(meth.symbol),
+ returnType(meth.info),
+ typeParams(meth.symbol),
+ paramLists(meth.info),
+ implicitlyAddedFrom = Some(returnType(meth.symbol.owner.info))
+ )
+ }
+ }.toList
+
+ val vals = sym.info.fields.filterNot(_.symbol.is(Flags.Private | Flags.Synthetic)).map { value =>
+ track(value.symbol, ctx, tree.symbol) {
+ ValImpl(
+ value.symbol.name.show,
+ Nil, path(value.symbol),
+ returnType(value.info),
+ implicitlyAddedFrom = Some(returnType(value.symbol.owner.info))
+ )
+ }
+ }
+
+ defs ++ vals
+ }
+
+
+ tree match {
+ /** package */
+ case pd @ PackageDef(pid, st) =>
+ val newPath = prev :+ pid.name.toString
+ addEntity(PackageImpl(newPath.mkString("."), collectEntityMembers(st, newPath), newPath))
+
+ /** trait */
+ case t @ TypeDef(n, rhs) if t.symbol.is(Flags.Trait) =>
+ val name = n.decode.toString
+ val newPath = prev :+ name
+ //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well
+ TraitImpl(name, collectMembers(rhs), flags(t), newPath, typeParams(t.symbol), traitParameters(t.symbol), superTypes(t))
+
+ /** objects, on the format "Object$" so drop the last letter */
+ case o @ TypeDef(n, rhs) if o.symbol.is(Flags.Module) =>
+ val name = n.decode.toString.dropRight(1)
+ //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well
+ ObjectImpl(name, collectMembers(rhs, prev :+ name), flags(o), prev :+ (name + "$"), superTypes(o))
+
+ /** class / case class */
+ case c @ TypeDef(n, rhs) if c.symbol.isClass =>
+ val name = n.decode.toString
+ val newPath = prev :+ name
+ //TODO: should not `collectMember` from `rhs` - instead: get from symbol, will get inherited members as well
+ (name, collectMembers(rhs), flags(c), newPath, typeParams(c.symbol), constructors(c.symbol), superTypes(c), None) match {
+ case x if c.symbol.is(Flags.CaseClass) => CaseClassImpl.tupled(x)
+ case x => ClassImpl.tupled(x)
+ }
+
+ /** def */
+ case d: DefDef =>
+ DefImpl(d.name.decode.toString, flags(d), path(d.symbol), returnType(d.tpt.tpe), typeParams(d.symbol), paramLists(d.symbol.info))
+
+ /** val */
+ case v: ValDef if !v.symbol.is(Flags.ModuleVal) =>
+ ValImpl(v.name.decode.toString, flags(v), path(v.symbol), returnType(v.tpt.tpe))
+
+ case x => {
+ //dottydoc.println(s"Found unwanted entity: $x (${x.pos},\n${x.show}")
+ NonEntity
+ }
+ }
+ }
+
+ var packages: Map[String, Package] = Map.empty
+
+ def addEntity(p: Package): Package = {
+ def mergedChildren(x1s: List[Entity], x2s: List[Entity]): List[Entity] = {
+ val (packs1, others1) = x1s.partition(_.kind == "package")
+ val (packs2, others2) = x2s.partition(_.kind == "package")
+
+ val others = others1 ::: others2
+ val packs = (packs1 ::: packs2).groupBy(_.path).map(_._2.head)
+
+ (others ++ packs).sortBy(_.name)
+ }
+
+ val path = p.path.mkString(".")
+ val newPack = packages.get(path).map {
+ case ex: PackageImpl =>
+ if (!ex.comment.isDefined) ex.comment = p.comment
+ ex.members = mergedChildren(ex.members, p.members)
+ ex
+ }.getOrElse(p)
+
+ packages = packages + (path -> newPack)
+ newPack
+ }
+
+ private[this] var totalRuns = 0
+ private[this] var currentRun = 0
+
+ override def run(implicit ctx: Context): Unit = {
+ currentRun += 1
+ println(s"Compiling ($currentRun/$totalRuns): ${ctx.compilationUnit.source.file.name}")
+ collect(ctx.compilationUnit.tpdTree) // Will put packages in `packages` var
+ }
+
+ override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
+ // (1) Create package structure for all `units`, this will give us a complete structure
+ totalRuns = units.length
+ val compUnits = super.runOn(units)
+
+ // (2) Set parents of entities, needed for linking
+ for {
+ parent <- packages.values
+ child <- parent.children
+ } setParent(child, to = parent)
+
+ // (3) Create documentation template from docstrings, with internal links
+ println("Generating documentation, this might take a while...")
+ commentParser.parse(packages)
+
+ // (4) Clear caches
+ commentParser.clear()
+
+ // (5) Update Doc AST in ctx.base
+ for (kv <- packages) ctx.docbase.packages += kv
+
+ // Return super's result
+ compUnits
+ }
+}
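
Note: a hedged sketch of what collect/addEntity above produce for a tiny source file (input and entity fields abbreviated; exact arguments elided):

    // Input (placeholder file):
    //   package mypkg
    //   class C { def f(x: Int): Int = x }
    //
    // Roughly the entry stored in `packages` under the key "mypkg":
    //   PackageImpl("mypkg",
    //     members = List(ClassImpl("C", members = List(DefImpl("f", ...)), ...)),
    //     path    = List("mypkg"))
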
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala
new file mode 100644
index 000000000..f322d7a5a
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/DocImplicitsPhase.scala
@@ -0,0 +1,27 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotty.tools.dotc.transform.TreeTransforms.{ MiniPhaseTransform, TransformerInfo }
+import dotty.tools.dotc.core.Flags
+import dotc.core.Contexts.Context
+
+class DocImplicitsPhase extends MiniPhaseTransform { thisTransformer =>
+ import dotty.tools.dotc.ast.tpd._
+
+ def phaseName = "addImplicitsPhase"
+
+ override def transformDefDef(tree: DefDef)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (
+ tree.symbol.is(Flags.Implicit) && // has to have an implicit flag
+ tree.symbol.owner.isStaticOwner && // owner has to be static (e.g. top-level `object`)
+ tree.vparamss.length > 0 &&
+ tree.vparamss(0).length == 1 // should only take one arg, since it has to be a transformation
+ ) {
+ val convertee = tree.vparamss(0)(0).symbol.info.widenDealias.finalResultType.typeSymbol // the pimped type (i.e. `class`)
+ ctx.docbase.addDef(convertee, tree.symbol.info.widenDealias.finalResultType.typeSymbol)
+ }
+
+ tree
+ }
+}
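
Note: a hedged example (all names invented) of a definition that satisfies the guard above: an implicit def in a top-level object taking exactly one value parameter. The phase records the parameter's type against the conversion's result type in ctx.docbase, so that members such as shout can later be listed as implicitly added on the documented parameter type:

    object StringEnrichment {
      class RichString(val s: String) {
        def shout: String = s.toUpperCase + "!"
      }
      // Implicit, statically owned, single value parameter: matches the checks above.
      implicit def toRichString(s: String): RichString = new RichString(s)
    }
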
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala b/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala
new file mode 100644
index 000000000..2690ac7b7
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/MiniPhaseTransform.scala
@@ -0,0 +1,199 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.CompilationUnit
+import dotc.core.Contexts.Context
+import dotc.core.Phases.Phase
+import model._
+import model.internal._
+
+object transform {
+ /**
+ * The idea behind DocMiniTransformations is to fuse transformations to the
+ * doc AST, much like `MiniPhaseTransform` in dotty core - but in a much more
+ * simple implementation
+ *
+ * Usage
+ * -----
+ *
+ * Create a `DocMiniPhase` which overrides the relevant method:
+ *
+ * {{{
+ * override def transformDef(implicit ctx: Context) = {
+ * case x if shouldTransform(x) => x.copy(newValue = ...)
+ * }
+ * }}}
+ *
+ * On each node in the AST, the appropriate method in `DocMiniPhase` will be
+ * called in the order that they are supplied in
+ * `DocMiniphaseTransformations`.
+ *
+ * There won't be a match-error as `transformX` is composed with an
+ * `identity` function.
+ *
+ * The transformations in `DocMiniTransformations` will apply transformations
+ * to all nodes - this means that you do _not_ need to transform children in
+ * `transformPackage`, because `transformX` will be called for the relevant
+ * children. If you want to add children to `Package` you need to do that in
+ * `transformPackage`, these additions will be persisted.
+ */
+ abstract class DocMiniTransformations(transformations: List[DocMiniPhase]) extends Phase {
+
+ override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
+ for {
+ rootName <- rootPackages
+ pack = ctx.docbase.packages[Package](rootName)
+ transformed = performPackageTransform(pack)
+ } yield ctx.docbase.packages(rootName) = transformed
+ super.runOn(units)
+ }
+
+ private def rootPackages(implicit ctx: Context): List[String] = {
+ var currentDepth = Int.MaxValue
+ var packs = List.empty[String]
+
+ for (key <- ctx.docbase.packages.keys) {
+ val keyDepth = key.split("\\.").length
+ packs =
+ if (keyDepth < currentDepth) {
+ currentDepth = keyDepth
+ key :: Nil
+ } else if (keyDepth == currentDepth) {
+ key :: packs
+ } else packs
+ }
+ packs
+ }
+
+ private def performPackageTransform(pack: Package)(implicit ctx: Context): Package = {
+ def transformEntity[E <: Entity](e: E, f: DocMiniPhase => E => E)(createNew: E => E): E = {
+ val transformedEntity = transformations.foldLeft(e) { case (oldE, transf) =>
+ f(transf)(oldE)
+ }
+ createNew(transformedEntity)
+ }
+
+ def traverse(ent: Entity): Entity = ent match {
+ case p: Package => transformEntity(p, _.packageTransformation) { p =>
+ val newPackage = PackageImpl(
+ p.name,
+ p.members.map(traverse),
+ p.path,
+ p.comment
+ )
+
+ // Update reference in context to newPackage
+ ctx.docbase.packages[Package] += (newPackage.path.mkString(".") -> newPackage)
+
+ newPackage
+ }
+ case c: Class => transformEntity(c, _.classTransformation) { cls =>
+ ClassImpl(
+ cls.name,
+ cls.members.map(traverse),
+ cls.modifiers,
+ cls.path,
+ cls.typeParams,
+ cls.constructors,
+ cls.superTypes,
+ cls.comment
+ )
+ }
+ case cc: CaseClass => transformEntity(cc, _.caseClassTransformation) { cc =>
+ CaseClassImpl(
+ cc.name,
+ cc.members.map(traverse),
+ cc.modifiers,
+ cc.path,
+ cc.typeParams,
+ cc.constructors,
+ cc.superTypes,
+ cc.comment
+ )
+ }
+ case trt: Trait => transformEntity(trt, _.traitTransformation) { trt =>
+ TraitImpl(
+ trt.name,
+ trt.members.map(traverse),
+ trt.modifiers,
+ trt.path,
+ trt.typeParams,
+ trt.traitParams,
+ trt.superTypes,
+ trt.comment
+ )
+ }
+ case obj: Object => transformEntity(obj, _.objectTransformation) { obj =>
+ ObjectImpl(
+ obj.name,
+ obj.members.map(traverse),
+ obj.modifiers,
+ obj.path,
+ obj.superTypes,
+ obj.comment
+ )
+ }
+ case df: Def => transformEntity(df, _.defTransformation) { df =>
+ DefImpl(
+ df.name,
+ df.modifiers,
+ df.path,
+ df.returnValue,
+ df.typeParams,
+ df.paramLists,
+ df.comment,
+ df.implicitlyAddedFrom
+ )
+ }
+ case vl: Val => transformEntity(vl, _.valTransformation) { vl =>
+ ValImpl(
+ vl.name,
+ vl.modifiers,
+ vl.path,
+ vl.returnValue,
+ vl.comment,
+ vl.implicitlyAddedFrom
+ )
+ }
+ }
+
+ traverse(pack).asInstanceOf[Package]
+ }
+
+ override def run(implicit ctx: Context): Unit = ()
+ }
+
+ object DocMiniTransformations {
+ private var previousPhase = 0
+ def apply(transformations: DocMiniPhase*) =
+ new DocMiniTransformations(transformations.toList) {
+ val packages = Map.empty[String, Package]
+
+ def phaseName = s"MiniTransformation${ previousPhase += 1 }"
+ }
+ }
+
+ trait DocMiniPhase { phase =>
+ private def identity[E]: PartialFunction[E, E] = {
+ case id => id
+ }
+
+ // Partial functions instead????
+ def transformPackage(implicit ctx: Context): PartialFunction[Package, Package] = identity
+ def transformClass(implicit ctx: Context): PartialFunction[Class, Class] = identity
+ def transformCaseClass(implicit ctx: Context): PartialFunction[CaseClass, CaseClass] = identity
+ def transformTrait(implicit ctx: Context): PartialFunction[Trait, Trait] = identity
+ def transformObject(implicit ctx: Context): PartialFunction[Object, Object] = identity
+ def transformDef(implicit ctx: Context): PartialFunction[Def, Def] = identity
+ def transformVal(implicit ctx: Context): PartialFunction[Val, Val] = identity
+
+ private[transform] def packageTransformation(p: Package)(implicit ctx: Context) = (transformPackage orElse identity)(p)
+ private[transform] def classTransformation(cls: Class)(implicit ctx: Context) = (transformClass orElse identity)(cls)
+ private[transform] def caseClassTransformation(cc: CaseClass)(implicit ctx: Context) = (transformCaseClass orElse identity)(cc)
+ private[transform] def traitTransformation(trt: Trait)(implicit ctx: Context) = (transformTrait orElse identity)(trt)
+ private[transform] def objectTransformation(obj: Object)(implicit ctx: Context) = (transformObject orElse identity)(obj)
+ private[transform] def defTransformation(df: Def)(implicit ctx: Context) = (transformDef orElse identity)(df)
+ private[transform] def valTransformation(vl: Val)(implicit ctx: Context) = (transformVal orElse identity)(vl)
+ }
+}
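
Note: following the usage pattern described in the DocMiniTransformations comment above, a hedged example of a custom mini phase (the filtering rule and class name are invented; imports as in SortMembersPhase.scala below). It would be wired in via DocMiniTransformations(new HideUnderscoreMembers, new SortMembers):

    class HideUnderscoreMembers extends DocMiniPhase {
      override def transformClass(implicit ctx: Context) = { case cls: ClassImpl =>
        // Drop members whose name starts with an underscore (illustrative rule only)
        cls.copy(members = cls.members.filterNot(_.name.startsWith("_")))
      }
    }
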
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala b/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala
new file mode 100644
index 000000000..c8de532bb
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/SortMembersPhase.scala
@@ -0,0 +1,32 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.core.Contexts.Context
+
+import transform.DocMiniPhase
+import model._
+import model.internal._
+
+/** This DocMiniPhase sorts the members of all classes, traits, objects and packages */
+class SortMembers extends DocMiniPhase {
+ override def transformPackage(implicit ctx: Context) = { case p: PackageImpl =>
+ p.copy(members = p.members.sortBy(_.name))
+ }
+
+ override def transformClass(implicit ctx: Context) = { case c: ClassImpl =>
+ c.copy(members = c.members.sortBy(_.name))
+ }
+
+ override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
+ cc.copy(members = cc.members.sortBy(_.name))
+ }
+
+ override def transformTrait(implicit ctx: Context) = { case t: TraitImpl =>
+ t.copy(members = t.members.sortBy(_.name))
+ }
+
+ override def transformObject(implicit ctx: Context) = { case o: ObjectImpl =>
+ o.copy(members = o.members.sortBy(_.name))
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala b/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala
new file mode 100644
index 000000000..ae07effa9
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/core/TypeLinkingPhases.scala
@@ -0,0 +1,115 @@
+package dotty.tools
+package dottydoc
+package core
+
+import dotc.core.Contexts.Context
+import dotc.util.Positions.NoPosition
+
+import transform.DocMiniPhase
+import model._
+import model.internal._
+import model.comment._
+import model.references._
+import BodyParsers._
+import util.MemberLookup
+import util.traversing._
+import util.internal.setters._
+
+class LinkReturnTypes extends DocMiniPhase with TypeLinker {
+ override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
+ val returnValue = linkReference(df, df.returnValue, ctx.docbase.packages[Package].toMap)
+ df.copy(returnValue = returnValue)
+ }
+
+ override def transformVal(implicit ctx: Context) = { case vl: ValImpl =>
+ val returnValue = linkReference(vl, vl.returnValue, ctx.docbase.packages[Package].toMap)
+ vl.copy(returnValue = returnValue)
+ }
+}
+
+class LinkParamListTypes extends DocMiniPhase with TypeLinker {
+ override def transformDef(implicit ctx: Context) = { case df: DefImpl =>
+ val newParamLists = for {
+ ParamListImpl(list, isImplicit) <- df.paramLists
+ newList = list.map(linkReference(df, _, ctx.docbase.packages[Package].toMap))
+ } yield ParamListImpl(newList.asInstanceOf[List[NamedReference]], isImplicit)
+
+ df.copy(paramLists = newParamLists)
+ }
+}
+
+class LinkSuperTypes extends DocMiniPhase with TypeLinker {
+ def linkSuperTypes(ent: Entity with SuperTypes)(implicit ctx: Context): List[MaterializableLink] =
+ ent.superTypes.collect {
+ case UnsetLink(title, query) =>
+ val packages = ctx.docbase.packages[Package].toMap
+ val entityLink = makeEntityLink(ent, packages, Text(title), NoPosition, query).link
+ handleEntityLink(title, entityLink, ent)
+ }
+
+ override def transformClass(implicit ctx: Context) = { case cls: ClassImpl =>
+ cls.copy(superTypes = linkSuperTypes(cls))
+ }
+
+ override def transformCaseClass(implicit ctx: Context) = { case cc: CaseClassImpl =>
+ cc.copy(superTypes = linkSuperTypes(cc))
+ }
+
+ override def transformTrait(implicit ctx: Context) = { case trt: TraitImpl =>
+ trt.copy(superTypes = linkSuperTypes(trt))
+ }
+
+ override def transformObject(implicit ctx: Context) = { case obj: ObjectImpl =>
+ obj.copy(superTypes = linkSuperTypes(obj))
+ }
+}
+
+class LinkImplicitlyAddedTypes extends DocMiniPhase with TypeLinker {
+ override def transformDef(implicit ctx: Context) = {
+ case df: DefImpl if df.implicitlyAddedFrom.isDefined =>
+ val implicitlyAddedFrom = linkReference(df, df.implicitlyAddedFrom.get, ctx.docbase.packages[Package].toMap)
+ df.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom))
+ }
+
+ override def transformVal(implicit ctx: Context) = {
+ case vl: ValImpl if vl.implicitlyAddedFrom.isDefined =>
+ val implicitlyAddedFrom = linkReference(vl, vl.implicitlyAddedFrom.get, ctx.docbase.packages[Package].toMap)
+ vl.copy(implicitlyAddedFrom = Some(implicitlyAddedFrom))
+ }
+}
+
+trait TypeLinker extends MemberLookup {
+ def handleEntityLink(title: String, lt: LinkTo, ent: Entity): MaterializableLink = lt match {
+ case Tooltip(str) => NoLink(title, str)
+ case LinkToExternal(_, url) => MaterializedLink(title, url)
+ case LinkToEntity(target) => MaterializedLink(title, util.traversing.relativePath(ent, target))
+ }
+
+ def linkReference(ent: Entity, ref: Reference, packs: Map[String, Package]): Reference = {
+ def linkRef(ref: Reference) = linkReference(ent, ref, packs)
+
+ ref match {
+ case ref @ TypeReference(_, UnsetLink(t, query), tps) =>
+ val inlineToHtml = InlineToHtml(ent)
+ val title = t
+
+ val target = handleEntityLink(title, makeEntityLink(ent, packs, Text(t), NoPosition, query).link, ent)
+ val tpTargets = tps.map(linkReference(ent, _, packs))
+ ref.copy(tpeLink = target, paramLinks = tpTargets)
+ case ref @ OrTypeReference(left, right) =>
+ ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs))
+ case ref @ AndTypeReference(left, right) =>
+ ref.copy(left = linkReference(ent, left, packs), right = linkReference(ent, right, packs))
+ case ref @ NamedReference(_, rf, _, _) =>
+ ref.copy(ref = linkRef(rf))
+ case ref @ FunctionReference(args, rv) =>
+ ref.copy(args = args.map(linkReference(ent, _, packs)), returnValue = linkReference(ent, rv, packs))
+ case ref @ TupleReference(args) =>
+ ref.copy(args = args.map(linkRef))
+ case ref @ BoundsReference(low, high) =>
+ ref.copy(low = linkRef(low), high = linkRef(high))
+ case _ =>
+ ref
+ }
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala
new file mode 100644
index 000000000..29fe48de3
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyEntities.scala
@@ -0,0 +1,94 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+import scala.collection._
+
+/** A body of text. A comment has a single body, which is composed of
+ * at least one block. Inside every body is exactly one summary (see
+ * [[scala.tools.nsc.doc.model.comment.Summary]]). */
+final case class Body(blocks: Seq[Block]) {
+
+ /** The summary text of the comment body. */
+ lazy val summary: Option[Body] = {
+ def summaryInBlock(block: Block): Seq[Inline] = block match {
+ case Title(text, _) => summaryInInline(text)
+ case Paragraph(text) => summaryInInline(text)
+ case UnorderedList(items) => items flatMap summaryInBlock
+ case OrderedList(items, _) => items flatMap summaryInBlock
+ case DefinitionList(items) => items.values.toSeq flatMap summaryInBlock
+ case _ => Nil
+ }
+ def summaryInInline(text: Inline): Seq[Inline] = text match {
+ case Summary(text) => List(text)
+ case Chain(items) => items flatMap summaryInInline
+ case Italic(text) => summaryInInline(text)
+ case Bold(text) => summaryInInline(text)
+ case Underline(text) => summaryInInline(text)
+ case Superscript(text) => summaryInInline(text)
+ case Subscript(text) => summaryInInline(text)
+ case Link(_, title) => summaryInInline(title)
+ case _ => Nil
+ }
+ (blocks flatMap summaryInBlock).toList match {
+ case Nil => None
+ case inline :: Nil => Some(Body(Seq(Paragraph(inline))))
+ case inlines => Some(Body(Seq(Paragraph(Chain(inlines)))))
+ }
+ }
+}
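+
+// Illustrative sketch, not part of the original patch: how `Body.summary` is
+// expected to behave. The first `Summary` inline found in the blocks becomes the
+// summary body; the object below exists purely for illustration.
+private[comment] object BodySummaryExample {
+ val body = Body(Seq(Paragraph(Chain(Seq(Summary(Text("First sentence.")), Text(" Rest."))))))
+ // == Some(Body(Seq(Paragraph(Text("First sentence.")))))
+ val extracted: Option[Body] = body.summary
+}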
+
+/** A block-level element of text, such as a paragraph or code block. */
+sealed abstract class Block
+
+final case class Title(text: Inline, level: Int) extends Block
+final case class Paragraph(text: Inline) extends Block
+final case class Code(data: String) extends Block
+final case class UnorderedList(items: Seq[Block]) extends Block
+final case class OrderedList(items: Seq[Block], style: String) extends Block
+final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block
+final case class HorizontalRule() extends Block
+
+/** A section of text inside a block, possibly with formatting. */
+sealed abstract class Inline
+
+final case class Chain(items: Seq[Inline]) extends Inline
+final case class Italic(text: Inline) extends Inline
+final case class Bold(text: Inline) extends Inline
+final case class Underline(text: Inline) extends Inline
+final case class Superscript(text: Inline) extends Inline
+final case class Subscript(text: Inline) extends Inline
+final case class Link(target: String, title: Inline) extends Inline
+final case class Monospace(text: Inline) extends Inline
+final case class Text(text: String) extends Inline
+abstract class EntityLink(val title: Inline) extends Inline { def link: LinkTo }
+object EntityLink {
+ def apply(title: Inline, linkTo: LinkTo) = new EntityLink(title) { def link: LinkTo = linkTo }
+ def unapply(el: EntityLink): Option[(Inline, LinkTo)] = Some((el.title, el.link))
+}
+final case class HtmlTag(data: String) extends Inline {
+ private val Pattern = """(?ms)\A<(/?)(.*?)[\s>].*\z""".r
+ private val (isEnd, tagName) = data match {
+ case Pattern(s1, s2) =>
+ (! s1.isEmpty, Some(s2.toLowerCase))
+ case _ =>
+ (false, None)
+ }
+
+ def canClose(open: HtmlTag) = {
+ isEnd && tagName == open.tagName
+ }
+
+ private val TagsNotToClose = Set("br", "img")
+ def close = tagName collect { case name if !TagsNotToClose(name) => HtmlTag(s"</$name>") }
+}
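+
+// Illustrative sketch, not part of the original patch: `canClose` matches a closing
+// tag against its opening counterpart, while `close` synthesises the closing tag,
+// skipping void tags such as <br>. The object exists purely for illustration.
+private[comment] object HtmlTagExample {
+ val open = HtmlTag("<b>")
+ val closing = open.close // Some(HtmlTag("</b>"))
+ val matches = HtmlTag("</b>").canClose(open) // true
+ val void = HtmlTag("<br>").close // None, <br> never receives a closing tag
+}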
+
+/** The summary of a comment, usually its first sentence. There must be exactly one summary per body. */
+final case class Summary(text: Inline) extends Inline
+
+sealed trait LinkTo
+final case class LinkToExternal(name: String, url: String) extends LinkTo
+final case class Tooltip(name: String) extends LinkTo
+
+/** Linking directly to entities is not picklable because of cyclic references */
+final case class LinkToEntity(entity: Entity) extends LinkTo
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala
new file mode 100644
index 000000000..8c1fa8d49
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/BodyParsers.scala
@@ -0,0 +1,82 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+object BodyParsers {
+
+ implicit class BodyToHtml(val body: Body) extends AnyVal {
+ def toHtml(origin: Entity): String = {
+ val inlineToHtml = InlineToHtml(origin)
+
+ def bodyToHtml(body: Body): String =
+ (body.blocks map blockToHtml).mkString
+
+ def blockToHtml(block: Block): String = block match {
+ case Title(in, 1) => s"<h1>${inlineToHtml(in)}</h1>"
+ case Title(in, 2) => s"<h2>${inlineToHtml(in)}</h2>"
+ case Title(in, 3) => s"<h3>${inlineToHtml(in)}</h3>"
+ case Title(in, _) => s"<h4>${inlineToHtml(in)}</h4>"
+ case Paragraph(in) => s"<p>${inlineToHtml(in)}</p>"
+ case Code(data) => s"""<pre><code class="scala">$data</code></pre>"""
+ case UnorderedList(items) =>
+ s"<ul>${listItemsToHtml(items)}</ul>"
+ case OrderedList(items, listStyle) =>
+ s"<ol class=${listStyle}>${listItemsToHtml(items)}</ol>"
+ case DefinitionList(items) =>
+ s"<dl>${items map { case (t, d) => s"<dt>${inlineToHtml(t)}</dt><dd>${blockToHtml(d)}</dd>" } }</dl>"
+ case HorizontalRule() =>
+ "<hr/>"
+ }
+
+ def listItemsToHtml(items: Seq[Block]) =
+ items.foldLeft(""){ (list, item) =>
+ item match {
+ case OrderedList(_, _) | UnorderedList(_) => // HTML wants nested lists inside the previous LI; here they simply get their own LI
+ list + s"<li>${blockToHtml(item)}</li>"
+ case Paragraph(inline) =>
+ list + s"<li>${inlineToHtml(inline)}</li>" // LIs are blocks, no need to use Ps
+ case block =>
+ list + s"<li>${blockToHtml(block)}</li>"
+ }
+ }
+
+ bodyToHtml(body)
+ }
+ }
+
+ case class InlineToHtml(origin: Entity) {
+ def apply(inline: Inline) = toHtml(inline)
+
+ def relativePath(target: Entity) =
+ util.traversing.relativePath(origin, target)
+
+ def toHtml(inline: Inline): String = inline match {
+ case Chain(items) => (items map toHtml).mkString
+ case Italic(in) => s"<i>${toHtml(in)}</i>"
+ case Bold(in) => s"<b>${toHtml(in)}</b>"
+ case Underline(in) => s"<u>${toHtml(in)}</u>"
+ case Superscript(in) => s"<sup>${toHtml(in)}</sup>"
+ case Subscript(in) => s"<sub>${toHtml(in)}</sub>"
+ case Link(raw, title) => s"""<a href="$raw" target="_blank">${toHtml(title)}</a>"""
+ case Monospace(in) => s"<code>${toHtml(in)}</code>"
+ case Text(text) => text
+ case Summary(in) => toHtml(in)
+ case HtmlTag(tag) => tag
+ case EntityLink(target, link) => entityLinkToHtml(target, link)
+ }
+
+ def entityLinkToHtml(target: Inline, link: LinkTo) = link match {
+ case Tooltip(_) => toHtml(target)
+ case LinkToExternal(n, url) => s"""<a href="$url">$n</a>"""
+ case LinkToEntity(t: Entity) => t match {
+ // Entity is a package member
+ case e: Entity with Members =>
+ s"""<a href="${relativePath(t)}">${toHtml(target)}</a>"""
+ // Entity is a Val / Def
+ case x => x.parent.fold(toHtml(target)) { xpar =>
+ s"""<a href="${relativePath(xpar)}#${x.name}">${toHtml(target)}</a>"""
+ }
+ }
+ }
+ }
+}
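+
+// Illustrative sketch, not part of the original patch: rendering a parsed `Body` to
+// HTML. `NonEntity` is used as the origin since the body contains no entity links;
+// the object exists purely for illustration.
+private[comment] object BodyToHtmlExample {
+ import BodyParsers._
+ val body = Body(Seq(Title(Text("Usage"), 1), Paragraph(Bold(Text("important")))))
+ // == "<h1>Usage</h1><p><b>important</b></p>"
+ val html: String = body.toHtml(NonEntity)
+}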
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala
new file mode 100644
index 000000000..c4f6ccf5d
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/Comment.scala
@@ -0,0 +1,28 @@
+package dotty.tools
+package dottydoc
+package model
+package comment
+
+case class Comment (
+ body: String,
+ short: String,
+ authors: List[String],
+ see: List[String],
+ result: Option[String],
+ throws: Map[String, String],
+ valueParams: Map[String, String],
+ typeParams: Map[String, String],
+ version: Option[String],
+ since: Option[String],
+ todo: List[String],
+ deprecated: Option[String],
+ note: List[String],
+ example: List[String],
+ constructor: Option[String],
+ group: Option[String],
+ groupDesc: Map[String, String],
+ groupNames: Map[String, String],
+ groupPrio: Map[String, String],
+ /** List of conversions to hide, e.g. `scala.Predef.FloatArrayOps` */
+ hideImplicitConversions: List[String]
+)
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala
new file mode 100644
index 000000000..27b0ff977
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentCleaner.scala
@@ -0,0 +1,25 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+trait CommentCleaner {
+ import Regexes._
+
+ def clean(comment: String): List[String] = {
+ def cleanLine(line: String): String = {
+ // Remove trailing whitespace
+ TrailingWhitespace.replaceAllIn(line, "") match {
+ case CleanCommentLine(ctl) => ctl
+ case tl => tl
+ }
+ }
+ val strippedComment = comment.trim.stripPrefix("/*").stripSuffix("*/")
+ val safeComment = DangerousTags.replaceAllIn(strippedComment, { htmlReplacement(_) })
+ val javadoclessComment = JavadocTags.replaceAllIn(safeComment, { javadocReplacement(_) })
+ val markedTagComment =
+ SafeTags.replaceAllIn(javadoclessComment, { mtch =>
+ _root_.java.util.regex.Matcher.quoteReplacement(safeTagMarker + mtch.matched + safeTagMarker)
+ })
+ markedTagComment.lines.toList map (cleanLine(_))
+ }
+}
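+
+// Illustrative sketch, not part of the original patch: `clean` strips the comment
+// delimiters and the leading `*` of every line, leaving the raw wiki text. The
+// object exists purely for illustration.
+private[comment] object CommentCleanerExample extends CommentCleaner {
+ // == List("A short description.", "@param x the parameter", "")
+ val cleaned: List[String] = clean("/** A short description.\n * @param x the parameter\n */")
+}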
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala
new file mode 100644
index 000000000..32a0d8128
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentExpander.scala
@@ -0,0 +1,344 @@
+/*
+ * Port of DocComment.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+
+package dotty.tools
+package dottydoc
+package model
+package comment
+
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Symbols._
+import dotc.core.Flags
+import dotc.util.Positions._
+
+import scala.collection.mutable
+
+trait CommentExpander {
+ import CommentUtils._
+
+ def expand(sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val parent = if (site != NoSymbol) site else sym
+ defineVariables(parent)
+ expandedDocComment(sym, parent)
+ }
+
+ /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
+ *
+ * @param sym The symbol for which doc comment is returned
+ * @param site The class for which doc comments are generated
+ * @throws ExpansionLimitExceeded when more than 10 successive expansions
+ * of the same string are done, which is
+ * interpreted as a recursive variable definition.
+ */
+ def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(implicit ctx: Context): String = {
+ // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
+ val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym
+ else site
+ expandVariables(cookedDocComment(sym, docStr), sym, parent)
+ }
+
+ private def template(raw: String): String = {
+ val sections = tagIndex(raw)
+
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ if (end == raw.length - 2) raw else raw.substring(0, end) + "*/"
+ }
+
+ def defines(raw: String): List[String] = {
+ val sections = tagIndex(raw)
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ defines map { case (start, end) => raw.substring(start, end) }
+ }
+
+ private def replaceInheritDocToInheritdoc(docStr: String): String =
+ docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")
+
+ /** The cooked doc comment of an overridden symbol */
+ protected def superComment(sym: Symbol)(implicit ctx: Context): Option[String] =
+ allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "")
+
+ private val cookedDocComments = mutable.HashMap[Symbol, String]()
+
+ /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
+ * missing sections of an inherited doc comment.
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the doc comment of the overridden version is copied instead.
+ */
+ def cookedDocComment(sym: Symbol, docStr: String = "")(implicit ctx: Context): String = cookedDocComments.getOrElseUpdate(sym, {
+ var ownComment =
+ if (docStr.length == 0) ctx.docbase.docstring(sym).map(c => template(c.chrs)).getOrElse("")
+ else template(docStr)
+ ownComment = replaceInheritDocToInheritdoc(ownComment)
+
+ superComment(sym) match {
+ case None =>
+ // SI-8210 - The warning would be false negative when this symbol is a setter
+ if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter)
+ dottydoc.println(s"${sym.pos}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
+ ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
+ case Some(sc) =>
+ if (ownComment == "") sc
+ else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
+ }
+ })
+
+ private def isMovable(str: String, sec: (Int, Int)): Boolean =
+ startsWithTag(str, sec, "@param") ||
+ startsWithTag(str, sec, "@tparam") ||
+ startsWithTag(str, sec, "@return")
+
+ def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcParams = paramDocs(src, "@param", srcSections)
+ val dstParams = paramDocs(dst, "@param", dstSections)
+ val srcTParams = paramDocs(src, "@tparam", srcSections)
+ val dstTParams = paramDocs(dst, "@tparam", dstSections)
+ val out = new StringBuilder
+ var copied = 0
+ var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
+
+ if (copyFirstPara) {
+ val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
+ (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
+ out append src.substring(0, eop).trim
+ copied = 3
+ tocopy = 3
+ }
+
+ def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
+ case Some((start, end)) =>
+ if (end > tocopy) tocopy = end
+ case None =>
+ srcSec match {
+ case Some((start1, end1)) => {
+ out append dst.substring(copied, tocopy).trim
+ out append "\n"
+ copied = tocopy
+ out append src.substring(start1, end1).trim
+ }
+ case None =>
+ }
+ }
+
+ //TODO: enable this once you know how to get `sym.paramss`
+ /*
+ for (params <- sym.paramss; param <- params)
+ mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
+ for (tparam <- sym.typeParams)
+ mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)
+
+ mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
+ mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
+ */
+
+ if (out.length == 0) dst
+ else {
+ out append dst.substring(copied)
+ out.toString
+ }
+ }
+
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
+ */
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
+ else {
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
+
+ val out = new StringBuilder
+
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
+
+ def getParentSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ dottydoc.println(s"""${sym.pos}: the """" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.")
+ "<invalid inheritdoc annotation>"
+ }
+
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
+ }
+ }
+
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
+
+ // Append sections
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
+
+ out.append("*/")
+ out.toString
+ }
+
+ protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val expandLimit = 10
+
+ def expandInternal(str: String, depth: Int): String = {
+ if (depth >= expandLimit)
+ throw new ExpansionLimitExceeded(str)
+
+ val out = new StringBuilder
+ var copied, idx = 0
+ // excluding variables written as \$foo so we can use them when
+ // necessary to document things like Symbol#decode
+ def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\'
+ while (idx < str.length) {
+ if ((str charAt idx) != '$' || isEscaped)
+ idx += 1
+ else {
+ val vstart = idx
+ idx = skipVariable(str, idx + 1)
+ def replaceWith(repl: String) {
+ out append str.substring(copied, vstart)
+ out append repl
+ copied = idx
+ }
+ variableName(str.substring(vstart + 1, idx)) match {
+ case "super" =>
+ superComment(sym) foreach { sc =>
+ val superSections = tagIndex(sc)
+ replaceWith(sc.substring(3, startTag(sc, superSections)))
+ for (sec @ (start, end) <- superSections)
+ if (!isMovable(sc, sec)) out append sc.substring(start, end)
+ }
+ case "" => idx += 1
+ case vname =>
+ lookupVariable(vname, site) match {
+ case Some(replacement) => replaceWith(replacement)
+ case None =>
+ dottydoc.println(s"Variable $vname undefined in comment for $sym in $site")
+ }
+ }
+ }
+ }
+ if (out.length == 0) str
+ else {
+ out append str.substring(copied)
+ expandInternal(out.toString, depth + 1)
+ }
+ }
+
+ // We suppressed expanding \$ throughout the recursion, and now we
+ // need to replace \$ with $ so it looks as intended.
+ expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$")
+ }
+
+ def defineVariables(sym: Symbol)(implicit ctx: Context) = {
+ val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r
+
+ val raw = ctx.docbase.docstring(sym).map(_.chrs).getOrElse("")
+ defs(sym) ++= defines(raw).map {
+ str => {
+ val start = skipWhitespace(str, "@define".length)
+ val (key, value) = str.splitAt(skipVariable(str, start))
+ key.drop(start) -> value
+ }
+ } map {
+ case (key, Trim(value)) =>
+ variableName(key) -> value.replaceAll("\\s+\\*+$", "")
+ }
+ }
+
+ /** Maps symbols to the variable -> replacement maps that are defined
+ * in their doc comments
+ */
+ private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()
+
+ /** Lookup definition of variable.
+ *
+ * @param vble The variable for which a definition is searched
+ * @param site The class for which doc comments are generated
+ */
+ def lookupVariable(vble: String, site: Symbol)(implicit ctx: Context): Option[String] = site match {
+ case NoSymbol => None
+ case _ =>
+ val searchList =
+ if (site.flags.is(Flags.Module)) site :: site.info.baseClasses
+ else site.info.baseClasses
+
+ searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
+ }
+ }
+
+ /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the position of the doc comment of the overridden version is returned instead.
+ */
+ def docCommentPos(sym: Symbol)(implicit ctx: Context): Position =
+ ctx.docbase.docstring(sym).map(_.pos).getOrElse(NoPosition)
+
+ /** A version which doesn't consider self types, as a temporary measure:
+ * an infinite loop has broken out between superComment and cookedDocComment
+ * since r23926.
+ */
+ private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = {
+ if (!sym.owner.isClass) Nil
+ else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..`
+ //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
+ }
+
+ class ExpansionLimitExceeded(str: String) extends Exception
+}
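+
+// Illustrative sketch, not part of the original patch: `defines` extracts the raw
+// `@define` sections of a doc comment; `defineVariables` later turns them into the
+// variable -> replacement map used by `expandVariables`. Purely illustrative.
+private[comment] object CommentExpanderExample extends CommentExpander {
+ val raw = "/** Doc text.\n * @define owner scala.Option\n */"
+ // a single raw section starting with "@define owner scala.Option"
+ val rawDefines: List[String] = defines(raw)
+}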
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala
new file mode 100644
index 000000000..9685b6934
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentParser.scala
@@ -0,0 +1,846 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+import dotty.tools.dotc.util.Positions._
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Contexts.Context
+import scala.collection.mutable
+import dotty.tools.dotc.config.Printers.dottydoc
+import scala.util.matching.Regex
+
+trait CommentParser extends util.MemberLookup {
+ import Regexes._
+ import model.internal._
+
+ case class FullComment (
+ body: Body,
+ authors: List[Body],
+ see: List[Body],
+ result: Option[Body],
+ throws: Map[String, Body],
+ valueParams: Map[String, Body],
+ typeParams: Map[String, Body],
+ version: Option[Body],
+ since: Option[Body],
+ todo: List[Body],
+ deprecated: Option[Body],
+ note: List[Body],
+ example: List[Body],
+ constructor: Option[Body],
+ group: Option[Body],
+ groupDesc: Map[String, Body],
+ groupNames: Map[String, Body],
+ groupPrio: Map[String, Body],
+ hideImplicitConversions: List[Body],
+ shortDescription: List[Body]
+ ) {
+
+ /**
+ * Transform this CommentParser.FullComment to a Comment using the supplied
+ * Body transformer
+ */
+ def toComment(transform: Body => String) = Comment(
+ transform(body),
+ short =
+ if (shortDescription.nonEmpty) shortDescription.map(transform).mkString
+ else body.summary.map(transform).getOrElse(""),
+ authors.map(transform),
+ see.map(transform),
+ result.map(transform),
+ throws.map { case (k, v) => (k, transform(v)) },
+ valueParams.map { case (k, v) => (k, transform(v)) },
+ typeParams.map { case (k, v) => (k, transform(v)) },
+ version.map(transform),
+ since.map(transform),
+ todo.map(transform),
+ deprecated.map(transform),
+ note.map(transform),
+ example.map(transform),
+ constructor.map(transform),
+ group.map(transform),
+ groupDesc.map { case (k, v) => (k, transform(v)) },
+ groupNames.map { case (k, v) => (k, transform(v)) },
+ groupPrio.map { case (k, v) => (k, transform(v)) },
+ hideImplicitConversions.map(transform)
+ )
+ }
+
+ /** Parses a cleaned comment (given as a list of lines) into a `FullComment`.
+ * @param entity the entity the comment belongs to
+ * @param packages all packages parsed by the dottydoc tool, used for lookup
+ * @param comment the cleaned comment lines to be parsed
+ * @param src the raw comment source string.
+ * @param pos the position of the comment in source.
+ */
+ def parse(
+ entity: Entity,
+ packages: Map[String, Package],
+ comment: List[String],
+ src: String,
+ pos: Position,
+ site: Symbol = NoSymbol
+ )(implicit ctx: Context): FullComment = {
+
+ /** Parses a comment (in the form of a list of lines) to a `Comment`
+ * instance, recursively on lines. To do so, it splits the whole comment
+ * into main body and tag bodies, then runs the `WikiParser` on each body
+ * before creating the comment instance.
+ *
+ * @param docBody The body of the comment parsed until now.
+ * @param tags All tags parsed until now.
+ * @param lastTagKey The last parsed tag, or `None` if the tag section
+ * hasn't started. Lines that are not tagged are part
+ * of the previous tag or, if none exists, of the body.
+ * @param remaining The lines that must still recursively be parsed.
+ * @param inCodeBlock Whether the next line is part of a code block (in
+ * which no tags must be read).
+ */
+ def parseComment (
+ docBody: StringBuilder,
+ tags: Map[TagKey, List[String]],
+ lastTagKey: Option[TagKey],
+ remaining: List[String],
+ inCodeBlock: Boolean
+ ): FullComment = remaining match {
+
+ case CodeBlockStartRegex(before, marker, after) :: ls if (!inCodeBlock) =>
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = false)
+ else if (!before.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = false)
+ else if (!after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = true)
+ else lastTagKey match {
+ case Some(key) =>
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parseComment(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = true)
+ case None =>
+ parseComment(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = true)
+ }
+
+ case CodeBlockEndRegex(before, marker, after) :: ls => {
+ if (!before.trim.isEmpty && !after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: after :: ls, inCodeBlock = true)
+ else if (!before.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, before :: marker :: ls, inCodeBlock = true)
+ else if (!after.trim.isEmpty)
+ parseComment(docBody, tags, lastTagKey, marker :: after :: ls, inCodeBlock = false)
+ else lastTagKey match {
+ case Some(key) =>
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + marker) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ parseComment(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock = false)
+ case None =>
+ parseComment(docBody append endOfLine append marker, tags, lastTagKey, ls, inCodeBlock = false)
+ }
+ }
+
+ case SymbolTagRegex(name, sym, body) :: ls if (!inCodeBlock) => {
+ val key = SymbolTagKey(name, sym)
+ val value = body :: tags.getOrElse(key, Nil)
+ parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
+
+ case SimpleTagRegex(name, body) :: ls if (!inCodeBlock) => {
+ val key = SimpleTagKey(name)
+ val value = body :: tags.getOrElse(key, Nil)
+ parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
+
+ case SingleTagRegex(name) :: ls if (!inCodeBlock) => {
+ val key = SimpleTagKey(name)
+ val value = "" :: tags.getOrElse(key, Nil)
+ parseComment(docBody, tags + (key -> value), Some(key), ls, inCodeBlock)
+ }
+
+ case line :: ls if (lastTagKey.isDefined) => {
+ val newtags = if (!line.isEmpty) {
+ val key = lastTagKey.get
+ val value =
+ ((tags get key): @unchecked) match {
+ case Some(b :: bs) => (b + endOfLine + line) :: bs
+ case None => oops("lastTagKey set when no tag exists for key")
+ }
+ tags + (key -> value)
+ } else tags
+ parseComment(docBody, newtags, lastTagKey, ls, inCodeBlock)
+ }
+
+ case line :: ls => {
+ if (docBody.length > 0) docBody append endOfLine
+ docBody append line
+ parseComment(docBody, tags, lastTagKey, ls, inCodeBlock)
+ }
+
+ case Nil => {
+ // Take the {inheritance, content} diagram keys aside, as they don't need any parsing
+ val inheritDiagramTag = SimpleTagKey("inheritanceDiagram")
+ val contentDiagramTag = SimpleTagKey("contentDiagram")
+
+ val inheritDiagramText: List[String] = tags.get(inheritDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val contentDiagramText: List[String] = tags.get(contentDiagramTag) match {
+ case Some(list) => list
+ case None => List.empty
+ }
+
+ val stripTags = List(inheritDiagramTag, contentDiagramTag, SimpleTagKey("template"), SimpleTagKey("documentable"))
+ val tagsWithoutDiagram = tags.filterNot(pair => stripTags.contains(pair._1))
+
+ val bodyTags: mutable.Map[TagKey, List[Body]] =
+ mutable.Map((tagsWithoutDiagram mapValues {tag => tag map (parseWikiAtSymbol(entity, packages, _, pos, site))}).toSeq: _*)
+
+ def oneTag(key: SimpleTagKey, filterEmpty: Boolean = true): Option[Body] =
+ ((bodyTags remove key): @unchecked) match {
+ case Some(r :: rs) if !(filterEmpty && r.blocks.isEmpty) =>
+ if (!rs.isEmpty) dottydoc.println(s"$pos: only one '@${key.name}' tag is allowed")
+ Some(r)
+ case _ => None
+ }
+
+ def allTags(key: SimpleTagKey): List[Body] =
+ (bodyTags remove key).getOrElse(Nil).filterNot(_.blocks.isEmpty).reverse
+
+ def allSymsOneTag(key: TagKey, filterEmpty: Boolean = true): Map[String, Body] = {
+ val keys: Seq[SymbolTagKey] =
+ bodyTags.keys.toSeq flatMap {
+ case stk: SymbolTagKey if (stk.name == key.name) => Some(stk)
+ case stk: SimpleTagKey if (stk.name == key.name) =>
+ dottydoc.println(s"$pos: tag '@${stk.name}' must be followed by a symbol name")
+ None
+ case _ => None
+ }
+ val pairs: Seq[(String, Body)] =
+ for (key <- keys) yield {
+ val bs = (bodyTags remove key).get
+ if (bs.length > 1)
+ dottydoc.println(s"$pos: only one '@${key.name}' tag for symbol ${key.symbol} is allowed")
+ (key.symbol, bs.head)
+ }
+ Map.empty[String, Body] ++ (if (filterEmpty) pairs.filterNot(_._2.blocks.isEmpty) else pairs)
+ }
+
+ def linkedExceptions: Map[String, Body] = {
+ val m = allSymsOneTag(SimpleTagKey("throws"), filterEmpty = false)
+
+ m.map { case (targetStr,body) =>
+ val link = lookup(entity, packages, targetStr, pos)
+ val newBody = body match {
+ case Body(List(Paragraph(Chain(content)))) =>
+ val descr = Text(" ") +: content
+ val entityLink = EntityLink(Monospace(Text(targetStr)), link)
+ Body(List(Paragraph(Chain(entityLink +: descr))))
+ case _ => body
+ }
+ (targetStr, newBody)
+ }
+ }
+
+ val cmt = FullComment(
+ body = parseWikiAtSymbol(entity, packages, docBody.toString, pos, site),
+ authors = allTags(SimpleTagKey("author")),
+ see = allTags(SimpleTagKey("see")),
+ result = oneTag(SimpleTagKey("return")),
+ throws = linkedExceptions,
+ valueParams = allSymsOneTag(SimpleTagKey("param")),
+ typeParams = allSymsOneTag(SimpleTagKey("tparam")),
+ version = oneTag(SimpleTagKey("version")),
+ since = oneTag(SimpleTagKey("since")),
+ todo = allTags(SimpleTagKey("todo")),
+ deprecated = oneTag(SimpleTagKey("deprecated"), filterEmpty = false),
+ note = allTags(SimpleTagKey("note")),
+ example = allTags(SimpleTagKey("example")),
+ constructor = oneTag(SimpleTagKey("constructor")),
+ group = oneTag(SimpleTagKey("group")),
+ groupDesc = allSymsOneTag(SimpleTagKey("groupdesc")),
+ groupNames = allSymsOneTag(SimpleTagKey("groupname")),
+ groupPrio = allSymsOneTag(SimpleTagKey("groupprio")),
+ hideImplicitConversions = allTags(SimpleTagKey("hideImplicitConversion")),
+ shortDescription = allTags(SimpleTagKey("shortDescription"))
+ )
+
+ for ((key, _) <- bodyTags)
+ dottydoc.println(s"$pos: Tag '@${key.name}' is not recognised")
+
+ cmt
+ }
+ }
+
+ parseComment(new StringBuilder(comment.size), Map.empty, None, comment, inCodeBlock = false)
+ }
+
+ /** A key used for a tag map. The key is built from the name of the tag and
+ * from the linked symbol if the tag has one.
+ * Equality on tag keys is structural. */
+ private sealed abstract class TagKey {
+ def name: String
+ }
+
+ private final case class SimpleTagKey(name: String) extends TagKey
+ private final case class SymbolTagKey(name: String, symbol: String) extends TagKey
+
+ /** Something that should not have happened, happened, and Scaladoc should exit. */
+ private def oops(msg: String): Nothing =
+ throw new IllegalArgumentException("program logic: " + msg)
+
+ /** Parses a string containing wiki syntax into a `Comment` object.
+ * Note that the string is assumed to be clean:
+ * - Removed Scaladoc start and end markers.
+ * - Removed start-of-line star and one whitespace afterwards (if present).
+ * - Removed all end-of-line whitespace.
+ * - Only `endOfLine` is used to mark line endings. */
+ def parseWikiAtSymbol(
+ entity: Entity,
+ packages: Map[String, Package],
+ string: String,
+ pos: Position,
+ site: Symbol
+ )(implicit ctx: Context): Body = new WikiParser(entity, packages, string, pos, site).document()
+
+ /** Original wikiparser from NSC
+ * @author Ingo Maier
+ * @author Manohar Jonnalagedda
+ * @author Gilles Dubochet
+ */
+ protected final class WikiParser(
+ entity: Entity,
+ packages: Map[String, Package],
+ val buffer: String,
+ pos: Position,
+ site: Symbol
+ )(implicit ctx: Context) extends CharReader(buffer) { wiki =>
+ var summaryParsed = false
+
+ def document(): Body = {
+ val blocks = new mutable.ListBuffer[Block]
+ while (char != endOfText)
+ blocks += block()
+ Body(blocks.toList)
+ }
+
+ /* BLOCKS */
+
+ /** {{{ block ::= code | title | hrule | listBlock | para }}} */
+ def block(): Block = {
+ if (checkSkipInitWhitespace("{{{"))
+ code()
+ else if (checkSkipInitWhitespace('='))
+ title()
+ else if (checkSkipInitWhitespace("----"))
+ hrule()
+ else if (checkList)
+ listBlock
+ else {
+ para()
+ }
+ }
+
+ /** listStyle ::= '-' spc | '1.' spc | 'I.' spc | 'i.' spc | 'A.' spc | 'a.' spc
+ * Characters used to build lists and their constructors */
+ protected val listStyles = Map[String, (Seq[Block] => Block)](
+ "- " -> ( UnorderedList(_) ),
+ "1. " -> ( OrderedList(_,"decimal") ),
+ "I. " -> ( OrderedList(_,"upperRoman") ),
+ "i. " -> ( OrderedList(_,"lowerRoman") ),
+ "A. " -> ( OrderedList(_,"upperAlpha") ),
+ "a. " -> ( OrderedList(_,"lowerAlpha") )
+ )
+
+ /** Checks whether the current line starts with at least one space followed by one of the listStyles */
+ def checkList =
+ (countWhitespace > 0) && (listStyles.keys exists { checkSkipInitWhitespace(_) })
+
+ /** {{{
+ * nListBlock ::= nLine { mListBlock }
+ * nLine ::= nSpc listStyle para '\n'
+ * }}}
+ * Where n and m stand for the number of spaces. When `m > n`, a new list is nested. */
+ def listBlock(): Block = {
+
+ /** Consumes one list item block and returns it, or None if the block is
+ * not a list or belongs to a different list. */
+ def listLine(indent: Int, style: String): Option[Block] =
+ if (countWhitespace > indent && checkList)
+ Some(listBlock)
+ else if (countWhitespace != indent || !checkSkipInitWhitespace(style))
+ None
+ else {
+ jumpWhitespace()
+ jump(style)
+ val p = Paragraph(inline(isInlineEnd = false))
+ blockEnded("end of list line ")
+ Some(p)
+ }
+
+ /** Consumes all list item blocks (possibly with nested lists) of the
+ * same list and returns the list block. */
+ def listLevel(indent: Int, style: String): Block = {
+ val lines = mutable.ListBuffer.empty[Block]
+ var line: Option[Block] = listLine(indent, style)
+ while (line.isDefined) {
+ lines += line.get
+ line = listLine(indent, style)
+ }
+ val constructor = listStyles(style)
+ constructor(lines)
+ }
+
+ val indent = countWhitespace
+ val style = (listStyles.keys find { checkSkipInitWhitespace(_) }).getOrElse(listStyles.keys.head)
+ listLevel(indent, style)
+ }
+
+ def code(): Block = {
+ jumpWhitespace()
+ jump("{{{")
+ val str = readUntil("}}}")
+ if (char == endOfText)
+ reportError(pos, "unclosed code block")
+ else
+ jump("}}}")
+ blockEnded("code block")
+ Code(normalizeIndentation(str))
+ }
+
+ /** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
+ def title(): Block = {
+ jumpWhitespace()
+ val inLevel = repeatJump('=')
+ val text = inline(check("=" * inLevel))
+ val outLevel = repeatJump('=', inLevel)
+ if (inLevel != outLevel)
+ reportError(pos, "unbalanced or unclosed heading")
+ blockEnded("heading")
+ Title(text, inLevel)
+ }
+
+ /** {{{ hrule ::= "----" { '-' } '\n' }}} */
+ def hrule(): Block = {
+ jumpWhitespace()
+ repeatJump('-')
+ blockEnded("horizontal rule")
+ HorizontalRule()
+ }
+
+ /** {{{ para ::= inline '\n' }}} */
+ def para(): Block = {
+ val p =
+ if (summaryParsed)
+ Paragraph(inline(isInlineEnd = false))
+ else {
+ val s = summary()
+ val r =
+ if (checkParaEnded()) List(s) else List(s, inline(isInlineEnd = false))
+ summaryParsed = true
+ Paragraph(Chain(r))
+ }
+ while (char == endOfLine && char != endOfText)
+ nextChar()
+ p
+ }
+
+ /* INLINES */
+
+ val OPEN_TAG = "^<([A-Za-z]+)( [^>]*)?(/?)>$".r
+ val CLOSE_TAG = "^</([A-Za-z]+)>$".r
+ private def readHTMLFrom(begin: HtmlTag): String = {
+ val list = mutable.ListBuffer.empty[String]
+ val stack = mutable.ListBuffer.empty[String]
+
+ begin.close match {
+ case Some(HtmlTag(CLOSE_TAG(s))) =>
+ stack += s
+ case _ =>
+ return ""
+ }
+
+ do {
+ val str = readUntil { char == safeTagMarker || char == endOfText }
+ nextChar()
+
+ list += str
+
+ str match {
+ case OPEN_TAG(s, _, standalone) => {
+ if (standalone != "/") {
+ stack += s
+ }
+ }
+ case CLOSE_TAG(s) => {
+ if (s == stack.last) {
+ stack.remove(stack.length-1)
+ }
+ }
+ case _ => ;
+ }
+ } while (stack.length > 0 && char != endOfText)
+
+ list mkString ""
+ }
+
+ def inline(isInlineEnd: => Boolean): Inline = {
+
+ def inline0(): Inline = {
+ if (char == safeTagMarker) {
+ val tag = htmlTag()
+ HtmlTag(tag.data + readHTMLFrom(tag))
+ }
+ else if (check("'''")) bold()
+ else if (check("''")) italic()
+ else if (check("`")) monospace()
+ else if (check("__")) underline()
+ else if (check("^")) superscript()
+ else if (check(",,")) subscript()
+ else if (check("[[")) link()
+ else {
+ val str = readUntil {
+ char == safeTagMarker ||
+ check("''") ||
+ char == '`' ||
+ check("__") ||
+ char == '^' ||
+ check(",,") ||
+ check("[[") ||
+ isInlineEnd ||
+ checkParaEnded ||
+ char == endOfLine
+ }
+ Text(str)
+ }
+ }
+
+ val inlines: List[Inline] = {
+ val iss = mutable.ListBuffer.empty[Inline]
+ iss += inline0()
+ while (!isInlineEnd && !checkParaEnded) {
+ val skipEndOfLine = if (char == endOfLine) {
+ nextChar()
+ true
+ } else {
+ false
+ }
+
+ val current = inline0()
+ (iss.last, current) match {
+ case (Text(t1), Text(t2)) if skipEndOfLine =>
+ iss.update(iss.length - 1, Text(t1 + endOfLine + t2))
+ case (i1, i2) if skipEndOfLine =>
+ iss ++= List(Text(endOfLine.toString), i2)
+ case _ => iss += current
+ }
+ }
+ iss.toList
+ }
+
+ inlines match {
+ case Nil => Text("")
+ case i :: Nil => i
+ case is => Chain(is)
+ }
+
+ }
+
+ def htmlTag(): HtmlTag = {
+ jump(safeTagMarker)
+ val read = readUntil(safeTagMarker)
+ if (char != endOfText) jump(safeTagMarker)
+ HtmlTag(read)
+ }
+
+ def bold(): Inline = {
+ jump("'''")
+ val i = inline(check("'''"))
+ jump("'''")
+ Bold(i)
+ }
+
+ def italic(): Inline = {
+ jump("''")
+ val i = inline(check("''"))
+ jump("''")
+ Italic(i)
+ }
+
+ def monospace(): Inline = {
+ jump("`")
+ val i = inline(check("`"))
+ jump("`")
+ Monospace(i)
+ }
+
+ def underline(): Inline = {
+ jump("__")
+ val i = inline(check("__"))
+ jump("__")
+ Underline(i)
+ }
+
+ def superscript(): Inline = {
+ jump("^")
+ val i = inline(check("^"))
+ if (jump("^")) {
+ Superscript(i)
+ } else {
+ Chain(Seq(Text("^"), i))
+ }
+ }
+
+ def subscript(): Inline = {
+ jump(",,")
+ val i = inline(check(",,"))
+ jump(",,")
+ Subscript(i)
+ }
+
+ def summary(): Inline = {
+ val i = inline(checkSentenceEnded())
+ Summary(
+ if (jump("."))
+ Chain(List(i, Text(".")))
+ else
+ i
+ )
+ }
+
+ def link(): Inline = {
+ val SchemeUri = """([a-z]+:.*)""".r
+ jump("[[")
+ val parens = 2 + repeatJump('[')
+ val stop = "]" * parens
+ val target = readUntil { check(stop) || isWhitespaceOrNewLine(char) }
+ val title =
+ if (!check(stop)) Some({
+ jumpWhitespaceOrNewLine()
+ inline(check(stop))
+ })
+ else None
+ jump(stop)
+
+ (target, title) match {
+ case (SchemeUri(uri), optTitle) =>
+ Link(uri, optTitle getOrElse Text(uri))
+ case (qualName, optTitle) =>
+ makeEntityLink(entity, packages, optTitle getOrElse Text(target), pos, target)
+ }
+ }
+
+ /* UTILITY */
+
+ /** {{{ eol ::= { whitespace } '\n' }}} */
+ def blockEnded(blockType: String): Unit = {
+ if (char != endOfLine && char != endOfText) {
+ reportError(pos, "no additional content on same line after " + blockType)
+ jumpUntil(endOfLine)
+ }
+ while (char == endOfLine)
+ nextChar()
+ }
+
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the first line
+ * For indented pieces of code, it reduces the indent to the least whitespace prefix:
+ * {{{
+ * indented example
+ * another indented line
+ * if (condition)
+ * then do something;
+ * ^ this is the least whitespace prefix
+ * }}}
+ */
+ def normalizeIndentation(_code: String): String = {
+
+ val code = _code.replaceAll("\\s+$", "").dropWhile(_ == '\n') // right-trim + remove all leading '\n'
+ val lines = code.split("\n")
+
+ // maxSkip - size of the longest common whitespace prefix of non-empty lines
+ val nonEmptyLines = lines.filter(_.trim.nonEmpty)
+ val maxSkip = if (nonEmptyLines.isEmpty) 0 else nonEmptyLines.map(line => line.prefixLength(_ == ' ')).min
+
+ // remove common whitespace prefix
+ lines.map(line => if (line.trim.nonEmpty) line.substring(maxSkip) else line).mkString("\n")
+ }
+
+ def checkParaEnded(): Boolean = {
+ (char == endOfText) ||
+ ((char == endOfLine) && {
+ val poff = offset
+ nextChar() // read EOL
+ val ok = {
+ checkSkipInitWhitespace(endOfLine) ||
+ checkSkipInitWhitespace('=') ||
+ checkSkipInitWhitespace("{{{") ||
+ checkList ||
+ checkSkipInitWhitespace('\u003D')
+ }
+ offset = poff
+ ok
+ })
+ }
+
+ def checkSentenceEnded(): Boolean = {
+ (char == '.') && {
+ val poff = offset
+ nextChar() // read '.'
+ val ok = char == endOfText || char == endOfLine || isWhitespace(char)
+ offset = poff
+ ok
+ }
+ }
+
+ def reportError(pos: Position, message: String) =
+ dottydoc.println(s"$pos: $message")
+ }
+
+ protected sealed class CharReader(buffer: String) { reader =>
+
+ var offset: Int = 0
+ def char: Char =
+ if (offset >= buffer.length) endOfText else buffer charAt offset
+
+ final def nextChar() =
+ offset += 1
+
+ final def check(chars: String): Boolean = {
+ val poff = offset
+ val ok = jump(chars)
+ offset = poff
+ ok
+ }
+
+ def checkSkipInitWhitespace(c: Char): Boolean = {
+ val poff = offset
+ jumpWhitespace()
+ val ok = jump(c)
+ offset = poff
+ ok
+ }
+
+ def checkSkipInitWhitespace(chars: String): Boolean = {
+ val poff = offset
+ jumpWhitespace()
+ val (ok0, chars0) =
+ if (chars.charAt(0) == ' ')
+ (offset > poff, chars substring 1)
+ else
+ (true, chars)
+ val ok = ok0 && jump(chars0)
+ offset = poff
+ ok
+ }
+
+ def countWhitespace: Int = {
+ var count = 0
+ val poff = offset
+ while (isWhitespace(char) && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ offset = poff
+ count
+ }
+
+ /* Jumpers */
+
+ /** Jumps a character and consumes it
+ * @return true only if the correct character has been jumped */
+ final def jump(ch: Char): Boolean = {
+ if (char == ch) {
+ nextChar()
+ true
+ }
+ else false
+ }
+
+ /** Jumps all the characters in chars, consuming them in the process.
+ * @return true only if the correct characters have been jumped
+ */
+ final def jump(chars: String): Boolean = {
+ var index = 0
+ while (index < chars.length && char == chars.charAt(index) && char != endOfText) {
+ nextChar()
+ index += 1
+ }
+ index == chars.length
+ }
+
+ final def repeatJump(c: Char, max: Int = Int.MaxValue): Int = {
+ var count = 0
+ while (count < max && jump(c))
+ count += 1
+ count
+ }
+
+ final def jumpUntil(ch: Char): Int = {
+ var count = 0
+ while (char != ch && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ final def jumpUntil(pred: => Boolean): Int = {
+ var count = 0
+ while (!pred && char != endOfText) {
+ nextChar()
+ count += 1
+ }
+ count
+ }
+
+ def jumpWhitespace() = jumpUntil(!isWhitespace(char))
+
+ def jumpWhitespaceOrNewLine() = jumpUntil(!isWhitespaceOrNewLine(char))
+
+
+ /* Readers */
+ final def readUntil(c: Char): String = {
+ withRead {
+ while (char != c && char != endOfText) {
+ nextChar()
+ }
+ }
+ }
+
+ final def readUntil(chars: String): String = {
+ assert(chars.length > 0)
+ withRead {
+ val c = chars.charAt(0)
+ while (!check(chars) && char != endOfText) {
+ nextChar()
+ while (char != c && char != endOfText)
+ nextChar()
+ }
+ }
+ }
+
+ final def readUntil(pred: => Boolean): String = {
+ withRead {
+ while (char != endOfText && !pred) {
+ nextChar()
+ }
+ }
+ }
+
+ private def withRead(read: => Unit): String = {
+ val start = offset
+ read
+ buffer.substring(start, offset)
+ }
+
+ /* Chars classes */
+ def isWhitespace(c: Char) = c == ' ' || c == '\t'
+
+ def isWhitespaceOrNewLine(c: Char) = isWhitespace(c) || c == '\n'
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala
new file mode 100644
index 000000000..2d75b0c66
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentRegex.scala
@@ -0,0 +1,84 @@
+package dotty.tools.dottydoc
+package model
+package comment
+
+import scala.util.matching.Regex
+
+object Regexes {
+ val TrailingWhitespace = """\s+$""".r
+
+ /** The body of a line, dropping the (optional) start star-marker,
+ * one leading whitespace and all trailing whitespace
+ */
+ val CleanCommentLine =
+ new Regex("""(?:\s*\*\s?)?(.*)""")
+
+ /** Dangerous HTML tags that should be replaced by something safer,
+ * such as wiki syntax, or that should be dropped
+ */
+ val DangerousTags =
+ new Regex("""<(/?(div|ol|ul|li|h[1-6]|p))( [^>]*)?/?>|<!--.*-->""")
+
+ /** Javadoc tags that should be replaced by something useful, such as wiki
+ * syntax, or that should be dropped. */
+ val JavadocTags =
+ new Regex("""\{\@(code|docRoot|linkplain|link|literal|value)\p{Zs}*([^}]*)\}""")
+
+ /** Maps a javadoc tag to a useful wiki replacement, or an empty string if it cannot be salvaged. */
+ def javadocReplacement(mtch: Regex.Match): String = {
+ mtch.group(1) match {
+ case "code" => "<code>" + mtch.group(2) + "</code>"
+ case "docRoot" => ""
+ case "link" => "`[[" + mtch.group(2) + "]]`"
+ case "linkplain" => "[[" + mtch.group(2) + "]]"
+ case "literal" => "`" + mtch.group(2) + "`"
+ case "value" => "`" + mtch.group(2) + "`"
+ case _ => ""
+ }
+ }
+
+ /** Maps a dangerous HTML tag to a safe wiki replacement, or an empty string
+ * if it cannot be salvaged. */
+ def htmlReplacement(mtch: Regex.Match): String = mtch.group(1) match {
+ case "p" | "div" => "\n\n"
+ case "h1" => "\n= "
+ case "/h1" => " =\n"
+ case "h2" => "\n== "
+ case "/h2" => " ==\n"
+ case "h3" => "\n=== "
+ case "/h3" => " ===\n"
+ case "h4" | "h5" | "h6" => "\n==== "
+ case "/h4" | "/h5" | "/h6" => " ====\n"
+ case "li" => "\n * - "
+ case _ => ""
+ }
+
+ /** Safe HTML tags that can be kept. */
+ val SafeTags =
+ new Regex("""((&\w+;)|(&#\d+;)|(</?(abbr|acronym|address|area|a|bdo|big|blockquote|br|button|b|caption|cite|code|col|colgroup|dd|del|dfn|em|fieldset|form|hr|img|input|ins|i|kbd|label|legend|link|map|object|optgroup|option|param|pre|q|samp|select|small|span|strong|sub|sup|table|tbody|td|textarea|tfoot|th|thead|tr|tt|var)( [^>]*)?/?>))""")
+
+ val safeTagMarker = '\u000E'
+ val endOfLine = '\u000A'
+ val endOfText = '\u0003'
+
+ /** A Scaladoc tag not linked to a symbol and not followed by text */
+ val SingleTagRegex =
+ new Regex("""\s*@(\S+)\s*""")
+
+ /** A Scaladoc tag not linked to a symbol. Returns the name of the tag, and the rest of the line. */
+ val SimpleTagRegex =
+ new Regex("""\s*@(\S+)\s+(.*)""")
+
+ /** A Scaladoc tag linked to a symbol. Returns the name of the tag, the name
+ * of the symbol, and the rest of the line. */
+ val SymbolTagRegex =
+ new Regex("""\s*@(param|tparam|throws|groupdesc|groupname|groupprio)\s+(\S*)\s*(.*)""")
+
+ /** The start of a Scaladoc code block */
+ val CodeBlockStartRegex =
+ new Regex("""(.*?)((?:\{\{\{)|(?:\u000E<pre(?: [^>]*)?>\u000E))(.*)""")
+
+ /** The end of a Scaladoc code block */
+ val CodeBlockEndRegex =
+ new Regex("""(.*?)((?:\}\}\})|(?:\u000E</pre>\u000E))(.*)""")
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala
new file mode 100644
index 000000000..e5307bd3c
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/comment/CommentUtils.scala
@@ -0,0 +1,224 @@
+/*
+ * Port of DocStrings.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+
+package dotty.tools
+package dottydoc
+package model
+package comment
+
+import scala.reflect.internal.Chars._
+
+object CommentUtils {
+
+ /** Returns index of string `str` following `start` skipping longest
+ * sequence of whitespace characters (but no newlines)
+ */
+ def skipWhitespace(str: String, start: Int): Int =
+ if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping
+ * sequence of identifier characters.
+ */
+ def skipIdent(str: String, start: Int): Int =
+ if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping
+ * the `@` sign and the tag identifier that follows it.
+ */
+ def skipTag(str: String, start: Int): Int =
+ if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
+ else start
+
+
+ /** Returns index of string `str` after `start` skipping longest
+ * sequence of space and tab characters, possibly also containing
+ * a single `*` character or the `/``**` sequence.
+ * @pre start == str.length || str(start) == `\n`
+ */
+ def skipLineLead(str: String, start: Int): Int =
+ if (start == str.length) start
+ else {
+ val idx = skipWhitespace(str, start + 1)
+ if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
+ else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
+ skipWhitespace(str, idx + 3)
+ else idx
+ }
+
+ /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`.
+ */
+ def skipToEol(str: String, start: Int): Int =
+ if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
+ else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
+ else start
+
+ /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
+ * which satisfies predicate `p`.
+ */
+ def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
+ val idx = skipLineLead(str, skipToEol(str, start))
+ if (idx < str.length && !p(idx)) findNext(str, idx)(p)
+ else idx
+ }
+
+ /** Returns all indices following `start` that start a line (i.e. after skipLineLead)
+ * and satisfy predicate `p`.
+ */
+ def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
+ val idx = findNext(str, start)(p)
+ if (idx == str.length) List()
+ else idx :: findAll(str, idx)(p)
+ }
+
+ /** Produces a string index, which is a list of `sections`, i.e.
+ * pairs of start/end positions of all tagged sections in the string.
+ * Every section starts with an at sign and extends to the next at sign,
+ * or to the end of the comment string, but excluding the final two
+ * characters which terminate the comment.
+ *
+ * Usecases are also taken into account: they need to extend until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own.
+ */
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
+ var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
+ indices = mergeUsecaseSections(str, indices)
+ indices = mergeInheritdocSections(str, indices)
+
+ indices match {
+ case List() => List()
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ }
+ }
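+
+ // Illustrative sketch, not part of the original patch: `tagIndex` yields the
+ // start/end offsets of every tagged section of the raw comment.
+ private[comment] val tagIndexExample: List[(Int, Int)] =
+ tagIndex("/** Text.\n * @param x doc\n * @return y\n */") // two sections: @param and @return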
+
+ /**
+ * Merge sections following a usecase into the usecase comment, so they
+ * can override the parent symbol's sections
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.indexWhere(str.startsWith("@usecase", _)) match {
+ case firstUCIndex if firstUCIndex != -1 =>
+ val commentSections = idxs.take(firstUCIndex)
+ val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
+ commentSections ::: usecaseSections
+ case _ =>
+ idxs
+ }
+ }
+
+ /**
+ * Merge the inheritdoc sections, as they never make sense on their own
+ */
+ def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
+ idxs.filterNot(str.startsWith("@inheritdoc", _))
+
+ /** Does the given `section` interval start with `tag`?
+ */
+ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
+ startsWithTag(str, section._1, tag)
+
+ def startsWithTag(str: String, start: Int, tag: String): Boolean =
+ str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
+
+ /** The start index of the first tag in a list of tag intervals,
+ * or the end of the whole comment string minus 2 if the list is empty
+ */
+ def startTag(str: String, sections: List[(Int, Int)]) = sections match {
+ case Nil => str.length - 2
+ case (start, _) :: _ => start
+ }
+
+ /** A map from parameter names to start/end indices describing all parameter
+ * sections in `str` tagged with `tag`, where `sections` is the index of `str`.
+ */
+ def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections if startsWithTag(str, section, tag)) yield {
+ val start = skipWhitespace(str, section._1 + tag.length)
+ str.substring(start, skipIdent(str, start)) -> section
+ }
+ }
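+
+ // Illustrative sketch, not part of the original patch: `paramDocs` keys the
+ // `@param` sections of a comment by parameter name, using the index from `tagIndex`.
+ private[comment] val paramDocsExample: Map[String, (Int, Int)] = {
+ val raw = "/** Adds numbers.\n * @param x the first\n * @param y the second\n */"
+ paramDocs(raw, "@param", tagIndex(raw)) // keys "x" and "y", each mapped to its section span
+ }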
+
+ /** Optionally the start and end index of the group section in `str`, or `None`
+ * if `str` does not have a @group tag. */
+ def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@group"))
+
+
+ /** Optionally start and end index of return section in `str`, or `None`
+ * if `str` does not have a @return.
+ */
+ def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@return"))
+
+ /** Extracts variable name from a string, stripping any pair of surrounding braces */
+ def variableName(str: String): String =
+ if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
+ str.substring(1, str.length - 1)
+ else
+ str
+
+ /** Returns index following variable, or start index if no variable was recognized
+ */
+ def skipVariable(str: String, start: Int): Int = {
+ var idx = start
+ if (idx < str.length && (str charAt idx) == '{') {
+ do idx += 1
+ while (idx < str.length && (str charAt idx) != '}')
+ if (idx < str.length) idx + 1 else start
+ } else {
+ while (idx < str.length && isVarPart(str charAt idx))
+ idx += 1
+ idx
+ }
+ }
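+
+ // Illustrative sketch, not part of the original patch: both `$foo` and `${foo}` are
+ // recognised; `variableName` strips the braces and `skipVariable` returns the index
+ // just past the variable.
+ private[comment] val variableExample: (String, Int) =
+ (variableName("{owner}"), skipVariable("{owner}.get", 0)) // ("owner", 7)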
+
+ /** A map from the section tag to section parameters */
+ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections) yield
+ extractSectionTag(str, section) -> section
+ }
+
+ /** Extract the section tag, treating the section tag as an identifier */
+ def extractSectionTag(str: String, section: (Int, Int)): String =
+ str.substring(section._1, skipTag(str, section._1))
+
+ /** Extract the section parameter */
+ def extractSectionParam(str: String, section: (Int, Int)): String = {
+ val (beg, _) = section
+ assert(str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+
+ val start = skipWhitespace(str, skipTag(str, beg))
+ val finish = skipIdent(str, start)
+
+ str.substring(start, finish)
+ }
+
+ /** Extract the section text, except for the tag and comment newlines */
+ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
+ val (beg, end) = section
+ if (str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+ (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
+ else
+ (skipWhitespace(str, skipTag(str, beg)), end)
+ }
+
+ /** Cleanup section text */
+ def cleanupSectionText(str: String) = {
+ var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
+ while (result.endsWith("\n"))
+ result = result.substring(0, result.length - 1)
+ result
+ }
+
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/entities.scala b/dottydoc/src/dotty/tools/dottydoc/model/entities.scala
new file mode 100644
index 000000000..76792070c
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/entities.scala
@@ -0,0 +1,115 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+trait Entity {
+ def name: String
+
+ /** Path from root, e.g. `scala.Option$` */
+ def path: List[String]
+
+ def comment: Option[Comment]
+
+ def kind: String
+
+ def parent: Entity
+
+ /** All parents from the package level down, i.e. Package to Object to Member etc. */
+ def parents: List[Entity] = parent match {
+ case NonEntity => Nil
+ case e => e :: e.parents
+ }
+
+  /** Applies `f` to the entity unless it is `NonEntity`, in which case `nonEntity` is returned */
+ def fold[A](nonEntity: A)(f: Entity => A) = this match {
+ case NonEntity => nonEntity
+ case x => f(x)
+ }
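+  // Illustrative: for any `ent: Entity`, `ent.fold("<none>")(_.name)` returns "<none>" when
+  // `ent` is NonEntity, otherwise the entity's name.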
+}
+
+trait SuperTypes {
+ def superTypes: List[MaterializableLink]
+}
+
+trait Members {
+ def members: List[Entity]
+}
+
+trait Modifiers {
+ def modifiers: List[String]
+
+ val isPrivate: Boolean =
+ modifiers.contains("private")
+}
+
+trait TypeParams {
+ def typeParams: List[String]
+}
+
+trait ReturnValue {
+ def returnValue: Reference
+}
+
+trait ParamList {
+ def list: List[NamedReference]
+ def isImplicit: Boolean
+}
+
+trait Constructors {
+ def constructors: List[List[ParamList]]
+}
+
+trait ImplicitlyAddedEntity extends Entity {
+ def implicitlyAddedFrom: Option[Reference]
+}
+
+trait Package extends Entity with Members {
+ val kind = "package"
+
+ def children: List[Entity with Members]
+}
+
+trait Class extends Entity with Modifiers with TypeParams with Constructors with SuperTypes with Members {
+ val kind = "class"
+}
+
+trait CaseClass extends Entity with Modifiers with TypeParams with Constructors with SuperTypes with Members {
+ override val kind = "case class"
+}
+
+trait Trait extends Entity with Modifiers with TypeParams with SuperTypes with Members {
+ def traitParams: List[ParamList]
+ override val kind = "trait"
+}
+
+trait Object extends Entity with Modifiers with SuperTypes with Members {
+ override val kind = "object"
+}
+
+trait Def extends Entity with Modifiers with TypeParams with ReturnValue with ImplicitlyAddedEntity {
+ val kind = "def"
+ def paramLists: List[ParamList]
+}
+
+trait Val extends Entity with Modifiers with ReturnValue with ImplicitlyAddedEntity {
+ val kind = "val"
+}
+
+trait Var extends Entity with Modifiers with ReturnValue {
+ val kind = "var"
+}
+
+trait NonEntity extends Entity {
+ val name = ""
+ val comment = None
+ val path = Nil
+ val kind = ""
+ val parent = NonEntity
+}
+
+final case object NonEntity extends NonEntity
+final case object RootEntity extends NonEntity {
+ override val name = "root"
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/factories.scala b/dottydoc/src/dotty/tools/dottydoc/model/factories.scala
new file mode 100644
index 000000000..b19b836ee
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/factories.scala
@@ -0,0 +1,183 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+import dotty.tools.dotc
+import dotc.core.Types._
+import dotc.core.TypeApplications._
+import dotc.core.Contexts.Context
+import dotc.core.Symbols.{ Symbol, ClassSymbol }
+import dotty.tools.dotc.core.SymDenotations._
+import dotty.tools.dotc.core.Names.TypeName
+import dotc.ast.Trees._
+
+
+object factories {
+ import dotty.tools.dotc.ast.tpd._
+ import dotty.tools.dottydoc.model.internal.ParamListImpl
+ import dotc.core.Flags._
+
+ type TypeTree = dotty.tools.dotc.ast.Trees.Tree[Type]
+
+ def flags(t: Tree)(implicit ctx: Context): List[String] =
+ (t.symbol.flags & SourceModifierFlags)
+ .flagStrings.toList
+ .filter(_ != "<trait>")
+ .filter(_ != "interface")
+
+ def path(sym: Symbol)(implicit ctx: Context): List[String] = sym match {
+ case sym if sym.name.decode.toString == "<root>" => Nil
+ case sym => path(sym.owner) :+ sym.name.show
+ }
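+  // Illustrative: for the class symbol of scala.collection.Seq, `path` yields
+  // List("scala", "collection", "Seq").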
+
+
+ private val product = """Product[1-9][0-9]*""".r
+
+ def returnType(t: Type)(implicit ctx: Context): Reference = {
+ val defn = ctx.definitions
+
+ def typeRef(name: String, query: String = "", params: List[Reference] = Nil) = {
+ val realQuery = if (query != "") query else name
+ TypeReference(name, UnsetLink(name, realQuery), params)
+ }
+
+ def expandTpe(t: Type, params: List[Reference] = Nil): Reference = t match {
+ case tl: TypeLambda =>
+ //FIXME: should be handled correctly
+ // example, in `Option`:
+ //
+ // {{{
+ // def companion: GenericCompanion[collection.Iterable]
+ // }}}
+ //
+ // Becomes: def companion: [+X0] -> collection.Iterable[X0]
+ typeRef(tl.show + " (not handled)")
+ case AppliedType(tycon, args) =>
+ val cls = tycon.typeSymbol
+ if (tycon.isRepeatedParam)
+ expandTpe(args.head)
+ else if (defn.isFunctionClass(cls))
+ FunctionReference(args.init.map(expandTpe(_, Nil)), expandTpe(args.last))
+ else if (defn.isTupleClass(cls))
+ TupleReference(args.map(expandTpe(_, Nil)))
+ else {
+ val query = tycon.show
+ val name = query.split("\\.").last
+ typeRef(name, query, params = args.map(expandTpe(_, Nil)))
+ }
+
+ case ref @ RefinedType(parent, rn, info) =>
+ expandTpe(parent) //FIXME: will be a refined HK, aka class Foo[X] { def bar: List[X] } or similar
+ case ref @ HKApply(tycon, args) =>
+ expandTpe(tycon, args.map(expandTpe(_, params)))
+ case TypeRef(_, n) =>
+ val name = n.decode.toString.split("\\$").last
+ typeRef(name, params = params)
+ case ta: TypeAlias =>
+ expandTpe(ta.alias.widenDealias)
+ case OrType(left, right) =>
+ OrTypeReference(expandTpe(left), expandTpe(right))
+ case AndType(left, right) =>
+ AndTypeReference(expandTpe(left), expandTpe(right))
+ case tb @ TypeBounds(lo, hi) =>
+ BoundsReference(expandTpe(lo), expandTpe(hi))
+ case AnnotatedType(tpe, _) =>
+ expandTpe(tpe)
+ case ExprType(tpe) =>
+ expandTpe(tpe)
+ case c: ConstantType =>
+ ConstantReference(c.show)
+ case tt: ThisType =>
+ expandTpe(tt.underlying)
+ case ci: ClassInfo =>
+ val query = path(ci.typeSymbol).mkString(".")
+ typeRef(ci.cls.name.show, query = query)
+ case mt: MethodType =>
+ expandTpe(mt.resultType)
+ case pt: PolyType =>
+ expandTpe(pt.resultType)
+ case pp: PolyParam =>
+ val paramName = pp.paramName.show
+ val name =
+ if (paramName.contains('$'))
+ paramName.split("\\$\\$").last
+ else paramName
+
+ typeRef(name)
+ }
+
+ expandTpe(t)
+ }
+
+ def typeParams(sym: Symbol)(implicit ctx: Context): List[String] =
+ sym.info match {
+ case pt: PolyType => // TODO: not sure if this case is needed anymore
+ pt.paramNames.map(_.show.split("\\$").last)
+ case ClassInfo(_, _, _, decls, _) =>
+ decls.iterator
+ .filter(_.flags is TypeParam)
+ .map { tp =>
+ val prefix =
+ if (tp.flags is Covariant) "+"
+ else if (tp.flags is Contravariant) "-"
+ else ""
+ prefix + tp.name.show.split("\\$").last
+ }
+ .toList
+ case _ =>
+ Nil
+ }
+
+ def constructors(sym: Symbol)(implicit ctx: Context): List[List[ParamList]] = sym match {
+ case sym: ClassSymbol =>
+ paramLists(sym.primaryConstructor.info) :: Nil
+ case _ => Nil
+ }
+
+ def traitParameters(sym: Symbol)(implicit ctx: Context): List[ParamList] =
+ constructors(sym).head
+
+ def paramLists(tpe: Type)(implicit ctx: Context): List[ParamList] = tpe match {
+ case pt: PolyType =>
+ paramLists(pt.resultType)
+
+ case mt: MethodType =>
+ ParamListImpl(mt.paramNames.zip(mt.paramTypes).map { case (name, tpe) =>
+ NamedReference(
+ name.decode.toString,
+ returnType(tpe),
+ isByName = tpe.isInstanceOf[ExprType],
+ isRepeated = tpe.isRepeatedParam
+ )
+ }, mt.isImplicit) :: paramLists(mt.resultType)
+
+ case annot: AnnotatedType => paramLists(annot.tpe)
+ case (_: PolyParam | _: RefinedType | _: TypeRef | _: ThisType |
+ _: ExprType | _: OrType | _: AndType | _: HKApply) => Nil // return types should not be in the paramlist
+ }
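+  // Illustrative: for `def foo(x: Int)(implicit s: String): Int` this yields two ParamLists,
+  // where the second has `isImplicit = true`.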
+
+ def superTypes(t: Tree)(implicit ctx: Context): List[MaterializableLink] = t.symbol.denot match {
+ case cd: ClassDenotation =>
+ def isJavaLangObject(prefix: Type): Boolean =
+ prefix match {
+ case TypeRef(ThisType(TypeRef(NoPrefix, outerName)), innerName) =>
+ outerName.toString == "lang" && innerName.toString == "Object"
+ case _ => false
+ }
+
+ def isProductWithArity(prefix: Type): Boolean = prefix match {
+ case TypeRef(TermRef(TermRef(NoPrefix, root), scala), prod) =>
+ root.toString == "_root_" &&
+ scala.toString == "scala" &&
+ product.findFirstIn(prod.toString).isDefined
+ case _ => false
+ }
+
+ cd.classParents.collect {
+ case t: TypeRef if !isJavaLangObject(t) && !isProductWithArity(t) =>
+ UnsetLink(t.name.toString, path(t.symbol).mkString("."))
+ }
+ case _ => Nil
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/internal.scala b/dottydoc/src/dotty/tools/dottydoc/model/internal.scala
new file mode 100644
index 000000000..6afb1ec9b
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/internal.scala
@@ -0,0 +1,89 @@
+package dotty.tools.dottydoc
+package model
+
+import comment.Comment
+import references._
+
+object internal {
+
+ trait Impl {
+ var parent: Entity = NonEntity
+ }
+
+ final case class PackageImpl(
+ name: String,
+ var members: List[Entity],
+ path: List[String],
+ var comment: Option[Comment] = None
+ ) extends Package with Impl {
+ def children: List[Entity with Members] =
+ members.collect { case x: Entity with Members => x }
+ }
+
+ final case class ClassImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ typeParams: List[String] = Nil,
+ constructors: List[List[ParamList]] = Nil,
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends Class with Impl
+
+ final case class CaseClassImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ typeParams: List[String] = Nil,
+ constructors: List[List[ParamList]] = Nil,
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends CaseClass with Impl
+
+ final case class TraitImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ typeParams: List[String] = Nil,
+ traitParams: List[ParamList] = Nil,
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends Trait with Impl
+
+ final case class ObjectImpl(
+ name: String,
+ members: List[Entity],
+ modifiers: List[String],
+ path: List[String],
+ superTypes: List[MaterializableLink] = Nil,
+ var comment: Option[Comment] = None
+ ) extends Object with Impl
+
+ final case class DefImpl(
+ name: String,
+ modifiers: List[String],
+ path: List[String],
+ returnValue: Reference,
+ typeParams: List[String] = Nil,
+ paramLists: List[ParamList] = Nil,
+ var comment: Option[Comment] = None,
+ implicitlyAddedFrom: Option[Reference] = None
+ ) extends Def with Impl
+
+ final case class ValImpl(
+ name: String,
+ modifiers: List[String],
+ path: List[String],
+ returnValue: Reference,
+ var comment: Option[Comment] = None,
+ implicitlyAddedFrom: Option[Reference] = None
+ ) extends Val with Impl
+
+ final case class ParamListImpl(
+ list: List[NamedReference],
+ isImplicit: Boolean
+ ) extends ParamList
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/java.scala b/dottydoc/src/dotty/tools/dottydoc/model/java.scala
new file mode 100644
index 000000000..410085061
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/java.scala
@@ -0,0 +1,223 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+object java {
+ import scala.collection.JavaConverters._
+ import _root_.java.util.{ Optional => JOptional, Map => JMap }
+
+ implicit class JavaOption[A](val opt: Option[A]) extends AnyVal {
+ def asJava: JOptional[A] =
+ opt.map(a => JOptional.of(a)).getOrElse(JOptional.empty[A])
+ }
+
+ implicit class JavaComment(val cmt: Comment) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "body" -> cmt.body,
+ "short" -> cmt.short,
+ "authors" -> cmt.authors.asJava,
+ "see" -> cmt.see.asJava,
+ "result" -> cmt.result.asJava,
+ "throws" -> cmt.throws.asJava,
+ "valueParams" -> cmt.valueParams.asJava,
+ "typeParams" -> cmt.typeParams.asJava,
+ "version" -> cmt.version.asJava,
+ "since" -> cmt.since.asJava,
+ "todo" -> cmt.todo.asJava,
+ "deprecated" -> cmt.deprecated.asJava,
+ "note" -> cmt.note.asJava,
+ "example" -> cmt.example.asJava,
+ "constructor" -> cmt.constructor.asJava,
+ "group" -> cmt.group.asJava,
+ "groupDesc" -> cmt.groupDesc.asJava,
+ "groupNames" -> cmt.groupNames.asJava,
+ "groupPrio" -> cmt.groupPrio.asJava,
+ "hideImplicitConversions" -> cmt.hideImplicitConversions.asJava
+ ).asJava
+ }
+
+ implicit class JavaPackage(val ent: Package) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "path" -> ent.path.asJava,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaCaseClass(val ent: CaseClass) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaClass(val ent: Class) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaTrait(val ent: Trait) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaObject(val ent: Object) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = (Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "members" -> ent.members.map(_.asJava()).asJava,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "superTypes" -> ent.superTypes.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava
+ ) ++ extras).asJava
+ }
+
+ implicit class JavaDef(val ent: Def) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "returnValue" -> ent.returnValue.asJava,
+ "typeParams" -> ent.typeParams.asJava,
+ "paramLists" -> ent.paramLists.map(_.asJava).asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava,
+ "implicitlyAddedFrom" -> ent.implicitlyAddedFrom.map(_.asJava).asJava
+ ).asJava
+ }
+
+ implicit class JavaVal(val ent: Val) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "kind" -> ent.kind,
+ "name" -> ent.name,
+ "modifiers" -> ent.modifiers.asJava,
+ "path" -> ent.path.asJava,
+ "returnValue" -> ent.returnValue.asJava,
+ "comment" -> ent.comment.map(_.asJava).asJava,
+ "implicitlyAddedFrom" -> ent.implicitlyAddedFrom.map(_.asJava).asJava
+ ).asJava
+ }
+
+ implicit class JavaParamList(val pl: ParamList) extends AnyVal {
+ def asJava: JMap[String, _] = Map(
+ "list" -> pl.list.map(_.asJava).asJava,
+ "isImplicit" -> pl.isImplicit
+ ).asJava
+ }
+
+ implicit class JavaReference(val ref: Reference) extends AnyVal {
+ def asJava: JMap[String, _] = ref match {
+ case TypeReference(title, tpeLink, paramLinks) => Map(
+ "kind" -> "TypeReference",
+ "title" -> title,
+ "tpeLink" -> tpeLink.asJava,
+ "paramLinks" -> paramLinks.map(_.asJava).asJava
+ ).asJava
+
+ case OrTypeReference(left, right) => Map(
+ "kind" -> "OrTypeReference",
+ "left" -> left.asJava,
+ "right" -> right.asJava
+ ).asJava
+
+ case AndTypeReference(left, right) => Map(
+ "kind" -> "AndTypeReference",
+ "left" -> left.asJava,
+ "right" -> right.asJava
+ ).asJava
+
+ case FunctionReference(args, returnValue) => Map(
+ "kind" -> "FunctionReference",
+ "args" -> args.map(_.asJava).asJava,
+        "returnValue" -> returnValue.asJava
+ ).asJava
+
+ case TupleReference(args) => Map(
+ "kind" -> "TupleReference",
+ "args" -> args.map(_.asJava).asJava
+ ).asJava
+
+ case BoundsReference(low, high) => Map(
+ "kind" -> "BoundsReference",
+ "low" -> low.asJava,
+        "high" -> high.asJava
+ ).asJava
+
+ case NamedReference(title, ref, isByName, isRepeated) => Map(
+ "kind" -> "NamedReference",
+ "title" -> title,
+ "ref" -> ref.asJava,
+ "isByName" -> isByName,
+ "isRepeated" -> isRepeated
+ ).asJava
+
+ case ConstantReference(title) => Map(
+ "kind" -> "ConstantReference",
+ "title" -> title
+ ).asJava
+ }
+ }
+
+ implicit class JavaMaterializableLink(val link: MaterializableLink) extends AnyVal {
+ def asJava: JMap[String, _] = link match {
+ case UnsetLink(title, query) => Map(
+ "kind" -> "UnsetLink",
+ "title" -> title,
+ "query" -> query
+ ).asJava
+
+ case MaterializedLink(title, target) => Map(
+ "kind" -> "MaterializedLink",
+ "title" -> title,
+ "target" -> target
+ ).asJava
+
+ case NoLink(title, target) => Map(
+ "kind" -> "NoLink",
+ "title" -> title,
+ "target" -> target
+ ).asJava
+ }
+ }
+
+ implicit class JavaEntity(val ent: Entity) extends AnyVal {
+ def asJava(extras: Map[String, _] = Map.empty): JMap[String, _] = parseEntity(ent, extras)
+ }
+
+ private def parseEntity(ent: Entity, extras: Map[String, _]): JMap[String, _] = ent match {
+ case ent: Package => ent.asJava(extras)
+ case ent: CaseClass => ent.asJava(extras)
+ case ent: Class => ent.asJava(extras)
+ case ent: Trait => ent.asJava(extras)
+ case ent: Object => ent.asJava(extras)
+ case ent: Def => ent.asJava
+ case ent: Val => ent.asJava
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/json.scala b/dottydoc/src/dotty/tools/dottydoc/model/json.scala
new file mode 100644
index 000000000..145728f8a
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/json.scala
@@ -0,0 +1,93 @@
+package dotty.tools.dottydoc
+package model
+
+import comment._
+import references._
+
+/** This object provides a protocol for serializing the package AST to JSON
+ *
+ * TODO: It might be a good idea to represent the JSON as something better
+ * than just a serialized string blob in the future.
+ */
+object json {
+ implicit class JsonString(val str: String) extends AnyVal {
+ def json: String = {
+ val cleanedString = str
+ .replaceAll("\\\\","\\\\\\\\")
+ .replaceAll("\\\"", "\\\\\"")
+ .replaceAll("\n", "\\\\n")
+
+ s""""$cleanedString""""
+ }
+ }
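+
+  // Illustrative: for the input He said "hi", `json` produces the JSON literal "He said \"hi\"",
+  // with quotes, backslashes and newlines escaped.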
+
+ implicit class JsonBoolean(val boo: Boolean) extends AnyVal {
+ def json: String = if (boo) "true" else "false"
+ }
+
+ implicit class JsonComment(val cmt: Comment) extends AnyVal {
+ def json: String =
+ s"""{"body":${cmt.body.json},"short":${cmt.short.json},"authors":${cmt.authors.map(_.json).mkString("[",",","]")},"see":${cmt.see.map(_.json).mkString("[",",","]")},${cmt.result.map(res => s""""result":${res.json},""").getOrElse("")}"throws":${cmt.throws.map { case (k, v) => s"${k.json}:${v.json}" }.mkString("{",",","}")},"valueParams":${cmt.valueParams.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"typeParams":${cmt.typeParams.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},${cmt.version.map(x => s""""version":${x.json},""").getOrElse("")}${cmt.since.map(x => s""""since":${x.json},""").getOrElse("")}"todo":${cmt.todo.map(_.json).mkString("[",",","]")},${cmt.deprecated.map(x => s""""deprecated":${x.json},""").getOrElse("")}"note":${cmt.note.map(_.json).mkString("[",",","]")},"example":${cmt.example.map(_.json).mkString("[",",","]")},${cmt.constructor.map(x => s""""constructor":${x.json},""").getOrElse("")}${cmt.group.map(x => s""""group":${x.json},""").getOrElse("")}"groupDesc":${cmt.groupDesc.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"groupNames":${cmt.groupNames.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"groupPrio":${cmt.groupPrio.map { case (k, v) => s"${k.json}:${v.json}"}.mkString("{",",","}")},"hideImplicitConversions":${cmt.hideImplicitConversions.map(_.json).mkString("[",",","]")}}"""
+ }
+
+ implicit class LinkJson(val link: MaterializableLink) extends AnyVal {
+ def json: String = {
+ val (secondTitle, secondValue, kind) = link match {
+ case ul: UnsetLink => ("query".json, ul.query.json, "UnsetLink".json)
+ case ml: MaterializedLink => ("target".json, ml.target.json, "MaterializedLink".json)
+ case nl: NoLink => ("target".json, nl.target.json, "NoLink".json)
+ }
+ s"""{"title":${link.title.json},$secondTitle:${secondValue},"kind":$kind}"""
+ }
+ }
+
+ implicit class ParamListJson(val plist: ParamList) extends AnyVal {
+ def json: String =
+ s"""{"list":${plist.list.map(_.json).mkString("[",",","]")},"isImplicit":${plist.isImplicit.json}}"""
+ }
+
+ private def refToJson(ref: Reference): String = ref match {
+ case ref: TypeReference =>
+ s"""{"title":${ref.title.json},"tpeLink":${ref.tpeLink.json},"paramLinks":${ref.paramLinks.map(_.json).mkString("[",",","]")},"kind":"TypeReference"}"""
+ case ref: AndTypeReference =>
+ s"""{"left":${refToJson(ref.left)},"right":${refToJson(ref.right)},"kind":"AndTypeReference"}"""
+ case ref: OrTypeReference =>
+ s"""{"left":${refToJson(ref.left)},"right":${refToJson(ref.right)},"kind":"OrTypeReference"}"""
+ case ref: BoundsReference =>
+ s"""{"low":${refToJson(ref.low)},"high":${refToJson(ref.high)},"kind":"BoundsReference"}"""
+ case ref: NamedReference =>
+ s"""{"title":${ref.title.json},"ref":${refToJson(ref.ref)},"isByName":${ref.isByName.json},"isRepeated":${ref.isRepeated.json},"kind":"NamedReference"}"""
+ case ref: ConstantReference =>
+ s"""{"title":${ref.title.json},"kind": "ConstantReference"}"""
+ case ref: FunctionReference =>
+ s"""{"args":${ref.args.map(refToJson).mkString("[",",","]")},"returnValue":${refToJson(ref.returnValue)},"kind": "FunctionReference"}"""
+ case ref: TupleReference =>
+ s"""{"args":${ref.args.map(refToJson).mkString("[",",","]")},"kind": "TupleReference"}"""
+ }
+ implicit class ReferenceJson(val ref: Reference) extends AnyVal { def json: String = refToJson(ref) }
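+  // Illustrative: TypeReference("Int", UnsetLink("Int", "scala.Int"), Nil).json produces
+  // {"title":"Int","tpeLink":{"title":"Int","query":"scala.Int","kind":"UnsetLink"},"paramLinks":[],"kind":"TypeReference"}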
+
+ private def entToJson(ent: Entity): String = ent match {
+ case ent: Package =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"package"}"""
+ case ent: Class =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"constructors":${ent.constructors.map(xs => xs.map(_.json).mkString("[",",","]")).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"class"}"""
+ case ent: CaseClass =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"constructors":${ent.constructors.map(xs => xs.map(_.json).mkString("[",",","]")).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"case class"}"""
+ case ent: Trait =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"traitParams":${ent.traitParams.map(_.json).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"trait"}"""
+ case ent: Object =>
+ s"""{"name":${ent.name.json},"members":${ent.members.map(_.json).mkString("[",",","]")},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"superTypes":${ent.superTypes.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}"kind":"object"}"""
+ case ent: Def =>
+ s"""{"name":${ent.name.json},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"returnValue":${ent.returnValue.json},"typeParams":${ent.typeParams.map(_.json).mkString("[",",","]")},"paramLists":${ent.paramLists.map(_.json).mkString("[",",","]")},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}${ent.implicitlyAddedFrom.fold("")(ref => s""""implicitlyAddedFrom":${ref.json},""")}"kind":"def"}"""
+ case ent: Val =>
+ s"""{"name":${ent.name.json},"modifiers":${ent.modifiers.map(_.json).mkString("[",",","]")},"path":${ent.path.map(_.json).mkString("[",",","]")},"returnValue":${ent.returnValue.json},${ent.comment.map(_.json).fold("")(cmt => s""""comment":$cmt,""")}${ent.implicitlyAddedFrom.fold("")(ref => s""""implicitlyAddedFrom":${ref.json},""")}"kind":"val"}"""
+ }
+ implicit class EntityJson(val ent: Entity) extends AnyVal { def json: String = entToJson(ent) }
+ implicit class PackageJson(val pack: Package) extends AnyVal { def json: String = (pack: Entity).json }
+
+ implicit class PackMapJson(val packs: collection.Map[String, Package]) extends AnyVal {
+ def json: String = packs
+ .map { case (k, v) => s"${k.json}: ${v.json}" }
+ .mkString("{",",","}")
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala b/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala
new file mode 100644
index 000000000..fa54163e5
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/parsers.scala
@@ -0,0 +1,98 @@
+package dotty.tools
+package dottydoc
+package model
+
+import dotc.core.Symbols.Symbol
+import dotc.core.Contexts.Context
+import dotc.util.Positions.NoPosition
+
+object parsers {
+ import comment._
+ import BodyParsers._
+ import model.internal._
+ import util.MemberLookup
+ import util.traversing._
+ import util.internal.setters._
+
+ class WikiParser extends CommentCleaner with CommentParser with CommentExpander {
+ private[this] var commentCache: Map[String, (Entity, Map[String, Package]) => Option[Comment]] = Map.empty
+
+    /** Parses a comment and returns the path to the entity together with its optional comment
+     *
+     * The idea here is to use this to create a `Future[Seq[(String, Option[Comment])]]`
+     * which can then be awaited near the end of the run - before the pickling.
+     */
+ def parseHtml(sym: Symbol, parent: Symbol, entity: Entity, packages: Map[String, Package])(implicit ctx: Context): (String, Option[Comment]) = {
+ val cmt = ctx.docbase.docstring(sym).map { d =>
+ val expanded = expand(sym, parent)
+ parse(entity, packages, clean(expanded), expanded, d.pos).toComment(_.toHtml(entity))
+ }
+
+ (entity.path.mkString("."), cmt)
+ }
+
+
+ def add(entity: Entity, symbol: Symbol, parent: Symbol, ctx: Context): Unit = {
+ val commentParser = { (entity: Entity, packs: Map[String, Package]) =>
+ parseHtml(symbol, parent, entity, packs)(ctx)._2
+ }
+
+ /** TODO: this if statement searches for doc comments in parent
+ * definitions if one is not defined for the current symbol.
+ *
+       * It might be a good idea to factor this out of the WikiParser, since
+       * it silently mutates the state of the docbase.
+ */
+ implicit val implCtx = ctx
+ if (!ctx.docbase.docstring(symbol).isDefined) {
+ val parentCmt =
+ symbol.extendedOverriddenSymbols
+ .find(ctx.docbase.docstring(_).isDefined)
+ .flatMap(p => ctx.docbase.docstring(p))
+
+ ctx.docbase.addDocstring(symbol, parentCmt)
+ }
+
+
+ val path = entity.path.mkString(".")
+ if (!commentCache.contains(path) || ctx.docbase.docstring(symbol).isDefined)
+ commentCache = commentCache + (path -> commentParser)
+ }
+
+ def +=(entity: Entity, symbol: Symbol, parent: Symbol, ctx: Context) = add(entity, symbol, parent, ctx)
+
+ def size: Int = commentCache.size
+
+ private def parse(entity: Entity, packs: Map[String, Package]): Option[Comment] =
+ commentCache(entity.path.mkString("."))(entity, packs)
+
+ def parse(packs: Map[String, Package]): Unit = {
+ def rootPackages: List[String] = {
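+        // Illustrative: given the keys "scala" and "scala.collection", only "scala" is kept,
+        // as root packages are the ones with the fewest path segments.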
+ var currentDepth = Int.MaxValue
+ var packages: List[String] = Nil
+
+ for (key <- packs.keys) {
+ val keyDepth = key.split("\\.").length
+ packages =
+ if (keyDepth < currentDepth) {
+ currentDepth = keyDepth
+ key :: Nil
+ } else if (keyDepth == currentDepth) {
+ key :: packages
+ } else packages
+ }
+
+ packages
+ }
+
+ for (pack <- rootPackages) {
+ mutateEntities(packs(pack)) { e =>
+ val comment = parse(e, packs)
+ setComment(e, to = comment)
+ }
+ }
+ }
+
+ def clear(): Unit = commentCache = Map.empty
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/model/references.scala b/dottydoc/src/dotty/tools/dottydoc/model/references.scala
new file mode 100644
index 000000000..a28148fa7
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/model/references.scala
@@ -0,0 +1,20 @@
+package dotty.tools.dottydoc
+package model
+
+object references {
+ sealed trait Reference
+ final case class TypeReference(title: String, tpeLink: MaterializableLink, paramLinks: List[Reference]) extends Reference
+ final case class OrTypeReference(left: Reference, right: Reference) extends Reference
+ final case class AndTypeReference(left: Reference, right: Reference) extends Reference
+ final case class FunctionReference(args: List[Reference], returnValue: Reference) extends Reference
+ final case class TupleReference(args: List[Reference]) extends Reference
+ final case class BoundsReference(low: Reference, high: Reference) extends Reference
+ final case class NamedReference(title: String, ref: Reference, isByName: Boolean = false, isRepeated: Boolean = false) extends Reference
+ final case class ConstantReference(title: String) extends Reference
+
+  /** Use MaterializableLink for entities that need to be picklable */
+ sealed trait MaterializableLink { def title: String }
+ final case class UnsetLink(title: String, query: String) extends MaterializableLink
+ final case class MaterializedLink(title: String, target: String) extends MaterializableLink
+ final case class NoLink(title: String, target: String) extends MaterializableLink
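+
+  // Illustrative: a reference may start out as UnsetLink("Seq", "scala.collection.Seq") and later be
+  // replaced by a MaterializedLink pointing at the rendered page, or a NoLink if no target is found.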
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala b/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala
new file mode 100644
index 000000000..40c775428
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/MemberLookup.scala
@@ -0,0 +1,92 @@
+package dotty.tools
+package dottydoc
+package util
+
+import dotc.config.Printers.dottydoc
+import dotc.core.Contexts.Context
+import dotc.core.Flags
+import dotc.core.Names._
+import dotc.core.Symbols._
+import dotc.core.Types._
+import dotc.core.Names._
+import dotc.util.Positions._
+import model.internal._
+import model.comment._
+import model._
+
+trait MemberLookup {
+ /** Performs a lookup based on the provided (pruned) query string
+ *
+   * Returns a `Tooltip` if the lookup is unsuccessful, otherwise a `LinkToEntity` or `LinkToExternal`
+ */
+ def lookup(
+ entity: Entity,
+ packages: Map[String, Package],
+ query: String,
+ pos: Position
+ ): LinkTo = {
+ val notFound: LinkTo = Tooltip(query)
+ val querys = query.split("\\.").toList
+
+ /** Looks for the specified entity among `ent`'s members */
+ def localLookup(ent: Entity with Members, searchStr: String): LinkTo =
+ ent
+ .members
+ .collect { case x if x.name == searchStr => x }
+ .sortBy(_.path.last)
+ .headOption
+ .fold(notFound)(e => LinkToEntity(e))
+
+    /** Looks for an entity further down in the structure; if the search list is Nil,
+     * the search stops
+ */
+ def downwardLookup(ent: Entity with Members, search: List[String]): LinkTo =
+ search match {
+ case Nil => notFound
+ case x :: Nil =>
+ localLookup(ent, x)
+ case x :: xs =>
+ ent
+ .members
+ .collect { case e: Entity with Members if e.name == x => e }
+ .headOption
+ .fold(notFound)(e => downwardLookup(e, xs))
+ }
+
+    /** Finds the package with the longest matching name, then does a downward lookup
+     * within that package
+ */
+ def globalLookup: LinkTo = {
+ def longestMatch(list: List[String]): List[String] =
+ if (list == Nil) Nil
+ else
+ packages
+ .get(list.mkString("."))
+ .map(_ => list)
+ .getOrElse(longestMatch(list.dropRight(1)))
+
+ longestMatch(querys) match {
+ case Nil => notFound
+ case xs => downwardLookup(packages(xs.mkString(".")), querys diff xs)
+ }
+ }
+
+ (querys, entity) match {
+ case (x :: Nil, e: Entity with Members) =>
+ localLookup(e, x)
+ case (x :: _, e: Entity with Members) if x == entity.name =>
+ downwardLookup(e, querys)
+ case (x :: xs, _) =>
+ if (xs.nonEmpty) globalLookup
+ else lookup(entity, packages, "scala." + query, pos)
+ }
+ }
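+
+  // Illustrative: a query like "scala.Option" goes through `globalLookup`: the longest matching
+  // package prefix is "scala", and "Option" is then looked up among that package's members.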
+
+ def makeEntityLink(
+ entity: Entity,
+ packages: Map[String, Package],
+ title: Inline,
+ pos: Position,
+ query: String
+ ): EntityLink = EntityLink(title, lookup(entity, packages, query, pos))
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala b/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala
new file mode 100644
index 000000000..2084e0a97
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/OutputWriter.scala
@@ -0,0 +1,125 @@
+package dotty.tools.dottydoc
+package util
+
+import dotty.tools.dotc.config.Printers.dottydoc
+
+import _root_.java.io.{
+ File => JFile,
+ PrintWriter => JPrintWriter,
+ FileReader => JFileReader,
+ BufferedInputStream,
+ InputStream,
+ InputStreamReader,
+ FileOutputStream,
+ BufferedOutputStream,
+ FileNotFoundException
+}
+import _root_.java.net.URL
+import _root_.java.util.{ Map => JMap, List => JList }
+import model.{ Entity, Package }
+import model.json._
+import com.github.mustachejava.DefaultMustacheFactory
+import scala.collection.JavaConverters._
+
+class OutputWriter {
+
+ def writeJava(packs: JMap[String, Package], outPath: String, template: URL, resources: JList[URL]): Unit = {
+ write(packs.asScala, outPath, template, resources.asScala)
+ }
+
+ def write(packs: collection.Map[String, Package], outPath: String, template: URL, resources: Traversable[URL]): Unit = {
+ // Write all packages to `outPath`
+ for (pack <- packs.values) {
+ println(s"""Writing '${pack.path.mkString(".")}'""")
+ writeFile(
+ expandTemplate(template, pack, outPath),
+ outPath + pack.path.mkString("/", "/", "/"),
+ "index.html")
+
+ // Write all package children to outPath
+ for {
+ child <- pack.children
+ if child.kind != "package"
+ } {
+ println(s"""Writing '${child.path.mkString(".")}'""")
+ writeFile(
+ expandTemplate(template, child, outPath),
+ outPath + child.path.dropRight(1).mkString("/", "/", "/"),
+ child.path.last + ".html")
+ }
+ }
+
+ // Write full index to outPath
+ val js = "Index = {}; Index.packages = " + packs.json + ";"
+ println("Writing index.js...")
+ writeFile(js, outPath + "/docassets/", "index.js")
+
+ // Write resources to outPath
+ println("Copying CSS/JS resources to destination...")
+ assert(resources.nonEmpty)
+
+ // TODO: splitting the URL by '/' and taking the last means that we don't
+ // allow folders among the resources
+ resources.foreach(url => copy(url.openStream, outPath, url.getFile.split("/").last))
+
+ println("Done writing static material, building js-app")
+ }
+
+ def writeJsonJava(index: JMap[String, Package], outputDir: String): Unit =
+ writeJson(index.asScala, outputDir)
+
+ def writeJson(index: collection.Map[String, Package], outputDir: String): Unit =
+ writeFile(index.json, outputDir + "/", "index.json")
+
+ def expandTemplate(template: URL, entity: Entity, outPath: String): String = try {
+ import model.json._
+ import model.java._
+
+ val inputStream = template.openStream
+ val writer = new _root_.java.io.StringWriter()
+ val mf = new DefaultMustacheFactory()
+
+ def toRoot = "../" * (entity.path.length - { if (entity.isInstanceOf[Package]) 0 else 1 })
+
+ val entityWithExtras = entity.asJava(Map(
+ "assets" -> s"${toRoot}docassets",
+ "index" -> s"${toRoot}docassets/index.js",
+ "currentEntity" -> entity.json
+ ))
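+    // Illustrative: the mustache template can then refer to these values as {{assets}}, {{index}}
+    // and {{currentEntity}}, alongside the fields of the entity itself.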
+
+ mf.compile(new InputStreamReader(inputStream), "template")
+ .execute(writer, entityWithExtras)
+
+ inputStream.close()
+ writer.flush()
+ writer.toString
+ } catch {
+ case fnf: FileNotFoundException =>
+ dottydoc.println(s"""Couldn't find the template: "${template.getFile}"...exiting""")
+ System.exit(1); ""
+ }
+
+ def writeFile(str: String, path: String, file: String): Unit =
+    writeFile(str.getBytes("UTF-8"), path, file)
+
+  def writeFile(bytes: Array[Byte], path: String, file: String): Unit = {
+    // Create the target directory, then write the bytes through a buffered stream
+    new JFile(path).mkdirs()
+    val bos = new BufferedOutputStream(new FileOutputStream(new JFile(path + file)))
+    try bos.write(bytes)
+    finally bos.close()
+  }
+
+ def copy(src: InputStream, path: String, name: String): Unit = {
+ val reader = new BufferedInputStream(src)
+ try {
+ val bytes = Stream.continually(reader.read).takeWhile(-1 != _).map(_.toByte)
+ writeFile(bytes.toArray, path + "/docassets/", name)
+ src.close()
+ } finally reader.close()
+ }
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala b/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala
new file mode 100644
index 000000000..a3b60fa44
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/Traversing.scala
@@ -0,0 +1,25 @@
+package dotty.tools.dottydoc
+package util
+
+object traversing {
+ import model._
+
+ def mutateEntities(e: Entity)(trans: Entity => Unit): Unit = e match {
+ case e: Entity with Members =>
+ trans(e)
+ e.members.map(mutateEntities(_)(trans))
+ case e: Entity => trans(e)
+ }
+
+ def relativePath(from: Entity, to: Entity) = {
+ val offset = from match {
+ case v: Val if v.implicitlyAddedFrom.isDefined => 3
+ case d: Def if d.implicitlyAddedFrom.isDefined => 3
+ case _: Val | _: Def => 2
+ case _ => 1
+ }
+
+ "../" * (from.path.length - offset) +
+ to.path.mkString("", "/", ".html")
+ }
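+
+  // Illustrative: for a Def at path scala.Option.map linking to an entity at path scala.List,
+  // this yields "../scala/List.html".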
+}
diff --git a/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala b/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala
new file mode 100644
index 000000000..a5a4dfec6
--- /dev/null
+++ b/dottydoc/src/dotty/tools/dottydoc/util/mutate.scala
@@ -0,0 +1,65 @@
+package dotty.tools.dottydoc
+package util
+package internal
+
+object setters {
+ import model._
+ import comment.Comment
+ import model.references._
+ import internal._
+
+ def setComment(ent: Entity, to: Option[Comment]) = ent match {
+ case x: PackageImpl => x.comment = to
+ case x: ClassImpl => x.comment = to
+ case x: CaseClassImpl => x.comment = to
+ case x: TraitImpl => x.comment = to
+ case x: ObjectImpl => x.comment = to
+ case x: DefImpl => x.comment = to
+ case x: ValImpl => x.comment = to
+ }
+
+ def setParent(ent: Entity, to: Entity): Unit = ent match {
+ case e: ClassImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: CaseClassImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: ObjectImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: TraitImpl =>
+ e.parent = to
+ e.members.foreach(setParent(_, e))
+ case e: ValImpl =>
+ e.parent = to
+ case e: DefImpl =>
+ e.parent = to
+ case _ => ()
+ }
+
+ implicit class FlattenedEntity(val ent: Entity) extends AnyVal {
+    /** Returns a copy in which each member has its own members stripped (for an
+     * Entity with Members), otherwise the entity itself
+ */
+ def flat: Entity = {
+ def flattenMember: Entity => Entity = {
+ case e: PackageImpl => e.copy(members = Nil)
+ case e: ObjectImpl => e.copy(members = Nil)
+ case e: CaseClassImpl => e.copy(members = Nil)
+ case e: ClassImpl => e.copy(members = Nil)
+ case e: TraitImpl => e.copy(members = Nil)
+ case other => other
+ }
+
+ ent match {
+ case e: PackageImpl => e.copy(members = e.members.map(flattenMember))
+ case e: ObjectImpl => e.copy(members = e.members.map(flattenMember))
+ case e: CaseClassImpl => e.copy(members = e.members.map(flattenMember))
+ case e: ClassImpl => e.copy(members = e.members.map(flattenMember))
+ case e: TraitImpl => e.copy(members = e.members.map(flattenMember))
+ case other => other
+ }
+ }
+ }
+}
diff --git a/dottydoc/test/BaseTest.scala b/dottydoc/test/BaseTest.scala
new file mode 100644
index 000000000..2233d03c8
--- /dev/null
+++ b/dottydoc/test/BaseTest.scala
@@ -0,0 +1,57 @@
+package dotty.tools
+package dottydoc
+
+import dotc.core.Contexts
+import Contexts.{ Context, ContextBase, FreshContext }
+import dotc.util.SourceFile
+import dotc.core.Phases.Phase
+import dotc.typer.FrontEnd
+import dottydoc.core.DocASTPhase
+import model.Package
+
+trait DottyTest {
+ dotty.tools.dotc.parsing.Scanners // initialize keywords
+
+ implicit var ctx: FreshContext = {
+ val base = new ContextBase
+ import base.settings._
+ val ctx = base.initialCtx.fresh
+ ctx.setSetting(ctx.settings.language, List("Scala2"))
+ ctx.setSetting(ctx.settings.YkeepComments, true)
+ base.initialize()(ctx)
+ ctx
+ }
+
+ private def compilerWithChecker(assertion: Map[String, Package] => Unit) = new DocCompiler {
+ private[this] val assertionPhase: List[List[Phase]] =
+ List(new Phase {
+ def phaseName = "assertionPhase"
+ override def run(implicit ctx: Context): Unit =
+ assertion(ctx.docbase.packages[Package].toMap)
+ }) :: Nil
+
+ override def phases =
+ super.phases ++ assertionPhase
+ }
+
+ def checkSource(source: String)(assertion: Map[String, Package] => Unit): Unit = {
+ val c = compilerWithChecker(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compile(source)
+ }
+
+ def checkFiles(sources: List[String])(assertion: Map[String, Package] => Unit): Unit = {
+ val c = compilerWithChecker(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compile(sources)
+ }
+
+ def checkSources(sourceFiles: List[SourceFile])(assertion: Map[String, Package] => Unit): Unit = {
+ val c = compilerWithChecker(assertion)
+ c.rootContext(ctx)
+ val run = c.newRun
+ run.compileSources(sourceFiles)
+ }
+}
diff --git a/dottydoc/test/ConstructorTest.scala b/dottydoc/test/ConstructorTest.scala
new file mode 100644
index 000000000..8aa883022
--- /dev/null
+++ b/dottydoc/test/ConstructorTest.scala
@@ -0,0 +1,211 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+import dotc.util.SourceFile
+import model._
+import model.internal._
+import model.references._
+
+class Constructors extends DottyTest {
+ @Test def singleClassConstructor = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val str: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors.headOption match {
+ case Some(ParamListImpl(NamedReference("str", _, false, false) :: Nil, false) :: Nil) =>
+ // success!
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}")
+ }
+ }
+ }
+ }
+
+ @Test def constructorPlusImplicitArgList = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val str1: String)(implicit str2: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("str1", _, false, false) :: Nil, false) ::
+ ParamListImpl(NamedReference("str2", _, false, false) :: Nil, true) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}")
+ }
+ }
+ }
+ }
+
+ @Test def multipleArgumentListsForConstructor = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val str1: String)(val str2: String)(implicit str3: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("str1", _, false, false) :: Nil, false) ::
+ ParamListImpl(NamedReference("str2", _, false, false) :: Nil, false) ::
+ ParamListImpl(NamedReference("str3", _, false, false) :: Nil, true) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ => assert(false, s"Incorrect constructor found: ${cls.constructors}")
+ }
+ }
+ }
+ }
+
+ @Test def multipleConstructors = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |class Class(val main: String) {
+ | def this(alt1: Int) =
+ | this("String")
+ |
+ | def this(alt2: List[String]) =
+ | this(alt2.head)
+ |}
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: Class), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt1", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt2", _, false, false) :: Nil, false) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ =>
+ assert(
+ false,
+ s"""Incorrect constructor found:\n${cls.constructors.mkString("\n")}"""
+ )
+ }
+ }
+ }
+ }
+
+ @Test def multipleConstructorsCC = {
+ val source = new SourceFile (
+ "Class.scala",
+ """
+ |package scala
+ |
+ |case class Class(val main: String) {
+ | def this(alt1: Int) =
+ | this("String")
+ |
+ | def this(alt2: List[String]) =
+ | this(alt2.head)
+ |}
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cls: CaseClass, obj: Object), _, _) =>
+ cls.constructors match {
+ case (
+ ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt1", _, false, false) :: Nil, false) :: Nil
+ ) :: (
+ ParamListImpl(NamedReference("alt2", _, false, false) :: Nil, false) :: Nil
+ ) :: Nil =>
+ // success!
+ case _ =>
+ println(obj.members.map(x => x.kind + " " + x.name))
+ assert(
+ false,
+ s"""Incorrect constructor found:\n${cls.constructors.mkString("\n")}"""
+ )
+ }
+ }
+ }
+ }
+
+ @Test def traitParameters = {
+ val source = new SourceFile (
+ "Trait.scala",
+ """
+ |package scala
+ |
+ |trait Trait(val main: String)
+ """.stripMargin
+ )
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(trt: Trait), _, _) =>
+ trt.traitParams match {
+ case ParamListImpl(NamedReference("main", _, false, false) :: Nil, false) :: Nil =>
+ case _ =>
+ assert(
+ false,
+ s"""Incorrect constructor found:\n${trt.traitParams.mkString("\n")}"""
+ )
+ }
+ }
+ }
+ }
+
+ @Test def testJson = {
+ val actualSource =
+ """
+ |package scala
+ |
+ |trait Trait(val main: String)
+ |class Class(val main: String)
+ |case class CaseClass(main: String)
+ """.stripMargin
+
+ val source = new SourceFile ("JsonTest.scala", actualSource)
+
+ checkSources(source :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(cc: CaseClass, _, cls: Class, trt: Trait), _, _) =>
+ import model.json._
+ lazy val incorrectJson = s"The json generated for:\n$actualSource\n\nIs not correct"
+ assert(cc.json.contains(s""""constructors":[[{"list":[{"title":"main""""), incorrectJson)
+ assert(cls.json.contains(s""""constructors":[[{"list":[{"title":"main""""), incorrectJson)
+ assert(trt.json.contains(s""""traitParams":[{"list":[{"title":"main""""), incorrectJson)
+ }
+ }
+ }
+}
diff --git a/dottydoc/test/PackageStructure.scala b/dottydoc/test/PackageStructure.scala
new file mode 100644
index 000000000..00caaa2c0
--- /dev/null
+++ b/dottydoc/test/PackageStructure.scala
@@ -0,0 +1,89 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+import dotc.util.SourceFile
+import model.internal._
+
+class PackageStructure extends DottyTest {
+ @Test def multipleCompilationUnits = {
+ val source1 = new SourceFile(
+ "TraitA.scala",
+ """
+ |package scala
+ |
+ |trait A
+ """.stripMargin
+ )
+
+ val source2 = new SourceFile(
+ "TraitB.scala",
+ """
+ |package scala
+ |
+ |trait B
+ """.stripMargin
+ )
+
+ checkSources(source1 :: source2 :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(_, List(tA, tB), _, _) =>
+ assert(
+ tA.name == "A" && tB.name == "B",
+ s"trait A had name '${tA.name}' and trait B had name '${tB.name}'"
+ )
+ case _ => fail("Incorrect package structure after run")
+ }
+ }
+ }
+
+
+ @Test def multiplePackages = {
+ val source1 = new SourceFile(
+ "TraitA.scala",
+ """
+ |package scala
+ |package collection
+ |
+ |trait A
+ """.stripMargin)
+
+ val source2 = new SourceFile(
+ "TraitB.scala",
+ """
+ |package scala
+ |package collection
+ |
+ |trait B
+ """.stripMargin)
+
+ checkSources(source1 :: source2 :: Nil) { packages =>
+ packages("scala") match {
+ case PackageImpl(
+ "scala",
+ List(PackageImpl("scala.collection", List(tA, tB), _, _)),
+ _, _
+ ) =>
+ assert(
+ tA.name == "A" && tB.name == "B",
+ s"trait A had name '${tA.name}' and trait B had name '${tB.name}'"
+ )
+
+ case _ =>
+ fail(s"""Incorrect package structure for 'scala' package: ${packages("scala")}""")
+ }
+
+ packages("scala.collection") match {
+ case PackageImpl("scala.collection", List(tA, tB), _, _) =>
+ assert(
+ tA.name == "A" && tB.name == "B",
+ s"trait A had name '${tA.name}' and trait B had name '${tB.name}'"
+ )
+
+ case _ => fail("Incorrect package structure for 'scala.collection' package")
+ }
+ }
+ }
+}
diff --git a/dottydoc/test/SimpleComments.scala b/dottydoc/test/SimpleComments.scala
new file mode 100644
index 000000000..959eb1745
--- /dev/null
+++ b/dottydoc/test/SimpleComments.scala
@@ -0,0 +1,29 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+class TestSimpleComments extends DottyTest {
+
+ @Test def simpleComment = {
+ val source =
+ """
+ |package scala
+ |
+ |/** Hello, world! */
+ |trait HelloWorld
+ """.stripMargin
+
+ checkSource(source) { packages =>
+ val traitCmt =
+ packages("scala")
+ .children.find(_.path.mkString(".") == "scala.HelloWorld")
+ .flatMap(_.comment.map(_.body))
+ .get
+
+      assertEquals("<p>Hello, world!</p>", traitCmt)
+ }
+ }
+
+}
diff --git a/dottydoc/test/WhitelistedStdLib.scala b/dottydoc/test/WhitelistedStdLib.scala
new file mode 100644
index 000000000..48697ea7f
--- /dev/null
+++ b/dottydoc/test/WhitelistedStdLib.scala
@@ -0,0 +1,45 @@
+package dotty.tools
+package dottydoc
+
+import org.junit.Test
+import org.junit.Assert._
+
+class TestWhitelistedCollections extends DottyTest {
+ val files: List[String] = {
+ val whitelist = "./test/dotc/scala-collections.whitelist"
+
+ scala.io.Source.fromFile(whitelist, "UTF8")
+ .getLines()
+      .map(_.trim) // allow indentation
+      .filter(!_.startsWith("#")) // allow comment lines prefixed by #
+      .map(_.takeWhile(_ != '#').trim) // allow comments at the end of lines
+ .filter(_.nonEmpty)
+ .filterNot(_.endsWith("package.scala"))
+ .toList
+ }
+
+ @Test def arrayHasDocumentation =
+ checkFiles(files) { packages =>
+ val array =
+ packages("scala")
+ .children.find(_.path.mkString(".") == "scala.Array")
+ .get
+
+ assert(array.comment.get.body.length > 0)
+ }
+
+ @Test def traitImmutableHasDocumentation =
+ checkFiles(files) { packages =>
+ val imm =
+ packages("scala")
+ .children.find(_.path.mkString(".") == "scala.Immutable")
+ .get
+
+ assert(
+ imm.kind == "trait" && imm.name == "Immutable",
+ "Found wrong `Immutable`")
+ assert(
+ imm.comment.map(_.body).get.length > 0,
+ "Imm did not have a comment with length > 0")
+ }
+}
diff --git a/project/Build.scala b/project/Build.scala
index b7822907d..1412556a9 100644
--- a/project/Build.scala
+++ b/project/Build.scala
@@ -10,7 +10,7 @@ import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
object DottyBuild extends Build {
val baseVersion = "0.1"
- val isNightly = sys.props.get("NIGHTLYBUILD") == Some("yes")
+ val isNightly = sys.env.get("NIGHTLYBUILD") == Some("yes")
val jenkinsMemLimit = List("-Xmx1300m")
@@ -73,7 +73,9 @@ object DottyBuild extends Build {
javaSource in Test := baseDirectory.value / "test",
resourceDirectory in Compile := baseDirectory.value / "resources",
unmanagedSourceDirectories in Compile := Seq((scalaSource in Compile).value),
+ unmanagedSourceDirectories in Compile += baseDirectory.value / "dottydoc" / "src",
unmanagedSourceDirectories in Test := Seq((scalaSource in Test).value),
+ unmanagedSourceDirectories in Test += baseDirectory.value / "dottydoc" / "test",
// set system in/out for repl
connectInput in run := true,
@@ -98,8 +100,9 @@ object DottyBuild extends Build {
libraryDependencies ++= partestDeps.value,
libraryDependencies ++= Seq("org.scala-lang.modules" %% "scala-xml" % "1.0.1",
"org.scala-lang.modules" %% "scala-partest" % "1.0.11" % "test",
+ "ch.epfl.lamp" % "dottydoc-client" % "0.1.0",
"com.novocode" % "junit-interface" % "0.11" % "test",
- "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0",
+ "com.github.spullara.mustache.java" % "compiler" % "0.9.3",
"com.typesafe.sbt" % "sbt-interface" % sbtVersion.value),
// enable improved incremental compilation algorithm
incOptions := incOptions.value.withNameHashing(true),
@@ -199,7 +202,8 @@ object DottyBuild extends Build {
settings(
addCommandAlias("partest", ";test:package;package;test:runMain dotc.build;lockPartestFile;test:test;runPartestRunner") ++
addCommandAlias("partest-only", ";test:package;package;test:runMain dotc.build;lockPartestFile;test:test-only dotc.tests;runPartestRunner") ++
- addCommandAlias("partest-only-no-bootstrap", ";test:package;package; lockPartestFile;test:test-only dotc.tests;runPartestRunner")
+ addCommandAlias("partest-only-no-bootstrap", ";test:package;package; lockPartestFile;test:test-only dotc.tests;runPartestRunner") ++
+ addCommandAlias("dottydoc", ";dottydoc/run")
).
settings(publishing)
@@ -263,7 +267,6 @@ object DottyInjectedPlugin extends AutoPlugin {
).
settings(publishing)
-
/** A sandbox to play with the Scala.js back-end of dotty.
*
* This sandbox is compiled with dotty with support for Scala.js. It can be
diff --git a/project/plugins.sbt b/project/plugins.sbt
index 57bd46581..8ac4d69bf 100644
--- a/project/plugins.sbt
+++ b/project/plugins.sbt
@@ -8,3 +8,5 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0")
addSbtPlugin("org.scalastyle" %% "scalastyle-sbt-plugin" % "0.8.0")
addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.8")
+
+addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.3.5")
diff --git a/src/dotty/tools/dotc/Compiler.scala b/src/dotty/tools/dotc/Compiler.scala
index d1f126860..2120fa73e 100644
--- a/src/dotty/tools/dotc/Compiler.scala
+++ b/src/dotty/tools/dotc/Compiler.scala
@@ -25,7 +25,7 @@ class Compiler {
/** Meta-ordering constraint:
*
- * DenotTransformers that change the signature of their denotation's info must go
+ * DenotTransformers that change the signature of their denotation's info must go
* after erasure. The reason is that denotations are permanently referred to by
* TermRefs which contain a signature. If the signature of a symbol would change,
* all refs to it would become outdated - they could not be dereferenced in the
@@ -83,7 +83,7 @@ class Compiler {
new CapturedVars, // Represent vars captured by closures as heap objects
new Constructors, // Collect initialization code in primary constructors
// Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it
- new FunctionalInterfaces,// Rewrites closures to implement @specialized types of Functions.
+ new FunctionalInterfaces, // Rewrites closures to implement @specialized types of Functions.
new GetClass), // Rewrites getClass calls on primitive types.
List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments
// Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here
diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala
index 8a4b9cfe8..70d8f2d5e 100644
--- a/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/src/dotty/tools/dotc/ast/Desugar.scala
@@ -8,6 +8,7 @@ import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
import Decorators._
import language.higherKinds
import collection.mutable.ListBuffer
+import util.Attachment
import config.Printers._
object desugar {
@@ -17,11 +18,16 @@ object desugar {
import untpd._
+ /** Tags a .withFilter call generated by desugaring a for expression.
+ * Such calls can alternatively be rewritten to use filter.
+ */
+ val MaybeFilter = new Attachment.Key[Unit]
+
/** Info of a variable in a pattern: The named tree and its type */
private type VarInfo = (NameTree, Tree)
/** Names of methods that are added unconditionally to case classes */
- def isDesugaredCaseClassMethodName(name: Name)(implicit ctx: Context) =
+ def isDesugaredCaseClassMethodName(name: Name)(implicit ctx: Context): Boolean =
name == nme.isDefined ||
name == nme.copy ||
name == nme.productArity ||
@@ -616,16 +622,20 @@ object desugar {
*
* { cases }
* ==>
- * x$1 => x$1 match { cases }
+ * x$1 => (x$1 @unchecked) match { cases }
*
* If `nparams` != 1, expand instead to
*
- * (x$1, ..., x$n) => (x$0, ..., x${n-1}) match { cases }
+ * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked) match { cases }
*/
- def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1)(implicit ctx: Context) = {
+ def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1, unchecked: Boolean = true)(implicit ctx: Context) = {
val params = (1 to nparams).toList.map(makeSyntheticParameter(_))
val selector = makeTuple(params.map(p => Ident(p.name)))
- Function(params, Match(selector, cases))
+
+ if (unchecked)
+ Function(params, Match(Annotated(New(ref(defn.UncheckedAnnotType)), selector), cases))
+ else
+ Function(params, Match(selector, cases))
}
/** Map n-ary function `(p1, ..., pn) => body` where n != 1 to unary function as follows:
@@ -753,7 +763,7 @@ object desugar {
case VarPattern(named, tpt) =>
Function(derivedValDef(named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body)
case _ =>
- makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil)
+ makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil, unchecked = false)
}
/** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap
@@ -769,6 +779,12 @@ object desugar {
(Bind(name, pat), Ident(name))
}
+ /** Add MaybeFilter attachment */
+ def orFilter(tree: Tree): tree.type = {
+ tree.putAttachment(MaybeFilter, ())
+ tree
+ }
+
/** Make a pattern filter:
* rhs.withFilter { case pat => true case _ => false }
*
@@ -799,7 +815,7 @@ object desugar {
val cases = List(
CaseDef(pat, EmptyTree, Literal(Constant(true))),
CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))))
- Apply(Select(rhs, nme.withFilter), Match(EmptyTree, cases))
+ Apply(orFilter(Select(rhs, nme.withFilter)), makeCaseLambda(cases))
}
/** Is pattern `pat` irrefutable when matched against `rhs`?
@@ -854,7 +870,7 @@ object desugar {
val vfrom1 = new IrrefutableGenFrom(makeTuple(allpats), rhs1)
makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
case (gen: GenFrom) :: test :: rest =>
- val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen.pat, test))
+ val filtered = Apply(orFilter(rhsSelect(gen, nme.withFilter)), makeLambda(gen.pat, test))
val genFrom =
if (isIrrefutableGenFrom(gen)) new IrrefutableGenFrom(gen.pat, filtered)
else GenFrom(gen.pat, filtered)
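The for-expression changes above are easiest to see on a small example. The following is a hand-written sketch of the shape that `makePatFilter` and `makeCaseLambda` now produce (names such as `PatFilterSketch` and `x1` are invented; the compiler uses a fresh `x$1` and builds trees directly). The generated `withFilter` call is the one tagged with `MaybeFilter`, and the case lambda's scrutinee carries `@unchecked` so the filtering match itself is not flagged as non-exhaustive:

  object PatFilterSketch {
    def main(args: Array[String]): Unit = {
      val xs: List[Any] = List((1, 2), "skip", (3, 4))
      // roughly what `for ((a, b) <- xs) yield ...` expands to after this change:
      val result =
        xs.withFilter {                 // the Select tagged with MaybeFilter
          case (_, _) => true
          case _      => false
        }.map { x1 =>
          (x1: @unchecked) match {      // scrutinee annotated, cf. makeCaseLambda above
            case (a: Int, b: Int) => a + b
          }
        }
      println(result)                   // List(3, 7)
    }
  }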
diff --git a/src/dotty/tools/dotc/ast/TreeInfo.scala b/src/dotty/tools/dotc/ast/TreeInfo.scala
index a48651ebf..7c3f7f385 100644
--- a/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -630,20 +630,6 @@ object TreeInfo {
}
}
- def isApplyDynamicName(name: Name) = (name == nme.updateDynamic) || (name == nme.selectDynamic) || (name == nme.applyDynamic) || (name == nme.applyDynamicNamed)
-
- class DynamicApplicationExtractor(nameTest: Name => Boolean) {
- def unapply(tree: Tree) = tree match {
- case Apply(TypeApply(Select(qual, oper), _), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
- case Apply(Select(qual, oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
- case Apply(Ident(oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((EmptyTree(), name))
- case _ => None
- }
- }
- object DynamicUpdate extends DynamicApplicationExtractor(_ == nme.updateDynamic)
- object DynamicApplication extends DynamicApplicationExtractor(isApplyDynamicName)
- object DynamicApplicationNamed extends DynamicApplicationExtractor(_ == nme.applyDynamicNamed)
-
object MacroImplReference {
private def refPart(tree: Tree): Tree = tree match {
case TypeApply(fun, _) => refPart(fun)
diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala
index 20ae02994..cf11c27fa 100644
--- a/src/dotty/tools/dotc/ast/Trees.scala
+++ b/src/dotty/tools/dotc/ast/Trees.scala
@@ -15,6 +15,7 @@ import printing.Printer
import util.{Stats, Attachment, DotClass}
import annotation.unchecked.uncheckedVariance
import language.implicitConversions
+import parsing.Scanners.Comment
object Trees {
@@ -30,7 +31,7 @@ object Trees {
@sharable var ntrees = 0
/** Attachment key for trees with documentation strings attached */
- val DocComment = new Attachment.Key[String]
+ val DocComment = new Attachment.Key[Comment]
/** Modifiers and annotations for definitions
* @param flags The set flags
@@ -324,7 +325,7 @@ object Trees {
private[ast] def rawMods: Modifiers[T] =
if (myMods == null) genericEmptyModifiers else myMods
- def rawComment: Option[String] = getAttachment(DocComment)
+ def rawComment: Option[Comment] = getAttachment(DocComment)
def withMods(mods: Modifiers[Untyped]): ThisTree[Untyped] = {
val tree = if (myMods == null || (myMods == mods)) this else clone.asInstanceOf[MemberDef[Untyped]]
@@ -334,7 +335,7 @@ object Trees {
def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(Modifiers(flags))
- def setComment(comment: Option[String]): ThisTree[Untyped] = {
+ def setComment(comment: Option[Comment]): ThisTree[Untyped] = {
comment.map(putAttachment(DocComment, _))
asInstanceOf[ThisTree[Untyped]]
}
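For readers unfamiliar with tree attachments: `DocComment` above is a typed key into a per-tree attachment store, and this commit changes its payload from a raw `String` to the scanner's `Comment`. A minimal stand-alone sketch of that pattern follows; the `Key`, `Attachable`, and toy `Comment` names here are invented, the real machinery lives in `util.Attachment`:

  object AttachmentSketch {
    import scala.collection.mutable

    final class Key[V]                                   // the key's type parameter fixes the payload type
    trait Attachable {
      private val atts = mutable.Map.empty[Key[_], Any]
      def putAttachment[V](key: Key[V], value: V): Unit = atts(key) = value
      def getAttachment[V](key: Key[V]): Option[V]      = atts.get(key).map(_.asInstanceOf[V])
    }

    case class Comment(raw: String)                      // toy stand-in for Scanners.Comment
    val DocComment = new Key[Comment]                    // cf. Trees.DocComment above

    def main(args: Array[String]): Unit = {
      val tree = new Attachable {}
      tree.putAttachment(DocComment, Comment("/** A doc string. */"))
      println(tree.getAttachment(DocComment))            // Some(Comment(/** A doc string. */))
    }
  }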
diff --git a/src/dotty/tools/dotc/config/ScalaSettings.scala b/src/dotty/tools/dotc/config/ScalaSettings.scala
index d0c4cc02c..c090a5515 100644
--- a/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -163,6 +163,7 @@ class ScalaSettings extends Settings.SettingGroup {
val YkeepComments = BooleanSetting("-Ykeep-comments", "Keep comments when scanning source files.")
val YforceSbtPhases = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is run outside of sbt, for debugging.")
val YdumpSbtInc = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.")
+ val YcheckAllPatmat = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm)")
def stop = YstopAfter
/** Area-specific debug output.
@@ -196,4 +197,68 @@ class ScalaSettings extends Settings.SettingGroup {
val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999)
+
+ /** Doc specific settings */
+ val template = OptionSetting[String](
+ "-template",
+ "A mustache template for rendering each top-level entity in the API"
+ )
+
+ val resources = OptionSetting[String](
+ "-resources",
+ "A directory containing static resources needed for the API documentation"
+ )
+
+ val DocTitle = StringSetting (
+ "-Ydoc-title",
+ "title",
+ "The overall name of the Scaladoc site",
+ ""
+ )
+
+ val DocVersion = StringSetting (
+ "-Ydoc-version",
+ "version",
+ "An optional version number, to be appended to the title",
+ ""
+ )
+
+ val DocOutput = StringSetting (
+ "-Ydoc-output",
+ "outdir",
+ "The output directory in which to place the documentation",
+ "."
+ )
+
+ val DocFooter = StringSetting (
+ "-Ydoc-footer",
+ "footer",
+ "A footer on every Scaladoc page, by default the EPFL/Lightbend copyright notice. Can be overridden with a custom footer.",
+ ""
+ )
+
+ val DocUncompilable = StringSetting (
+ "-Ydoc-no-compile",
+ "path",
+ "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
+ ""
+ )
+
+ //def DocUncompilableFiles(implicit ctx: Context) = DocUncompilable.value match {
+ // case "" => Nil
+ // case path => io.Directory(path).deepFiles.filter(_ hasExtension "scala").toList
+ //}
+
+ val DocExternalDoc = MultiStringSetting (
+ "-Ydoc-external-doc",
+ "external-doc",
+ "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
+ )
+
+ val DocAuthor = BooleanSetting("-Ydoc-author", "Include authors.", true)
+
+ val DocGroups = BooleanSetting (
+ "-Ydoc:groups",
+ "Group similar functions together (based on the @group annotation)"
+ )
}
diff --git a/src/dotty/tools/dotc/config/Settings.scala b/src/dotty/tools/dotc/config/Settings.scala
index f30cedaa0..73df4e1ec 100644
--- a/src/dotty/tools/dotc/config/Settings.scala
+++ b/src/dotty/tools/dotc/config/Settings.scala
@@ -235,8 +235,8 @@ object Settings {
setting
}
- def BooleanSetting(name: String, descr: String): Setting[Boolean] =
- publish(Setting(name, descr, false))
+ def BooleanSetting(name: String, descr: String, initialValue: Boolean = false): Setting[Boolean] =
+ publish(Setting(name, descr, initialValue))
def StringSetting(name: String, helpArg: String, descr: String, default: String): Setting[String] =
publish(Setting(name, descr, default, helpArg))
diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala
index 436b035dc..99b4af0a9 100644
--- a/src/dotty/tools/dotc/core/Constraint.scala
+++ b/src/dotty/tools/dotc/core/Constraint.scala
@@ -143,6 +143,9 @@ abstract class Constraint extends Showable {
/** The uninstantiated typevars of this constraint */
def uninstVars: collection.Seq[TypeVar]
+ /** The weakest constraint that subsumes both this constraint and `other` */
+ def & (other: Constraint)(implicit ctx: Context): Constraint
+
/** Check that no constrained parameter contains itself as a bound */
def checkNonCyclic()(implicit ctx: Context): Unit
diff --git a/src/dotty/tools/dotc/core/Contexts.scala b/src/dotty/tools/dotc/core/Contexts.scala
index 262443314..cd76fe88b 100644
--- a/src/dotty/tools/dotc/core/Contexts.scala
+++ b/src/dotty/tools/dotc/core/Contexts.scala
@@ -29,6 +29,7 @@ import printing._
import config.{Settings, ScalaSettings, Platform, JavaPlatform, SJSPlatform}
import language.implicitConversions
import DenotTransformers.DenotTransformer
+import parsing.Scanners.Comment
import xsbti.AnalysisCallback
object Contexts {
@@ -531,6 +532,9 @@ object Contexts {
/** The symbol loaders */
val loaders = new SymbolLoaders
+ /** Documentation base */
+ val docbase = new DocBase
+
/** The platform, initialized by `initPlatform()`. */
private var _platform: Platform = _
@@ -567,14 +571,32 @@ object Contexts {
def squashed(p: Phase): Phase = {
allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase)
}
+ }
- val _docstrings: mutable.Map[Symbol, String] =
+ class DocBase {
+ private[this] val _docstrings: mutable.Map[Symbol, Comment] =
mutable.Map.empty
- def docstring(sym: Symbol): Option[String] = _docstrings.get(sym)
+ def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym)
- def addDocstring(sym: Symbol, doc: Option[String]): Unit =
+ def addDocstring(sym: Symbol, doc: Option[Comment]): Unit =
doc.map(d => _docstrings += (sym -> d))
+
+ /*
+ * Dottydoc places instances of `Package` in this map - but we do not want
+ * to depend on `dottydoc` for the compiler, as such this is defined as a
+ * map of `String -> AnyRef`
+ */
+ private[this] val _packages: mutable.Map[String, AnyRef] = mutable.Map.empty
+ def packages[A]: mutable.Map[String, A] = _packages.asInstanceOf[mutable.Map[String, A]]
+
+ /** Should perhaps factorize this into caches that get flushed */
+ private var _defs: Map[Symbol, Set[Symbol]] = Map.empty
+ def defs(sym: Symbol): Set[Symbol] = _defs.get(sym).getOrElse(Set.empty)
+
+ def addDef(s: Symbol, d: Symbol): Unit = _defs = (_defs + {
+ s -> _defs.get(s).map(xs => xs + d).getOrElse(Set(d))
+ })
}
/** The essential mutable state of a context base, collected into a common class */
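The `packages` map in `DocBase` stores values as `AnyRef` and lets the caller re-impose the element type through a cast, precisely to keep the compiler free of a dottydoc dependency. A small self-contained sketch of that erased-map pattern (toy names, assuming the caller always uses one consistent element type per store):

  object ErasedMapSketch {
    import scala.collection.mutable

    class DocBaseLike {
      private val _packages: mutable.Map[String, AnyRef] = mutable.Map.empty
      // callers fix the element type; consistency is their responsibility, as noted above
      def packages[A]: mutable.Map[String, A] = _packages.asInstanceOf[mutable.Map[String, A]]
    }

    case class Package(name: String)                     // toy stand-in for dottydoc's Package

    def main(args: Array[String]): Unit = {
      val base = new DocBaseLike
      base.packages[Package] += ("scala" -> Package("scala"))
      println(base.packages[Package]("scala"))           // Package(scala)
    }
  }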
diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala
index b0170b67c..e7e388be9 100644
--- a/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -15,11 +15,13 @@ import annotation.tailrec
object OrderingConstraint {
+ type ArrayValuedMap[T] = SimpleMap[GenericType, Array[T]]
+
/** The type of `OrderingConstraint#boundsMap` */
- type ParamBounds = SimpleMap[GenericType, Array[Type]]
+ type ParamBounds = ArrayValuedMap[Type]
/** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
- type ParamOrdering = SimpleMap[GenericType, Array[List[PolyParam]]]
+ type ParamOrdering = ArrayValuedMap[List[PolyParam]]
/** A new constraint with given maps */
private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
@@ -495,6 +497,44 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
}
+ def & (other: Constraint)(implicit ctx: Context) = {
+ def merge[T](m1: ArrayValuedMap[T], m2: ArrayValuedMap[T], join: (T, T) => T): ArrayValuedMap[T] = {
+ var merged = m1
+ def mergeArrays(xs1: Array[T], xs2: Array[T]) = {
+ val xs = xs1.clone
+ for (i <- xs.indices) xs(i) = join(xs1(i), xs2(i))
+ xs
+ }
+ m2.foreachBinding { (poly, xs2) =>
+ merged = merged.updated(poly,
+ if (m1.contains(poly)) mergeArrays(m1(poly), xs2) else xs2)
+ }
+ merged
+ }
+
+ def mergeParams(ps1: List[PolyParam], ps2: List[PolyParam]) =
+ (ps1 /: ps2)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1)
+
+ def mergeEntries(e1: Type, e2: Type): Type = e1 match {
+ case e1: TypeBounds =>
+ e2 match {
+ case e2: TypeBounds => e1 & e2
+ case _ if e1 contains e2 => e2
+ case _ => mergeError
+ }
+ case _ if e1 eq e2 => e1
+ case _ => mergeError
+ }
+
+ def mergeError = throw new AssertionError(i"cannot merge $this with $other")
+
+ val that = other.asInstanceOf[OrderingConstraint]
+ new OrderingConstraint(
+ merge(this.boundsMap, that.boundsMap, mergeEntries),
+ merge(this.lowerMap, that.lowerMap, mergeParams),
+ merge(this.upperMap, that.upperMap, mergeParams))
+ }
+
override def checkClosed()(implicit ctx: Context): Unit = {
def isFreePolyParam(tp: Type) = tp match {
case PolyParam(binder: GenericType, _) => !contains(binder)
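The new `Constraint#&` merges the two constraints' array-valued maps pointwise, joining entries present on both sides and copying the rest. The sketch below reproduces that merge shape on plain `Map[K, Array[T]]`; it is not the `SimpleMap`-based code above, and `min` merely stands in for the real entry joins such as `&` on `TypeBounds`:

  object MergeSketch {
    def merge[K, T](m1: Map[K, Array[T]], m2: Map[K, Array[T]], join: (T, T) => T): Map[K, Array[T]] =
      m2.foldLeft(m1) { case (acc, (k, xs2)) =>
        val merged = m1.get(k) match {
          case Some(xs1) =>
            val xs = xs1.clone                           // pointwise join, mirroring mergeArrays
            for (i <- xs.indices) xs(i) = join(xs1(i), xs2(i))
            xs
          case None => xs2                               // binding only in m2: take it as is
        }
        acc.updated(k, merged)
      }

    def main(args: Array[String]): Unit = {
      val a = Map("p" -> Array(1, 5))
      val b = Map("p" -> Array(3, 2), "q" -> Array(7))
      println(merge(a, b, (x: Int, y: Int) => x min y).map { case (k, v) => k -> v.toList })
      // Map(p -> List(1, 2), q -> List(7))
    }
  }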
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index 8d7e9d164..538a74198 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -76,6 +76,19 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
myNothingType
}
+ /** Indicates whether a previous subtype check used GADT bounds */
+ var GADTused = false
+
+ /** Record that GADT bounds of `sym` were used in a subtype check.
+ * But exclude constructor type parameters, as these are aliased
+ * to the corresponding class parameters, which does not constitute
+ * a true usage of a GADT symbol.
+ */
+ private def GADTusage(sym: Symbol) = {
+ if (!sym.owner.isConstructor) GADTused = true
+ true
+ }
+
// Subtype testing `<:<`
def topLevelSubType(tp1: Type, tp2: Type): Boolean = {
@@ -325,7 +338,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val gbounds2 = ctx.gadt.bounds(tp2.symbol)
(gbounds2 != null) &&
(isSubTypeWhenFrozen(tp1, gbounds2.lo) ||
- narrowGADTBounds(tp2, tp1, isUpper = false))
+ narrowGADTBounds(tp2, tp1, isUpper = false)) &&
+ GADTusage(tp2.symbol)
}
((frozenConstraint || !isCappable(tp1)) && isSubType(tp1, lo2) ||
compareGADT ||
@@ -507,7 +521,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val gbounds1 = ctx.gadt.bounds(tp1.symbol)
(gbounds1 != null) &&
(isSubTypeWhenFrozen(gbounds1.hi, tp2) ||
- narrowGADTBounds(tp1, tp2, isUpper = true))
+ narrowGADTBounds(tp1, tp2, isUpper = true)) &&
+ GADTusage(tp1.symbol)
}
isSubType(hi1, tp2) || compareGADT
case _ =>
@@ -846,11 +861,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
// special case for situations like:
// class C { type T }
// val foo: C
- // foo.type <: C { type T = foo.T }
+ // foo.type <: C { type T {= , <: , >:} foo.T }
def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && {
rinfo2 match {
- case rinfo2: TypeAlias =>
- !defn.isBottomType(tp1.widen) && (tp1 select name) =:= rinfo2.alias
+ case rinfo2: TypeBounds =>
+ val mbr1 = tp1.select(name)
+ !defn.isBottomType(tp1.widen) &&
+ (mbr1 =:= rinfo2.hi || (rinfo2.hi ne rinfo2.lo) && mbr1 =:= rinfo2.lo)
case _ => false
}
}
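`GADTused`/`GADTusage` record whether a successful subtype check genuinely relied on GADT bounds (constructor type parameters excluded). For reference, this is the kind of user code in which such bounds are introduced by a pattern and then consulted when typing the case body:

  object GadtSketch {
    sealed trait Expr[T]
    case class IntLit(i: Int)      extends Expr[Int]
    case class BoolLit(b: Boolean) extends Expr[Boolean]

    // Each pattern constrains T (e.g. T >: Int), and typing the body against the
    // declared result type T succeeds only because those GADT bounds are consulted.
    def eval[T](e: Expr[T]): T = e match {
      case IntLit(i)  => i
      case BoolLit(b) => b
    }

    def main(args: Array[String]): Unit =
      println(eval(IntLit(41)) + 1)   // 42
  }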
diff --git a/src/dotty/tools/dotc/core/TyperState.scala b/src/dotty/tools/dotc/core/TyperState.scala
index e64335218..7b8867ccc 100644
--- a/src/dotty/tools/dotc/core/TyperState.scala
+++ b/src/dotty/tools/dotc/core/TyperState.scala
@@ -59,6 +59,11 @@ class TyperState(r: Reporter) extends DotClass with Showable {
/** Commit state so that it gets propagated to enclosing context */
def commit()(implicit ctx: Context): Unit = unsupported("commit")
+ /** The closest ancestor of this typer state (including possibly this typer state itself)
+ * which is not yet committed, or which does not have a parent.
+ */
+ def uncommittedAncestor: TyperState = this
+
/** Make type variable instances permanent by assigning to `inst` field if
* type variable instantiation cannot be retracted anymore. Then, remove
* no-longer needed constraint entries.
@@ -83,7 +88,8 @@ extends TyperState(r) {
override def reporter = myReporter
- private var myConstraint: Constraint = previous.constraint
+ private val previousConstraint = previous.constraint
+ private var myConstraint: Constraint = previousConstraint
override def constraint = myConstraint
override def constraint_=(c: Constraint)(implicit ctx: Context) = {
@@ -96,7 +102,6 @@ extends TyperState(r) {
override def ephemeral = myEphemeral
override def ephemeral_=(x: Boolean): Unit = { myEphemeral = x }
-
override def fresh(isCommittable: Boolean): TyperState =
new MutableTyperState(this, new StoreReporter(reporter), isCommittable)
@@ -107,6 +112,11 @@ extends TyperState(r) {
isCommittable &&
(!previous.isInstanceOf[MutableTyperState] || previous.isGlobalCommittable)
+ private var isCommitted = false
+
+ override def uncommittedAncestor: TyperState =
+ if (isCommitted) previous.uncommittedAncestor else this
+
/** Commit typer state so that its information is copied into current typer state
* In addition (1) the owning state of undetermined or temporarily instantiated
* type variables changes from this typer state to the current one. (2) Variables
@@ -116,14 +126,17 @@ extends TyperState(r) {
override def commit()(implicit ctx: Context) = {
val targetState = ctx.typerState
assert(isCommittable)
- targetState.constraint = constraint
+ targetState.constraint =
+ if (targetState.constraint eq previousConstraint) constraint
+ else targetState.constraint & constraint
constraint foreachTypeVar { tvar =>
if (tvar.owningState eq this)
tvar.owningState = targetState
}
- targetState.ephemeral = ephemeral
+ targetState.ephemeral |= ephemeral
targetState.gc()
reporter.flush()
+ isCommitted = true
}
override def gc()(implicit ctx: Context): Unit = {
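The reworked `commit` replaces the target's constraint only when the target still holds the constraint this state was forked from; otherwise the two are merged with `&`, and `ephemeral` is now or-ed rather than overwritten. A toy sketch of that policy, with invented `Constraint`/`State` types rather than the real `TyperState`:

  object CommitSketch {
    final case class Constraint(entries: Set[String]) {
      def &(other: Constraint): Constraint = Constraint(entries ++ other.entries)
    }
    class State(var constraint: Constraint)

    // Mirrors the updated commit: if the target still holds the constraint we forked
    // from, our constraint simply replaces it; if the target has moved on, merge with &.
    def commit(target: State, previousConstraint: Constraint, ours: Constraint): Unit =
      target.constraint =
        if (target.constraint eq previousConstraint) ours
        else target.constraint & ours

    def main(args: Array[String]): Unit = {
      val base   = Constraint(Set("A <: B"))
      val target = new State(base)
      commit(target, base, Constraint(Set("A <: B", "C <: D")))   // fast path: replace
      println(target.constraint.entries)                          // Set(A <: B, C <: D)
      commit(target, base, Constraint(Set("E <: F")))             // target diverged: merge
      println(target.constraint.entries)                          // Set(A <: B, C <: D, E <: F)
    }
  }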
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index 1bfd6eaee..cb423e186 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -177,7 +177,7 @@ object Types {
}
/** Is some part of this type produced as a repair for an error? */
- final def isErroneous(implicit ctx: Context): Boolean = existsPart(_.isError)
+ final def isErroneous(implicit ctx: Context): Boolean = existsPart(_.isError, forceLazy = false)
/** Does the type carry an annotation that is an instance of `cls`? */
final def hasAnnotation(cls: ClassSymbol)(implicit ctx: Context): Boolean = stripTypeVar match {
@@ -219,8 +219,8 @@ object Types {
/** Returns true if there is a part of this type that satisfies predicate `p`.
*/
- final def existsPart(p: Type => Boolean)(implicit ctx: Context): Boolean =
- new ExistsAccumulator(p).apply(false, this)
+ final def existsPart(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context): Boolean =
+ new ExistsAccumulator(p, forceLazy).apply(false, this)
/** Returns true if all parts of this type satisfy predicate `p`.
*/
@@ -841,6 +841,13 @@ object Types {
case _ => this
}
+ /** Eliminate anonymous classes */
+ final def deAnonymize(implicit ctx: Context): Type = this match {
+ case tp:TypeRef if tp.symbol.isAnonymousClass =>
+ tp.symbol.asClass.typeRef.asSeenFrom(tp.prefix, tp.symbol.owner)
+ case tp => tp
+ }
+
/** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type
* is no longer alias type, LazyRef, or instantiated type variable.
*/
@@ -2525,8 +2532,8 @@ object Types {
/** A type for polymorphic methods */
class PolyType(val paramNames: List[TypeName])(paramBoundsExp: GenericType => List[TypeBounds], resultTypeExp: GenericType => Type)
extends CachedGroundType with GenericType with MethodOrPoly {
- val paramBounds = paramBoundsExp(this)
- val resType = resultTypeExp(this)
+ val paramBounds: List[TypeBounds] = paramBoundsExp(this)
+ val resType: Type = resultTypeExp(this)
def variances = Nil
protected def computeSignature(implicit ctx: Context) = resultSignature
@@ -3688,9 +3695,10 @@ object Types {
protected def traverseChildren(tp: Type) = foldOver((), tp)
}
- class ExistsAccumulator(p: Type => Boolean)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
+ class ExistsAccumulator(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
override def stopAtStatic = false
- def apply(x: Boolean, tp: Type) = x || p(tp) || foldOver(x, tp)
+ def apply(x: Boolean, tp: Type) =
+ x || p(tp) || (forceLazy || !tp.isInstanceOf[LazyRef]) && foldOver(x, tp)
}
class ForeachAccumulator(p: Type => Unit)(implicit ctx: Context) extends TypeAccumulator[Unit] {
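`isErroneous` now traverses with `forceLazy = false`, so asking whether a type contains an error no longer forces lazily computed parts. A stand-alone analogue with a toy tree (the names are invented and the real `LazyRef` differs):

  object ExistsSketch {
    sealed trait Tp
    case class Leaf(name: String)    extends Tp
    case class Node(parts: List[Tp]) extends Tp
    class Lazy(thunk: () => Tp)      extends Tp { lazy val forced: Tp = thunk() }

    def existsPart(tp: Tp, p: Tp => Boolean, forceLazy: Boolean = true): Boolean =
      p(tp) || (tp match {
        case Node(parts) => parts.exists(existsPart(_, p, forceLazy))
        case l: Lazy     => forceLazy && existsPart(l.forced, p, forceLazy)  // skipped when forceLazy = false
        case _           => false
      })

    def main(args: Array[String]): Unit = {
      val tp = Node(List(new Lazy(() => sys.error("forced!")), Leaf("A")))
      // Reports the hit without ever running the thunk; with the default forceLazy = true
      // the same call would force the lazy part and throw.
      println(existsPart(tp, _ == Leaf("A"), forceLazy = false))   // true
    }
  }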
diff --git a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
index 4ea98f7c3..1570dbca0 100644
--- a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
+++ b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -85,6 +85,7 @@ class ClassfileParser(
val jflags = in.nextChar
val isAnnotation = hasAnnotation(jflags)
val sflags = classTranslation.flags(jflags)
+ val isEnum = (jflags & JAVA_ACC_ENUM) != 0
val nameIdx = in.nextChar
currentClassName = pool.getClassName(nameIdx)
@@ -140,6 +141,15 @@ class ClassfileParser(
setClassInfo(classRoot, classInfo)
setClassInfo(moduleRoot, staticInfo)
}
+
+ // eager load java enum definitions for exhaustivity check of pattern match
+ if (isEnum) {
+ instanceScope.toList.map(_.ensureCompleted())
+ staticScope.toList.map(_.ensureCompleted())
+ classRoot.setFlag(Flags.Enum)
+ moduleRoot.setFlag(Flags.Enum)
+ }
+
result
}
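Eagerly completing an enum's members and setting `Flags.Enum` is what gives the new exhaustivity check a complete list of constants to decompose. Illustrative user code (not part of the patch) that benefits from it:

  object EnumMatchSketch {
    import java.util.concurrent.TimeUnit

    // With the constants completed, the space-based check can decompose TimeUnit and
    // report that only 2 of its 7 constants are covered here.
    def coarse(u: TimeUnit): String = u match {
      case TimeUnit.SECONDS => "seconds"
      case TimeUnit.MINUTES => "minutes"
    }

    def main(args: Array[String]): Unit =
      println(coarse(TimeUnit.SECONDS))   // seconds (any other constant is a MatchError)
  }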
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index 600707cbf..a06930058 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -22,6 +22,7 @@ import ScriptParsers._
import scala.annotation.{tailrec, switch}
import util.DotClass
import rewrite.Rewrites.patch
+import Scanners.Comment
object Parsers {
@@ -1008,9 +1009,23 @@ object Parsers {
in.nextToken()
expr()
} else EmptyTree
+
+ handler match {
+ case Block(Nil, EmptyTree) => syntaxError(
+ "`catch` block does not contain a valid expression, try adding a case like - `case e: Exception =>` to the block",
+ handler.pos
+ )
+ case _ =>
+ }
+
val finalizer =
- if (handler.isEmpty || in.token == FINALLY) { accept(FINALLY); expr() }
- else EmptyTree
+ if (in.token == FINALLY) { accept(FINALLY); expr() }
+ else {
+ if (handler.isEmpty)
+ warning("A try without `catch` or `finally` is equivalent to putting its body in a block; no exceptions are handled.")
+
+ EmptyTree
+ }
ParsedTry(body, handler, finalizer)
}
case THROW =>
@@ -1778,13 +1793,13 @@ object Parsers {
*/
def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
case VAL =>
- patDefOrDcl(posMods(start, mods), in.getDocString(start))
+ patDefOrDcl(posMods(start, mods), in.getDocComment(start))
case VAR =>
- patDefOrDcl(posMods(start, addFlag(mods, Mutable)), in.getDocString(start))
+ patDefOrDcl(posMods(start, addFlag(mods, Mutable)), in.getDocComment(start))
case DEF =>
- defDefOrDcl(posMods(start, mods), in.getDocString(start))
+ defDefOrDcl(posMods(start, mods), in.getDocComment(start))
case TYPE =>
- typeDefOrDcl(posMods(start, mods), in.getDocString(start))
+ typeDefOrDcl(posMods(start, mods), in.getDocComment(start))
case _ =>
tmplDef(start, mods)
}
@@ -1794,7 +1809,7 @@ object Parsers {
* ValDcl ::= Id {`,' Id} `:' Type
* VarDcl ::= Id {`,' Id} `:' Type
*/
- def patDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = {
+ def patDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = {
val lhs = commaSeparated(pattern2)
val tpt = typedOpt()
val rhs =
@@ -1820,7 +1835,7 @@ object Parsers {
* DefDcl ::= DefSig `:' Type
* DefSig ::= id [DefTypeParamClause] ParamClauses
*/
- def defDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = atPos(tokenRange) {
+ def defDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(tokenRange) {
def scala2ProcedureSyntax(resultTypeStr: String) = {
val toInsert =
if (in.token == LBRACE) s"$resultTypeStr ="
@@ -1895,7 +1910,7 @@ object Parsers {
/** TypeDef ::= type Id [TypeParamClause] `=' Type
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
*/
- def typeDefOrDcl(mods: Modifiers, docstring: Option[String] = None): Tree = {
+ def typeDefOrDcl(mods: Modifiers, docstring: Option[Comment] = None): Tree = {
newLinesOpt()
atPos(tokenRange) {
val name = ident().toTypeName
@@ -1917,7 +1932,7 @@ object Parsers {
* | [`case'] `object' ObjectDef
*/
def tmplDef(start: Int, mods: Modifiers): Tree = {
- val docstring = in.getDocString(start)
+ val docstring = in.getDocComment(start)
in.token match {
case TRAIT =>
classDef(posMods(start, addFlag(mods, Trait)), docstring)
@@ -1938,7 +1953,7 @@ object Parsers {
/** ClassDef ::= Id [ClsTypeParamClause]
* [ConstrMods] ClsParamClauses TemplateOpt
*/
- def classDef(mods: Modifiers, docstring: Option[String]): TypeDef = atPos(tokenRange) {
+ def classDef(mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(tokenRange) {
val name = ident().toTypeName
val constr = atPos(in.offset) {
val tparams = typeParamClauseOpt(ParamOwner.Class)
@@ -1965,7 +1980,7 @@ object Parsers {
/** ObjectDef ::= Id TemplateOpt
*/
- def objectDef(mods: Modifiers, docstring: Option[String] = None): ModuleDef = {
+ def objectDef(mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = {
val name = ident()
val template = templateOpt(emptyConstructor())
@@ -2190,7 +2205,7 @@ object Parsers {
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
- val docstring = in.getDocString(start)
+ val docstring = in.getDocComment(start)
ts += objectDef(atPos(start, in.skipToken()) { Modifiers(Package) }, docstring)
if (in.token != EOF) {
acceptStatSep()
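The two new parser diagnostics target source shapes like the following (hand-written illustrations, not test cases from this commit):

  object TryDiagnosticsSketch {
    // New warning: a `try` without `catch` or `finally` handles nothing;
    // it behaves like putting its body in a plain block.
    def noHandler(): Int =
      try 1 + 1

    // An empty handler such as
    //   try riskyCall() catch { }
    // is now rejected with the "`catch` block does not contain a valid expression" error.

    def main(args: Array[String]): Unit = println(noHandler())   // 2
  }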
diff --git a/src/dotty/tools/dotc/parsing/Scanners.scala b/src/dotty/tools/dotc/parsing/Scanners.scala
index 1355ea386..b46ab6348 100644
--- a/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -193,7 +193,7 @@ object Scanners {
}
/** Returns the closest docstring preceding the position supplied */
- def getDocString(pos: Int): Option[String] = {
+ def getDocComment(pos: Int): Option[Comment] = {
def closest(c: Comment, docstrings: List[Comment]): Comment = docstrings match {
case x :: xs if (c.pos.end < x.pos.end && x.pos.end <= pos) => closest(x, xs)
case Nil => c
@@ -203,7 +203,7 @@ object Scanners {
case (list @ (x :: xs)) :: _ => {
val c = closest(x, xs)
docsPerBlockStack = list.dropWhile(_ != c).tail :: docsPerBlockStack.tail
- Some(c.chrs)
+ Some(c)
}
case _ => None
}
diff --git a/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/src/dotty/tools/dotc/transform/ExpandSAMs.scala
index d9445d046..04c6864b1 100644
--- a/src/dotty/tools/dotc/transform/ExpandSAMs.scala
+++ b/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -74,7 +74,8 @@ class ExpandSAMs extends MiniPhaseTransform { thisTransformer =>
Bind(defaultSym, Underscore(selector.tpe.widen)),
EmptyTree,
Literal(Constant(false)))
- cpy.Match(applyRhs)(paramRef, cases.map(translateCase) :+ defaultCase)
+ val annotated = Annotated(New(ref(defn.UncheckedAnnotType)), paramRef)
+ cpy.Match(applyRhs)(annotated, cases.map(translateCase) :+ defaultCase)
case _ =>
tru
}
diff --git a/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/src/dotty/tools/dotc/transform/InterceptedMethods.scala
index ffb4ae756..7c60e8d72 100644
--- a/src/dotty/tools/dotc/transform/InterceptedMethods.scala
+++ b/src/dotty/tools/dotc/transform/InterceptedMethods.scala
@@ -22,7 +22,6 @@ import dotty.tools.dotc.ast.{untpd, tpd}
import dotty.tools.dotc.core.Constants.Constant
import dotty.tools.dotc.core.Types.MethodType
import dotty.tools.dotc.core.Names.Name
-import dotty.runtime.LazyVals
import scala.collection.mutable.ListBuffer
import dotty.tools.dotc.core.Denotations.SingleDenotation
import dotty.tools.dotc.core.SymDenotations.SymDenotation
diff --git a/src/dotty/tools/dotc/transform/PatternMatcher.scala b/src/dotty/tools/dotc/transform/PatternMatcher.scala
index 839189948..490feb7d0 100644
--- a/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -24,6 +24,7 @@ import Applications._
import TypeApplications._
import SymUtils._, core.NameOps._
import core.Mode
+import patmat._
import dotty.tools.dotc.util.Positions.Position
import dotty.tools.dotc.core.Decorators._
@@ -36,7 +37,7 @@ import scala.reflect.internal.util.Collections
* elimRepeated is required
* TODO: outer tests are not generated yet.
*/
-class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTransformer =>
+class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
import dotty.tools.dotc.ast.tpd._
override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
@@ -52,6 +53,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
override def transformMatch(tree: Match)(implicit ctx: Context, info: TransformerInfo): Tree = {
val translated = new Translator()(ctx).translator.translateMatch(tree)
+ // check exhaustivity and unreachability
+ val engine = new SpaceEngine
+ if (engine.checkable(tree)) {
+ engine.checkExhaustivity(tree)
+ engine.checkRedundancy(tree)
+ }
+
translated.ensureConforms(tree.tpe)
}
@@ -72,7 +80,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
ctx.newSymbol(owner, ctx.freshName(prefix + ctr).toTermName, Flags.Synthetic | Flags.Case, tp, coord = pos)
}
- def newSynthCaseLabel(name: String, tpe:Type, owner: Symbol = ctx.owner) =
+ def newSynthCaseLabel(name: String, tpe: Type, owner: Symbol = ctx.owner) =
ctx.newSymbol(owner, ctx.freshName(name).toTermName, Flags.Label | Flags.Synthetic | Flags.Method, tpe).asTerm
//NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -140,30 +148,28 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
}
+ object Rebindings {
+ def apply(from: Symbol, to: Symbol) = new Rebindings(List(from), List(ref(to)))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Rebindings(from, to) else NoRebindings
+ }
- object Rebindings {
- def apply(from: Symbol, to: Symbol) = new Rebindings(List(from), List(ref(to)))
- // requires sameLength(from, to)
- def apply(from: List[Symbol], to: List[Tree]) =
- if (from nonEmpty) new Rebindings(from, to) else NoRebindings
- }
-
- class Rebindings(val lhs: List[Symbol], val rhs: List[Tree]) {
- def >>(other: Rebindings) = {
- if (other eq NoRebindings) this
- else if (this eq NoRebindings) other
- else {
- assert((lhs.toSet ++ other.lhs.toSet).size == lhs.length + other.lhs.length, "no double assignments")
- new Rebindings(this.lhs ++ other.lhs, this.rhs ++ other.rhs)
- }
- }
-
- def emitValDefs: List[ValDef] = {
- Collections.map2(lhs, rhs)((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info)))
+ class Rebindings(val lhs: List[Symbol], val rhs: List[Tree]) {
+ def >>(other: Rebindings) = {
+ if (other eq NoRebindings) this
+ else if (this eq NoRebindings) other
+ else {
+ assert((lhs.toSet ++ other.lhs.toSet).size == lhs.length + other.lhs.length, "no double assignments")
+ new Rebindings(this.lhs ++ other.lhs, this.rhs ++ other.rhs)
}
}
- object NoRebindings extends Rebindings(Nil, Nil)
+ def emitValDefs: List[ValDef] = {
+ Collections.map2(lhs, rhs)((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info)))
+ }
+ }
+ object NoRebindings extends Rebindings(Nil, Nil)
trait OptimizedCodegen extends CodegenCore {
override def codegen: AbsCodegen = optimizedCodegen
@@ -184,12 +190,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
//val matchRes = ctx.newSymbol(NoSymbol, ctx.freshName("matchRes").toTermName, Flags.Synthetic | Flags.Param | Flags.Label | Flags.Method, restpe /*withoutAnnotations*/)
//NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- val caseSyms = cases.scanLeft(ctx.owner.asTerm)((curOwner, nextTree) => newSynthCaseLabel(ctx.freshName("case"), MethodType(Nil, restpe), curOwner)).tail
+
+ val caseSyms: List[TermSymbol] = cases.scanLeft(ctx.owner.asTerm)((curOwner, nextTree) => newSynthCaseLabel(ctx.freshName("case"), MethodType(Nil, restpe), curOwner)).tail
// must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
- val catchAllDef = matchFailGen.map { _(scrutSym)}
+ val catchAllDef = matchFailGen.map { _(scrutSym) }
.getOrElse(Throw(New(defn.MatchErrorType, List(ref(scrutSym)))))
val matchFail = newSynthCaseLabel(ctx.freshName("matchFail"), MethodType(Nil, restpe))
@@ -199,14 +206,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val caseDefs = (cases zip caseSyms zip nextCases).foldRight[Tree](catchAllDefBody) {
// dotty deviation
//case (((mkCase, sym), nextCase), acc) =>
- (x:(((Casegen => Tree), TermSymbol), Tree), acc: Tree) => x match {
-
- case ((mkCase, sym), nextCase) =>
- val body = mkCase(new OptimizedCasegen(nextCase)).ensureConforms(restpe)
-
- DefDef(sym, _ => Block(List(acc), body))
- }}
+ (x: (((Casegen => Tree), TermSymbol), Tree), acc: Tree) => x match {
+ case ((mkCase, sym), nextCase) =>
+ val body = mkCase(new OptimizedCasegen(nextCase)).ensureConforms(restpe)
+ DefDef(sym, _ => Block(List(acc), body))
+ }
+ }
// scrutSym == NoSymbol when generating an alternatives matcher
// val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives
@@ -247,9 +253,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
)
} else {
assert(defn.isProductSubType(prev.tpe))
- Block(
- List(ValDef(b.asTerm, prev)),
- next //Substitution(b, ref(prevSym))(next)
+ val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+ ifThenElseZero(
+ nullCheck,
+ Block(
+ List(ValDef(b.asTerm, prev)),
+ next //Substitution(b, ref(prevSym))(next)
+ )
)
}
}
@@ -277,7 +287,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
next
))
}
-
}
}
final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
@@ -634,7 +643,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val checkedLength: Option[Int],
val prevBinder: Symbol,
val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
+ ) extends FunTreeMaker with PreserveSubPatBinders {
def extraStoredBinders: Set[Symbol] = Set()
@@ -656,9 +665,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
bindSubPats(next)
}
- if (extractorReturnsBoolean) casegen.flatMapCond(extractor, unitLiteral, nextBinder, condAndNext)
- else casegen.flatMap(extractor, nextBinder, condAndNext) // getType?
-
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, unitLiteral, nextBinder, condAndNext)
+ else casegen.flatMap(extractor, nextBinder, condAndNext) // getType?
}
override def toString = "X" + ((extractor, nextBinder.name))
@@ -692,7 +700,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val mutableBinders: List[Symbol],
binderKnownNonNull: Boolean,
val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
+ ) extends FunTreeMaker with PreserveSubPatBinders {
val nextBinder = prevBinder // just passing through
@@ -701,6 +709,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck: Tree = ref(prevBinder).select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+
val cond: Option[Tree] =
if (binderKnownNonNull) extraCond
else extraCond.map(nullCheck.select(defn.Boolean_&&).appliedTo).orElse(Some(nullCheck))
@@ -774,9 +783,9 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val expectedClass = expectedTp.dealias.classSymbol.asClass
val test = codegen._asInstanceOf(testedBinder, expectedTp)
val outerAccessorTested = ctx.atPhase(ctx.explicitOuterPhase.next) { implicit ctx =>
- ExplicitOuter.ensureOuterAccessors(expectedClass)
- test.select(ExplicitOuter.outerAccessor(expectedClass)).select(defn.Object_eq).appliedTo(expectedOuter)
- }
+ ExplicitOuter.ensureOuterAccessors(expectedClass)
+ test.select(ExplicitOuter.outerAccessor(expectedClass)).select(defn.Object_eq).appliedTo(expectedOuter)
+ }
outerAccessorTested
}
}
@@ -840,7 +849,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val nextBinder = afterTest.asTerm
- def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
+ def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol): Boolean = {
// See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
// generates an outer test based on `patType.prefix`, which automatically dealiases.
patType.dealias match {
@@ -858,7 +867,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val np = expectedTp.normalizedPrefix
val ts = np.termSymbol
(ts ne NoSymbol) && needsOuterTest(expectedTp, testedBinder.info, ctx.owner)
-
}
// the logic to generate the run-time test that follows from the fact that
@@ -898,7 +906,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
if (isExpectedReferenceType) mkNullTest
else mkTypeTest
)
- )
+ )
// true when called to type-test the argument to an extractor
// don't do any fancy equality checking, just test the type
@@ -912,7 +920,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
and(mkEqualsTest(ref(tref.symbol.companionModule)), mkTypeTest) // must use == to support e.g. List() == Nil
case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(Literal(Constant(null))))
case ConstantType(const) => mkEqualsTest(expTp(Literal(const)))
- case t:SingletonType => mkEqTest(singleton(expectedTp)) // SI-4577, SI-4897
+ case t: SingletonType => mkEqTest(singleton(expectedTp)) // SI-4577, SI-4897
//case ThisType(sym) => mkEqTest(expTp(This(sym)))
case _ => mkDefault
}
@@ -1042,7 +1050,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val (cases, toHoist) = optimizeCases(scrutSym, casesRebindingPropagated, pt)
-
val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases.map(x => combineExtractors(x) _), synthCatchAll)
if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
@@ -1084,7 +1091,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def unapply(pat: Tree): Boolean = pat match {
case Typed(_, arg) if arg.tpe.isRepeatedParam => true
case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case t if (tpd.isWildcardArg(t)) => true
+ case t if (tpd.isWildcardArg(t)) => true
case x: Ident => isVarPattern(x)
case Alternative(ps) => ps forall unapply
case EmptyTree => true
@@ -1105,7 +1112,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
object SymbolBound {
def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
case Bind(_, expr) if tree.symbol.exists => Some(tree.symbol -> expr)
- case _ => None
+ case _ => None
}
}
@@ -1118,13 +1125,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
final case class BoundTree(binder: Symbol, tree: Tree) {
private lazy val extractor = ExtractorCall(tree, binder)
- def pos = tree.pos
- def tpe = binder.info.widenDealias
- def pt = unbound match {
- // case Star(tpt) => this glbWith seqType(tpt.tpe) dd todo:
- case TypeBound(tpe) => tpe
- case tree => tree.tpe
- }
+ def pos = tree.pos
+ def tpe = binder.info.widenDealias
+ def pt = unbound match {
+ // case Star(tpt) => this glbWith seqType(tpt.tpe) dd todo:
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
def glbWith(other: Type) = ctx.typeComparer.glb(tpe :: other :: Nil)// .normalize
@@ -1192,7 +1199,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// Statically conforms to paramType
if (tpe <:< paramType) treeMaker(binder, false, pos, tpe) :: Nil
else typeTest :: extraction :: Nil
- )
+ )
step(makers: _*)(extractor.subBoundTrees: _*)
}
@@ -1211,7 +1218,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
// don't fail here though (or should we?)
def nextStep(): TranslationStep = tree match {
- case _: UnApply | _: Apply| Typed(_: UnApply | _: Apply, _) => extractorStep()
+ case _: UnApply | _: Apply | Typed(_: UnApply | _: Apply, _) => extractorStep()
case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
case TypeBound(tpe) => typeTestStep(binder, tpe)
case SymbolBound(sym, expr) => bindingStep(sym, expr)
@@ -1222,7 +1229,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
def translate(): List[TreeMaker] = nextStep() merge (_.translate())
-
private def concreteType = tpe.bounds.hi
private def unbound = unbind(tree)
private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
@@ -1244,13 +1250,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
case _ => false
}
- def elimAnonymousClass(t: Type) = t match {
- case t:TypeRef if t.symbol.isAnonymousClass =>
- t.symbol.asClass.typeRef.asSeenFrom(t.prefix, t.symbol.owner)
- case _ =>
- t
- }
-
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
*
@@ -1259,20 +1258,21 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
*
* NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
* thus, you must typecheck the result (and that will in turn translate nested matches)
- * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
+ * this could probably be optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
*/
def translateMatch(match_ : Match): Tree = {
val Match(sel, cases) = match_
- val selectorTp = elimAnonymousClass(sel.tpe.widen/*withoutAnnotations*/)
+ val selectorTp = sel.tpe.widen.deAnonymize/*withoutAnnotations*/
val selectorSym = freshSym(sel.pos, selectorTp, "selector")
val (nonSyntheticCases, defaultOverride) = cases match {
case init :+ last if isSyntheticDefaultCase(last) => (init, Some(((scrut: Symbol) => last.body)))
- case _ => (cases, None)
+ case _ => (cases, None)
}
+
// checkMatchVariablePatterns(nonSyntheticCases) // only used for warnings
// we don't transform after uncurry
@@ -1329,7 +1329,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef): List[TreeMaker] = {
val CaseDef(pattern, guard, body) = caseDef
translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
}
@@ -1398,7 +1398,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
object ExtractorCall {
// TODO: check unargs == args
- def apply(tree: Tree, binder: Symbol): ExtractorCall = {
+ def apply(tree: Tree, binder: Symbol): ExtractorCall = {
tree match {
case UnApply(unfun, implicits, args) =>
val castedBinder = ref(binder).ensureConforms(tree.tpe)
@@ -1477,8 +1477,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
productSelectors(binder.info)
else binder.caseAccessors
val res =
- if (accessors.isDefinedAt(i - 1)) ref(binder).select(accessors(i - 1).name)
- else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ if (accessors.isDefinedAt(i - 1)) ref(binder).select(accessors(i - 1).name)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
val rsym = res.symbol // just for debugging
res
}
@@ -1490,7 +1490,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
if (!aligner.isStar) Nil
else if (expectedLength == 0) seqTree(binder) :: Nil
else genDrop(binder, expectedLength)
- )
+ )
// this error-condition has already been checked by checkStarPatOK:
// if (isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if (lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= " +(resultInMonad, ts, subPatTypes, subPats))
@@ -1501,7 +1501,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
( productElemsToN(binder, firstIndexingBinder)
++ genTake(binder, expectedLength)
++ lastTrees
- ).toList
+ ).toList
}
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
@@ -1509,7 +1509,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
protected def subPatRefs(binder: Symbol): List[Tree] = {
val refs = if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
else if (binder.info.member(nme._1).exists && !isSeq) productElemsToN(binder, totalArity)
- else ref(binder):: Nil
+ else ref(binder) :: Nil
refs
}
@@ -1599,7 +1599,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
// wrong when isSeq, and resultInMonad should always be correct since it comes
// directly from the extractor's result type
- val binder = freshSym(pos, resultInMonad)
+ val binder = freshSym(pos, resultInMonad)
val spb = subPatBinders
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
spb,
@@ -1817,6 +1817,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def expectedTypes = typedPatterns map (_.tpe)
def unexpandedFormals = extractor.varargsTypes
}
+
trait ScalacPatternExpander extends PatternExpander[Tree, Type] {
def NoPattern = EmptyTree
def NoType = core.Types.NoType
@@ -1834,7 +1835,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
( typeOfMemberNamedHead(seq)
orElse typeOfMemberNamedApply(seq)
orElse seq.elemType
- )
+ )
}
def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor = {
ctx.log(s"newExtractor($whole, $fixed, $repeated")
@@ -1861,7 +1862,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
method.paramTypess.head match {
case init :+ last if last.isRepeatedParam => newExtractor(whole, init, repeatedFromVarargs(last))
- case tps => newExtractor(whole, tps, NoRepeated)
+ case tps => newExtractor(whole, tps, NoRepeated)
}
}
@@ -1872,15 +1873,14 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
* Unfortunately the MethodType does not carry the information of whether
* it was unapplySeq, so we have to funnel that information in separately.
*/
- def unapplyMethodTypes(tree:Tree, fun: Tree, args:List[Tree], resultType:Type, isSeq: Boolean): Extractor = {
+ def unapplyMethodTypes(tree: Tree, fun: Tree, args: List[Tree], resultType: Type, isSeq: Boolean): Extractor = {
_id = _id + 1
- val whole = tree.tpe// see scaladoc for Trees.Unapply
+ val whole = tree.tpe // see scaladoc for Trees.Unapply
// fun.tpe.widen.paramTypess.headOption.flatMap(_.headOption).getOrElse(NoType)//firstParamType(method)
val resultOfGet = extractorMemberType(resultType, nme.get)
- //println(s"${_id}unapplyArgs(${result.widen}")
- val expanded:List[Type] = /*(
+ val expanded: List[Type] = /*(
if (result =:= defn.BooleanType) Nil
else if (defn.isProductSubType(result)) productSelectorTypes(result)
else if (result.classSymbol is Flags.CaseClass) result.decls.filter(x => x.is(Flags.CaseAccessor) && x.is(Flags.Method)).map(_.info).toList
@@ -1915,7 +1915,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def offering = extractor.offeringString
def symString = tree.symbol.showLocated
def offerString = if (extractor.isErroneous) "" else s" offering $offering"
- def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + prodArity
+ def arityExpected = (if (extractor.hasSeq) "at least " else "") + prodArity
def err(msg: String) = ctx.error(msg, tree.pos)
def warn(msg: String) = ctx.warning(msg, tree.pos)
@@ -1942,10 +1942,10 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
}
- def apply(tree:Tree, sel: Tree, args: List[Tree], resultType: Type): Aligned = {
+ def apply(tree: Tree, sel: Tree, args: List[Tree], resultType: Type): Aligned = {
val fn = sel match {
case Applied(fn) => fn
- case _ => sel
+ case _ => sel
}
val patterns = newPatterns(args)
val isSeq = sel.symbol.name == nme.unapplySeq
@@ -1975,8 +1975,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
def apply(tree: Tree, resultType: Type): Aligned = tree match {
- case Typed(tree, _) => apply(tree, resultType)
- case Apply(fn, args) => apply(tree, fn, args, resultType)
+ case Typed(tree, _) => apply(tree, resultType)
+ case Apply(fn, args) => apply(tree, fn, args, resultType)
case UnApply(fn, implicits, args) => apply(tree, fn, args, resultType)
}
}
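With `transformMatch` now invoking the `SpaceEngine` on every checkable match, a subsumed case such as the last one below is what `checkRedundancy` reports (ordinary user code, shown only to illustrate what the check flags):

  object RedundancySketch {
    def describe(x: Option[Int]): String = x match {
      case Some(n) => s"got $n"
      case None    => "empty"
      case _       => "unreachable"   // subsumed by the cases above; flagged as redundant
    }

    def main(args: Array[String]): Unit =
      println(describe(Some(1)))      // got 1
  }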
diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala
index b71284049..e74709282 100644
--- a/src/dotty/tools/dotc/transform/PostTyper.scala
+++ b/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -39,6 +39,8 @@ import Symbols._, TypeUtils._
*
* (9) Adds SourceFile annotations to all top-level classes and objects
*
+ * (10) Adds Child annotations to all sealed classes
+ *
* The reason for making this a macro transform is that some functions (in particular
* super and protected accessors and instantiation checks) are naturally top-down and
* don't lend themselves to the bottom-up approach of a mini phase. The other two functions
@@ -91,7 +93,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
*
* should behave differently.
*
- * O1.x should have the same effect as { println("43"; 42 }
+ * O1.x should have the same effect as { println("43"); 42 }
*
* whereas
*
@@ -101,10 +103,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
* purity of the prefix unless the selection goes to an inline val.
*/
private def normalizeTree(tree: Tree)(implicit ctx: Context): Tree = tree match {
- case tree: TypeTree => tree
- case TypeApply(fn, args) =>
- Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType])
- tree
+ case _: TypeTree | _: TypeApply => tree
case _ =>
if (tree.isType) {
Checking.typeChecker.traverse(tree)
@@ -243,6 +242,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
ctx.compilationUnit.source.exists &&
sym != defn.SourceFileAnnot)
sym.addAnnotation(Annotation.makeSourceFile(ctx.compilationUnit.source.file.path))
+
+ if (!sym.isAnonymousClass) // ignore anonymous class
+ for (parent <- sym.asClass.classInfo.classParents) {
+ val pclazz = parent.classSymbol
+ if (pclazz.is(Sealed)) pclazz.addAnnotation(Annotation.makeChild(sym))
+ }
+
tree
}
else {
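Recording a `Child` annotation on each sealed parent is what later lets the exhaustivity check enumerate a sealed type's subclasses. For example (plain user code), once `Circle` and `Square` are registered as children of `Shape`, the checker can report the missing case:

  object SealedChildrenSketch {
    sealed trait Shape
    case class Circle(r: Double)    extends Shape
    case class Square(side: Double) extends Shape

    // Circle and Square are recorded as children of Shape, so the checker can
    // decompose Shape and report that Square is not covered below.
    def area(s: Shape): Double = s match {
      case Circle(r) => math.Pi * r * r
    }

    def main(args: Array[String]): Unit =
      println(area(Circle(1.0)))      // 3.141592653589793
  }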
diff --git a/src/dotty/tools/dotc/transform/TailRec.scala b/src/dotty/tools/dotc/transform/TailRec.scala
index efa0633d8..b345dda61 100644
--- a/src/dotty/tools/dotc/transform/TailRec.scala
+++ b/src/dotty/tools/dotc/transform/TailRec.scala
@@ -338,14 +338,16 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
assert(false, "We should never have gotten inside a pattern")
tree
+ case t @ DefDef(_, _, _, _, _) =>
+ t // todo: could improve to handle DefDef's with a label flag calls to which are in tail position
+
case ValDef(_, _, _) | EmptyTree | Super(_, _) | This(_) |
- Literal(_) | TypeTree(_) | DefDef(_, _, _, _, _) | TypeDef(_, _) =>
+ Literal(_) | TypeTree(_) | TypeDef(_, _) =>
tree
case Return(expr, from) =>
tpd.cpy.Return(tree)(noTailTransform(expr), from)
- case t: DefDef =>
- t // todo: could improve to handle DefDef's with a label flag calls to which are in tail position
+
case _ =>
super.transform(tree)
}
diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala
index ce160d7b0..18e3a6c8a 100644
--- a/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -258,7 +258,7 @@ class TreeChecker extends Phase with SymTransformer {
}
override def typed(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree = {
- val tpdTree = super.typed(tree)
+ val tpdTree = super.typed(tree, pt)
checkIdentNotJavaClass(tpdTree)
tpdTree
}
diff --git a/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
index 6de2bf44c..3774127fa 100644
--- a/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
+++ b/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
@@ -100,7 +100,7 @@ trait TypeTestsCasts {
* The transform happens before erasure of `argType`, thus cannot be merged
* with `transformIsInstanceOf`, which depends on erased type of `argType`.
*/
- def transformOrTypeTest(qual: Tree, argType: Type): Tree = argType match {
+ def transformOrTypeTest(qual: Tree, argType: Type): Tree = argType.dealias match {
case OrType(tp1, tp2) =>
evalOnce(qual) { fun =>
transformOrTypeTest(fun, tp1)
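Dealiasing the tested type first means that a type test against an alias of a union type now expands into the same chain of tests as the union written out. An illustrative Dotty snippet (names invented):

  object OrTypeTestSketch {
    type IntOrString = Int | String

    // The test dealiases IntOrString to Int | String and expands into
    // x.isInstanceOf[Int] || x.isInstanceOf[String], as for the unaliased union.
    def accepts(x: Any): Boolean = x.isInstanceOf[IntOrString]

    def main(args: Array[String]): Unit =
      println((accepts(1), accepts("a"), accepts(1.5)))   // (true,true,false)
  }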
diff --git a/src/dotty/tools/dotc/transform/patmat/Space.scala b/src/dotty/tools/dotc/transform/patmat/Space.scala
new file mode 100644
index 000000000..d942c6853
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -0,0 +1,619 @@
+package dotty.tools.dotc
+package transform
+package patmat
+
+import core.Types._
+import core.Contexts._
+import core.Flags._
+import ast.Trees._
+import ast.tpd
+import core.Decorators._
+import core.Symbols._
+import core.StdNames._
+import core.NameOps._
+import core.Constants._
+
+/** Space logic for checking exhaustivity and unreachability of pattern matching
+ *
+ * Space can be thought of as a set of possible values. A type or a pattern
+ * both refer to spaces. The space of a type is the values that inhabit the
+ * type. The space of a pattern is the values that can be covered by the
+ * pattern.
+ *
+ * Space is recursively defined as follows:
+ *
+ * 1. `Empty` is a space
+ * 2. For a type T, `Typ(T)` is a space
+ * 3. A union of spaces `S1 | S2 | ...` is a space
+ * 4. For a case class Kon(x1: T1, x2: T2, .., xn: Tn), if S1, S2, ..., Sn
+ * are spaces, then `Kon(S1, S2, ..., Sn)` is a space.
+ * 5. A constant `Const(value, T)` is a point in space
+ * 6. A stable identifier `Var(sym, T)` is a space
+ *
+ * For the problem of exhaustivity check, its formulation in terms of space is as follows:
+ *
+ * Is the space Typ(T) a subspace of the union of space covered by all the patterns?
+ *
+ * The problem of unreachable patterns can be formulated as follows:
+ *
+ * Is the space covered by a pattern a subspace of the space covered by previous patterns?
+ *
+ * Assumptions:
+ * (1) One case class cannot be inherited, directly or indirectly, by another
+ * case class.
+ * (2) The algorithm does not handle inheritance from a case class well.
+ *
+ */
+
+
+/** space definition */
+sealed trait Space
+
+/** Empty space */
+case object Empty extends Space
+
+/** Space representing the set of all values of a type
+ *
+ * @param tp: the type this space represents
+ * @param decomposed: does the space result from decomposition? Used for pretty print
+ *
+ */
+case class Typ(tp: Type, decomposed: Boolean) extends Space
+
+/** Space representing a constructor pattern */
+case class Kon(tp: Type, params: List[Space]) extends Space
+
+/** Union of spaces */
+case class Or(spaces: List[Space]) extends Space
+
+/** Point in space */
+sealed trait Point extends Space
+
+/** Point representing variables (stable identifiers) in patterns */
+case class Var(sym: Symbol, tp: Type) extends Point
+
+/** Point representing literal constants in patterns */
+case class Const(value: Constant, tp: Type) extends Point
+
+/** abstract space logic */
+trait SpaceLogic {
+ /** Is `tp1` a subtype of `tp2`? */
+ def isSubType(tp1: Type, tp2: Type): Boolean
+
+ /** Is `tp1` the same type as `tp2`? */
+ def isEqualType(tp1: Type, tp2: Type): Boolean
+
+ /** Is the type `tp` decomposable? i.e. all values of the type can be covered
+ * by its decomposed types.
+ *
+ * Abstract sealed class, OrType, Boolean and Java enums can be decomposed.
+ */
+ def canDecompose(tp: Type): Boolean
+
+ /** Return term parameter types of the case class `tp` */
+ def signature(tp: Type): List[Type]
+
+ /** Get components of decomposable types */
+ def decompose(tp: Type): List[Space]
+
+ /** Simplify a space using the laws; after simplification there are no nested unions */
+ def simplify(space: Space): Space = space match {
+ case Kon(tp, spaces) =>
+ val sp = Kon(tp, spaces.map(simplify _))
+ if (sp.params.contains(Empty)) Empty
+ else sp
+ case Or(spaces) =>
+ val set = spaces.map(simplify _).flatMap {
+ case Or(ss) => ss
+ case s => Seq(s)
+ } filter (_ != Empty)
+
+ if (set.isEmpty) Empty
+ else if (set.size == 1) set.toList(0)
+ else Or(set)
+ case Typ(tp, _) =>
+ if (canDecompose(tp) && decompose(tp).isEmpty) Empty
+ else space
+ case _ => space
+ }
+
+ /** Flatten space to get rid of `Or` for pretty print */
+ def flatten(space: Space): List[Space] = space match {
+ case Kon(tp, spaces) =>
+ val flats = spaces.map(flatten _)
+
+ flats.foldLeft(List[Kon]()) { (acc, flat) =>
+ if (acc.isEmpty) flat.map(s => Kon(tp, Nil :+ s))
+ else for (Kon(tp, ss) <- acc; s <- flat) yield Kon(tp, ss :+ s)
+ }
+ case Or(spaces) =>
+ spaces.flatMap(flatten _)
+ case _ => List(space)
+ }
+
+ /** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */
+ def isSubspace(a: Space, b: Space): Boolean = {
+ def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) => true
+ case (_, Empty) => false
+ case (Or(ss), _) => ss.forall(isSubspace(_, b))
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ isSubType(tp1, tp2) || tryDecompose1(tp1) || tryDecompose2(tp2)
+ case (Typ(tp1, _), Or(ss)) =>
+ ss.exists(isSubspace(a, _)) || tryDecompose1(tp1)
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ isSubType(tp1, tp2) && isSubspace(Kon(tp2, signature(tp2).map(Typ(_, false))), b) ||
+ tryDecompose1(tp1)
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ isSubType(tp1, tp2) ||
+ simplify(a) == Empty ||
+ (isSubType(tp2, tp1) && tryDecompose1(tp1)) ||
+ tryDecompose2(tp2)
+ case (Kon(_, _), Or(_)) =>
+ simplify(minus(a, b)) == Empty
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ isEqualType(tp1, tp2) && ss1.zip(ss2).forall((isSubspace _).tupled)
+ case (Const(v1, _), Const(v2, _)) => v1 == v2
+ case (Const(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2)
+ case (Const(_, _), Or(ss)) => ss.exists(isSubspace(a, _))
+ case (Const(_, _), _) => false
+ case (_, Const(_, _)) => false
+ case (Var(x, _), Var(y, _)) => x == y
+ case (Var(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2)
+ case (Var(_, _), Or(ss)) => ss.exists(isSubspace(a, _))
+ case (Var(_, _), _) => false
+ case (_, Var(_, _)) => false
+ }
+ }
+
+ /** Intersection of two spaces */
+ def intersect(a: Space, b: Space): Space = {
+ def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) | (_, Empty) => Empty
+ case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filterConserve(_ ne Empty))
+ case (Or(ss), _) => Or(ss.map(intersect(_, b)).filterConserve(_ ne Empty))
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ if (!isEqualType(tp1, tp2)) Empty
+ else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty
+ else Kon(tp1, ss1.zip(ss2).map((intersect _).tupled))
+ case (Const(v1, _), Const(v2, _)) =>
+ if (v1 == v2) a else Empty
+ case (Const(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Const(_, _), _) => Empty
+ case (Typ(tp1, _), Const(_, tp2)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (_, Const(_, _)) => Empty
+ case (Var(x, _), Var(y, _)) =>
+ if (x == y) a else Empty
+ case (Var(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Var(_, _), _) => Empty
+ case (Typ(tp1, _), Var(_, tp2)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (_, Var(_, _)) => Empty
+ }
+ }
+
+ /** The space of a not covered by b */
+ def minus(a: Space, b: Space): Space = {
+ def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) => Empty
+ case (_, Empty) => a
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ // corner case: inheriting a case class
+ // rationale: every instance of `tp1` is covered by `tp2(_)`
+ if (isSubType(tp1, tp2)) minus(Kon(tp2, signature(tp2).map(Typ(_, false))), b)
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else a
+ case (_, Or(ss)) =>
+ ss.foldLeft(a)(minus)
+ case (Or(ss), _) =>
+ Or(ss.map(minus(_, b)))
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ // uncovered corner case: tp2 <: tp1
+ if (isSubType(tp1, tp2)) Empty
+ else if (simplify(a) == Empty) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ if (!isEqualType(tp1, tp2)) a
+ else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) a
+ else if (ss1.zip(ss2).forall((isSubspace _).tupled)) Empty
+ else
+ // `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
+ Or(ss1.zip(ss2).map((minus _).tupled).zip(0 to ss2.length - 1).map {
+ case (ri, i) => Kon(tp1, ss1.updated(i, ri))
+ })
+ case (Const(v1, _), Const(v2, _)) =>
+ if (v1 == v2) Empty else a
+ case (Const(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Const(_, _), _) => a
+ case (Typ(tp1, _), Const(_, tp2)) => // Boolean & Java enum
+ if (canDecompose(tp1)) tryDecompose1(tp1)
+ else a
+ case (_, Const(_, _)) => a
+ case (Var(x, _), Var(y, _)) =>
+ if (x == y) Empty else a
+ case (Var(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Var(_, _), _) => a
+ case (_, Var(_, _)) => a
+ }
+ }
+}
+
+/** Scala implementation of space logic */
+class SpaceEngine(implicit ctx: Context) extends SpaceLogic {
+ import tpd._
+
+ /** Return the space that represents the pattern `pat`
+ *
+ * If roundUp is true, approximate extractors by their type,
+ * otherwise approximate extractors by Empty
+ */
+ def project(pat: Tree, roundUp: Boolean = true)(implicit ctx: Context): Space = pat match {
+ case Literal(c) => Const(c, c.tpe)
+ case _: BackquotedIdent => Var(pat.symbol, pat.tpe)
+ case Ident(_) | Select(_, _) =>
+ pat.tpe.stripAnnots match {
+ case tp: TermRef =>
+ if (pat.symbol.is(Enum))
+ Const(Constant(pat.symbol), tp)
+ else if (tp.underlyingIterator.exists(_.classSymbol.is(Module)))
+ Typ(tp.widenTermRefExpr.stripAnnots, false)
+ else
+ Var(pat.symbol, tp)
+ case tp => Typ(tp, false)
+ }
+ case Alternative(trees) => Or(trees.map(project(_, roundUp)))
+ case Bind(_, pat) => project(pat)
+ case UnApply(_, _, pats) =>
+ if (pat.tpe.classSymbol.is(CaseClass))
+ Kon(pat.tpe.stripAnnots, pats.map(pat => project(pat, roundUp)))
+ else if (roundUp) Typ(pat.tpe.stripAnnots, false)
+ else Empty
+ case Typed(pat @ UnApply(_, _, _), _) => project(pat)
+ case Typed(expr, _) => Typ(expr.tpe.stripAnnots, true)
+ case _ =>
+ Empty
+ }
+
+ /* Erase a type binding according to erasure semantics in pattern matching */
+ def erase(tp: Type): Type = {
+ def doErase(tp: Type): Type = tp match {
+ case tp: HKApply => erase(tp.superType)
+ case tp: RefinedType => erase(tp.parent)
+ case _ => tp
+ }
+
+ tp match {
+ case OrType(tp1, tp2) =>
+ OrType(erase(tp1), erase(tp2))
+ case AndType(tp1, tp2) =>
+ AndType(erase(tp1), erase(tp2))
+ case _ =>
+ val origin = doErase(tp)
+ if (origin =:= defn.ArrayType) tp else origin
+ }
+ }
+
+ /** Is `tp1` a subtype of `tp2`? */
+ def isSubType(tp1: Type, tp2: Type): Boolean = {
+ // check SI-9657 and tests/patmat/gadt.scala
+ erase(tp1) <:< erase(tp2)
+ }
+
+ def isEqualType(tp1: Type, tp2: Type): Boolean = tp1 =:= tp2
+
+ /** Parameter types of the case class type `tp` */
+ def signature(tp: Type): List[Type] = {
+ val ktor = tp.classSymbol.primaryConstructor.info
+
+ val meth = ktor match {
+ case ktor: PolyType =>
+ ktor.instantiate(tp.classSymbol.typeParams.map(_.typeRef)).asSeenFrom(tp, tp.classSymbol)
+ case _ => ktor
+ }
+
+ // refine path-dependent type in params. refer to t9672
+ meth.firstParamTypes.map(_.asSeenFrom(tp, tp.classSymbol))
+ }
+
+ /** Decompose a type into subspaces -- assume the type can be decomposed */
+ def decompose(tp: Type): List[Space] = {
+ val children = tp.classSymbol.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot =>
+ // refer to definition of Annotation.makeChild
+ annot.tree match {
+ case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol
+ }
+ }
+
+ tp match {
+ case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true))
+ case _ if tp =:= ctx.definitions.BooleanType =>
+ List(
+ Const(Constant(true), ctx.definitions.BooleanType),
+ Const(Constant(false), ctx.definitions.BooleanType)
+ )
+ case _ if tp.classSymbol.is(Enum) =>
+ children.map(sym => Const(Constant(sym), tp))
+ case _ =>
+ val parts = children.map { sym =>
+ if (sym.is(ModuleClass))
+ sym.asClass.classInfo.selfType
+ else if (sym.info.typeParams.length > 0 || tp.isInstanceOf[TypeRef])
+ refine(tp, sym.typeRef)
+ else
+ sym.typeRef
+ } filter { tpe =>
+ // A child class may not always be a subtype of its parent:
+ // GADT & path-dependent types
+ tpe <:< expose(tp)
+ }
+
+ parts.map(Typ(_, true))
+ }
+ }
+
+ /** Refine tp2 based on tp1
+ *
+ * E.g. if `tp1` is `Option[Int]`, `tp2` is `Some`, then return
+ * `Some[Int]`.
+ *
+ * If `tp1` is `path1.A`, `tp2` is `path2.B`, and `path1` is subtype of
+ * `path2`, then return `path1.B`.
+ */
+ def refine(tp1: Type, tp2: Type): Type = (tp1, tp2) match {
+ case (tp1: RefinedType, _) => tp1.wrapIfMember(refine(tp1.parent, tp2))
+ case (tp1: HKApply, _) => refine(tp1.superType, tp2)
+ case (TypeRef(ref1: TypeProxy, _), tp2 @ TypeRef(ref2: TypeProxy, name)) =>
+ if (ref1.underlying <:< ref2.underlying) TypeRef(ref1, name) else tp2
+ case _ => tp2
+ }
+
+ /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */
+ def canDecompose(tp: Type): Boolean = {
+ tp.classSymbol.is(allOf(Abstract, Sealed)) ||
+ tp.classSymbol.is(allOf(Trait, Sealed)) ||
+ tp.isInstanceOf[OrType] ||
+ tp =:= ctx.definitions.BooleanType ||
+ tp.classSymbol.is(Enum)
+ }
+
+ /** Show friendly type name with current scope in mind
+ *
+ * E.g. C.this.B --> B if current owner is C
+ * C.this.x.T --> x.T if current owner is C
+ * X[T] --> X
+ * C --> C if current owner is C !!!
+ *
+ */
+ def showType(tp: Type): String = {
+ val enclosingCls = ctx.owner.enclosingClass.asClass.classInfo.symbolicTypeRef
+
+ def isOmittable(sym: Symbol) =
+ sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName ||
+ ctx.definitions.UnqualifiedOwnerTypes.exists(_.symbol == sym) ||
+ sym.showFullName.startsWith("scala.") ||
+ sym == enclosingCls.typeSymbol
+
+ def refinePrefix(tp: Type): String = tp match {
+ case NoPrefix => ""
+ case tp: NamedType if isOmittable(tp.symbol) => ""
+ case tp: ThisType => refinePrefix(tp.tref)
+ case tp: RefinedType => refinePrefix(tp.parent)
+ case tp: NamedType => tp.name.show.stripSuffix("$")
+ }
+
+ def refine(tp: Type): String = tp match {
+ case tp: RefinedType => refine(tp.parent)
+ case tp: ThisType => refine(tp.tref)
+ case tp: NamedType =>
+ val pre = refinePrefix(tp.prefix)
+ if (tp.name == tpnme.higherKinds) pre
+ else if (pre.isEmpty) tp.name.show.stripSuffix("$")
+ else pre + "." + tp.name.show.stripSuffix("$")
+ case _ => tp.show.stripSuffix("$")
+ }
+
+ val text = tp.stripAnnots match {
+ case tp: OrType => showType(tp.tp1) + " | " + showType(tp.tp2)
+ case tp => refine(tp)
+ }
+
+ if (text.isEmpty) enclosingCls.show.stripSuffix("$")
+ else text
+ }
+
+ /** Display spaces */
+ def show(s: Space): String = {
+ def doShow(s: Space, mergeList: Boolean = false): String = s match {
+ case Empty => ""
+ case Const(v, _) => v.show
+ case Var(x, _) => x.show
+ case Typ(tp, decomposed) =>
+ val sym = tp.widen.classSymbol
+
+ if (sym.is(ModuleClass))
+ showType(tp)
+ else if (ctx.definitions.isTupleType(tp))
+ signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (sym.showFullName == "scala.collection.immutable.::")
+ if (mergeList) "_" else "List(_)"
+ else if (tp.classSymbol.is(CaseClass))
+ // use constructor syntax for case class
+ showType(tp) + signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (signature(tp).nonEmpty)
+ tp.classSymbol.name + signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (decomposed) "_: " + showType(tp)
+ else "_"
+ case Kon(tp, params) =>
+ if (ctx.definitions.isTupleType(tp))
+ "(" + params.map(doShow(_)).mkString(", ") + ")"
+ else if (tp.widen.classSymbol.showFullName == "scala.collection.immutable.::")
+ if (mergeList) params.map(doShow(_, mergeList)).mkString(", ")
+ else params.map(doShow(_, true)).filter(_ != "Nil").mkString("List(", ", ", ")")
+ else
+ showType(tp) + params.map(doShow(_)).mkString("(", ", ", ")")
+ case Or(_) =>
+ throw new Exception("incorrect flatten result " + s)
+ }
+
+ flatten(s).map(doShow(_, false)).distinct.mkString(", ")
+ }
+
+ def checkable(tree: Match): Boolean = {
+ def isCheckable(tp: Type): Boolean = tp match {
+ case AnnotatedType(tp, annot) =>
+ (ctx.definitions.UncheckedAnnot != annot.symbol) && isCheckable(tp)
+ case _ =>
+ // Possible to check everything, but be compatible with scalac by default
+ ctx.settings.YcheckAllPatmat.value ||
+ tp.typeSymbol.is(Sealed) ||
+ tp.isInstanceOf[OrType] ||
+ tp.typeSymbol == ctx.definitions.BooleanType.typeSymbol ||
+ tp.typeSymbol.is(Enum) ||
+ canDecompose(tp) ||
+ (defn.isTupleType(tp) && tp.dealias.argInfos.exists(isCheckable(_)))
+ }
+
+ val Match(sel, cases) = tree
+ isCheckable(sel.tpe.widen.deAnonymize.dealias)
+ }
+
+
+ /** Expose a refined type to eliminate references to type variables
+ *
+ * A = B M { type T = A } ~~> M { type T = B }
+ *
+ * A <: X :> Y M { type T = A } ~~> M { type T <: X :> Y }
+ *
+ * A <: X :> Y B <: U :> V M { type T <: A :> B } ~~> M { type T <: X :> V }
+ *
+ * A = X B = Y M { type T <: A :> B } ~~> M { type T <: X :> Y }
+ */
+ def expose(tp: Type): Type = {
+ def follow(tp: Type, up: Boolean): Type = tp match {
+ case tp: TypeProxy =>
+ tp.underlying match {
+ case TypeBounds(lo, hi) =>
+ follow(if (up) hi else lo, up)
+ case _ =>
+ tp
+ }
+ case OrType(tp1, tp2) =>
+ OrType(follow(tp1, up), follow(tp2, up))
+ case AndType(tp1, tp2) =>
+ AndType(follow(tp1, up), follow(tp2, up))
+ }
+
+ tp match {
+ case tp: RefinedType =>
+ tp.refinedInfo match {
+ case tpa : TypeAlias =>
+ val hi = follow(tpa.alias, true)
+ val lo = follow(tpa.alias, false)
+ val refined = if (hi =:= lo)
+ tpa.derivedTypeAlias(hi)
+ else
+ tpa.derivedTypeBounds(lo, hi)
+
+ tp.derivedRefinedType(
+ expose(tp.parent),
+ tp.refinedName,
+ refined
+ )
+ case tpb @ TypeBounds(lo, hi) =>
+ tp.derivedRefinedType(
+ expose(tp.parent),
+ tp.refinedName,
+ tpb.derivedTypeBounds(follow(lo, false), follow(hi, true))
+ )
+ }
+ case _ => tp
+ }
+ }
+
+ def checkExhaustivity(_match: Match): Unit = {
+ val Match(sel, cases) = _match
+ val selTyp = sel.tpe.widen.deAnonymize.dealias
+
+
+ val patternSpace = cases.map(x => project(x.pat)).reduce((a, b) => Or(List(a, b)))
+ val uncovered = simplify(minus(Typ(selTyp, true), patternSpace))
+
+ if (uncovered != Empty) {
+ ctx.warning(
+ "match may not be exhaustive.\n" +
+ s"It would fail on the following input: " +
+ show(uncovered), _match.pos
+ )
+ }
+ }
+
+ def checkRedundancy(_match: Match): Unit = {
+ val Match(sel, cases) = _match
+ // ignore selector type for now
+ // val selTyp = sel.tpe.widen.deAnonymize.dealias
+
+ // start from the second case: the first one can't be redundant
+ (1 until cases.length).foreach { i =>
+ // for the redundancy check, treat a guard as false and an extractor as
+ // matching nothing, in order to approximate soundly
+ val prevs = cases.take(i).map { x =>
+ if (x.guard.isEmpty) project(x.pat, false)
+ else Empty
+ }.reduce((a, b) => Or(List(a, b)))
+
+ val curr = project(cases(i).pat)
+
+ if (isSubspace(curr, prevs)) {
+ ctx.warning("unreachable code", cases(i).body.pos)
+ }
+ }
+ }
+}
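As a concrete anchor for the space algebra above, here is a hedged, user-level sketch (hypothetical types, not part of the patch) of the kind of match `checkExhaustivity` targets. The single pattern projects to Kon(Sm, Typ(Int)); subtracting it from Typ(Opt) leaves Typ(Nn), so the checker would warn that the match may not be exhaustive and report Nn as the uncovered input.

    sealed trait Opt
    case class Sm(x: Int) extends Opt
    case object Nn extends Opt

    def head(o: Opt): Int = o match {
      case Sm(x) => x   // uncovered space: Nn
    }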
diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala
index efd12cb5e..a9212e5d6 100644
--- a/src/dotty/tools/dotc/typer/Applications.scala
+++ b/src/dotty/tools/dotc/typer/Applications.scala
@@ -31,7 +31,7 @@ import language.implicitConversions
object Applications {
import tpd._
- def extractorMemberType(tp: Type, name: Name, errorPos: Position = NoPosition)(implicit ctx:Context) = {
+ def extractorMemberType(tp: Type, name: Name, errorPos: Position = NoPosition)(implicit ctx: Context) = {
val ref = tp.member(name).suchThat(_.info.isParameterless)
if (ref.isOverloaded)
errorType(i"Overloaded reference to $ref is not allowed in extractor", errorPos)
@@ -41,12 +41,12 @@ object Applications {
ref.info.widenExpr.dealias
}
- def productSelectorTypes(tp: Type, errorPos: Position = NoPosition)(implicit ctx:Context): List[Type] = {
+ def productSelectorTypes(tp: Type, errorPos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
val sels = for (n <- Iterator.from(0)) yield extractorMemberType(tp, nme.selectorName(n), errorPos)
sels.takeWhile(_.exists).toList
}
- def productSelectors(tp: Type)(implicit ctx:Context): List[Symbol] = {
+ def productSelectors(tp: Type)(implicit ctx: Context): List[Symbol] = {
val sels = for (n <- Iterator.from(0)) yield tp.member(nme.selectorName(n)).symbol
sels.takeWhile(_.exists).toList
}
@@ -58,7 +58,7 @@ object Applications {
else tp :: Nil
} else tp :: Nil
- def unapplyArgs(unapplyResult: Type, unapplyFn:Tree, args:List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
+ def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
def seqSelector = defn.RepeatedParamType.appliedTo(unapplyResult.elemType :: Nil)
def getTp = extractorMemberType(unapplyResult, nme.get, pos)
@@ -541,24 +541,13 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
- /** Try same application with an implicit inserted around the qualifier of the function
- * part. Return an optional value to indicate success.
- */
- def tryWithImplicitOnQualifier(fun1: Tree, proto: FunProto)(implicit ctx: Context): Option[Tree] =
- tryInsertImplicitOnQualifier(fun1, proto) flatMap { fun2 =>
- tryEither { implicit ctx =>
- Some(typedApply(
- cpy.Apply(tree)(untpd.TypedSplice(fun2), proto.typedArgs map untpd.TypedSplice),
- pt)): Option[Tree]
- } { (_, _) => None }
- }
-
def realApply(implicit ctx: Context): Tree = track("realApply") {
val originalProto = new FunProto(tree.args, IgnoredProto(pt), this)(argCtx(tree))
val fun1 = typedExpr(tree.fun, originalProto)
// Warning: The following lines are dirty and fragile. We record that auto-tupling was demanded as
- // a side effect in adapt. If it was, we assume the tupled proto-type in the rest of the application.
+ // a side effect in adapt. If it was, we assume the tupled proto-type in the rest of the application,
+ // until, possibly, we have to fall back to insert an implicit on the qualifier.
// This crucially relies on the fact that `proto` is used only in a single call of `adapt`,
// otherwise we would get possible cross-talk between different `adapt` calls using the same
// prototype. A cleaner alternative would be to return a modified prototype from `adapt` together with
@@ -574,32 +563,49 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
if (!constrainResult(fun1.tpe.widen, proto.derivedFunProto(resultType = pt)))
typr.println(i"result failure for $tree with type ${fun1.tpe.widen}, expected = $pt")
+ /** Type application where arguments come from prototype, and no implicits are inserted */
+ def simpleApply(fun1: Tree, proto: FunProto)(implicit ctx: Context): Tree =
+ methPart(fun1).tpe match {
+ case funRef: TermRef =>
+ val app =
+ if (proto.allArgTypesAreCurrent())
+ new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt)
+ else
+ new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx(tree))
+ convertNewGenericArray(ConstFold(app.result))
+ case _ =>
+ handleUnexpectedFunType(tree, fun1)
+ }
+
+ /** Try same application with an implicit inserted around the qualifier of the function
+ * part. Return an optional value to indicate success.
+ */
+ def tryWithImplicitOnQualifier(fun1: Tree, proto: FunProto)(implicit ctx: Context): Option[Tree] =
+ tryInsertImplicitOnQualifier(fun1, proto) flatMap { fun2 =>
+ tryEither {
+ implicit ctx => Some(simpleApply(fun2, proto)): Option[Tree]
+ } {
+ (_, _) => None
+ }
+ }
+
fun1.tpe match {
case ErrorType => tree.withType(ErrorType)
- case TryDynamicCallType =>
- tree match {
- case tree @ Apply(Select(qual, name), args) if !isDynamicMethod(name) =>
- typedDynamicApply(qual, name, args, pt)(tree)
- case _ =>
- handleUnexpectedFunType(tree, fun1)
- }
- case _ => methPart(fun1).tpe match {
- case funRef: TermRef =>
- tryEither { implicit ctx =>
- val app =
- if (proto.argsAreTyped) new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt)
- else new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx(tree))
- val result = app.result
- convertNewGenericArray(ConstFold(result))
- } { (failedVal, failedState) =>
+ case TryDynamicCallType => typedDynamicApply(tree, pt)
+ case _ =>
+ tryEither {
+ implicit ctx => simpleApply(fun1, proto)
+ } {
+ (failedVal, failedState) =>
def fail = { failedState.commit(); failedVal }
+ // Try once with the original prototype and once (if different) with the tupled one.
+ // The reason we need to try both is that the decision whether to use tupled
+ // or not was already taken but might have to be revised when an implicit
+ // is inserted on the qualifier.
tryWithImplicitOnQualifier(fun1, originalProto).getOrElse(
if (proto eq originalProto) fail
else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail))
- }
- case _ =>
- handleUnexpectedFunType(tree, fun1)
- }
+ }
}
}
@@ -611,7 +617,7 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
*
* { val xs = es; e' = e' + args }
*/
- def typedOpAssign: Tree = track("typedOpAssign") {
+ def typedOpAssign: Tree = track("typedOpAssign") {
val Apply(Select(lhs, name), rhss) = tree
val lhs1 = typedExpr(lhs)
val liftedDefs = new mutable.ListBuffer[Tree]
@@ -667,7 +673,12 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
}
case _ =>
}
- assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)
+ def tryDynamicTypeApply(): Tree = typedFn match {
+ case typedFn: Select if !pt.isInstanceOf[FunProto] => typedDynamicSelect(typedFn, typedArgs, pt)
+ case _ => tree.withType(TryDynamicCallType)
+ }
+ if (typedFn.tpe eq TryDynamicCallType) tryDynamicTypeApply()
+ else assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)
}
/** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray.
@@ -763,14 +774,13 @@ trait Applications extends Compatibility { self: Typer with Dynamic =>
* The generalizations of a type T are the smallest set G such that
*
* - T is in G
- * - If a typeref R in G represents a trait, R's superclass is in G.
+ * - If a typeref R in G represents a class or trait, R's superclass is in G.
* - If a type proxy P is not a reference to a class, P's supertype is in G
*/
def isSubTypeOfParent(subtp: Type, tp: Type)(implicit ctx: Context): Boolean =
if (subtp <:< tp) true
else tp match {
- case tp: TypeRef if tp.symbol.isClass =>
- tp.symbol.is(Trait) && isSubTypeOfParent(subtp, tp.firstParent)
+ case tp: TypeRef if tp.symbol.isClass => isSubTypeOfParent(subtp, tp.firstParent)
case tp: TypeProxy => isSubTypeOfParent(subtp, tp.superType)
case _ => false
}
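For reference, a minimal sketch (ordinary Scala, not from the patch) of the op-assign fallback that `typedOpAssign` implements: when the left-hand side has no applicable `+=` member, the call is re-typed as a plain assignment.

    var count: Int = 0
    count += 1        // Int has no `+=`, so this is typed as `count = count + 1`

    val buf = new StringBuilder
    buf ++= "abc"     // StringBuilder defines `++=`, so no rewriting is needed here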
diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala
index d77520c77..101974b32 100644
--- a/src/dotty/tools/dotc/typer/Checking.scala
+++ b/src/dotty/tools/dotc/typer/Checking.scala
@@ -37,7 +37,7 @@ object Checking {
* well as for AppliedTypeTree nodes. Also checks that type arguments to
* *-type parameters are fully applied.
*/
- def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) = {
+ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context): Unit = {
(args, boundss).zipped.foreach { (arg, bound) =>
if (!bound.isHK && arg.tpe.isHK)
ctx.error(ex"missing type parameter(s) for $arg", arg.pos)
diff --git a/src/dotty/tools/dotc/typer/Dynamic.scala b/src/dotty/tools/dotc/typer/Dynamic.scala
index aeb3cca8c..b5ace87d3 100644
--- a/src/dotty/tools/dotc/typer/Dynamic.scala
+++ b/src/dotty/tools/dotc/typer/Dynamic.scala
@@ -2,15 +2,14 @@ package dotty.tools
package dotc
package typer
-import dotty.tools.dotc.ast.Trees.NamedArg
-import dotty.tools.dotc.ast.tpd._
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.ast.untpd
import dotty.tools.dotc.core.Constants.Constant
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Names.Name
import dotty.tools.dotc.core.StdNames._
import dotty.tools.dotc.core.Types._
-import dotty.tools.dotc.core.Mode
import dotty.tools.dotc.core.Decorators._
object Dynamic {
@@ -28,44 +27,78 @@ object Dynamic {
* The first matching rule is applied.
*/
trait Dynamic { self: Typer with Applications =>
+ import Dynamic._
+ import tpd._
/** Translate a selection that does not typecheck according to the normal rules into an applyDynamic/applyDynamicNamed.
- * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
- * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
+ * foo.bar[T0, ...](baz0, baz1, ...) ~~> foo.applyDynamic[T0, ...](bar)(baz0, baz1, ...)
+ * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ * foo.bar[T0, ...](x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed[T0, ...]("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
*/
- def typedDynamicApply(qual: untpd.Tree, name: Name, args: List[untpd.Tree], pt: Type)(original: untpd.Apply)(
- implicit ctx: Context): Tree = {
- def isNamedArg(arg: untpd.Tree): Boolean = arg match { case NamedArg(_, _) => true; case _ => false }
- val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic
- if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) {
- ctx.error("applyDynamicNamed does not support passing a vararg parameter", original.pos)
- original.withType(ErrorType)
- } else {
- def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg))
- def namedArgs = args.map {
- case NamedArg(argName, arg) => namedArgTuple(argName.toString, arg)
- case arg => namedArgTuple("", arg)
+ def typedDynamicApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
+ def typedDynamicApply(qual: untpd.Tree, name: Name, targs: List[untpd.Tree]): Tree = {
+ def isNamedArg(arg: untpd.Tree): Boolean = arg match { case NamedArg(_, _) => true; case _ => false }
+ val args = tree.args
+ val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic
+ if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) {
+ ctx.error("applyDynamicNamed does not support passing a vararg parameter", tree.pos)
+ tree.withType(ErrorType)
+ } else {
+ def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg))
+ def namedArgs = args.map {
+ case NamedArg(argName, arg) => namedArgTuple(argName.toString, arg)
+ case arg => namedArgTuple("", arg)
+ }
+ val args1 = if (dynName == nme.applyDynamic) args else namedArgs
+ typedApply(untpd.Apply(coreDynamic(qual, dynName, name, targs), args1), pt)
}
- val args1 = if (dynName == nme.applyDynamic) args else namedArgs
- typedApply(untpd.Apply(coreDynamic(qual, dynName, name), args1), pt)
+ }
+
+ tree.fun match {
+ case Select(qual, name) if !isDynamicMethod(name) =>
+ typedDynamicApply(qual, name, Nil)
+ case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
+ typedDynamicApply(qual, name, targs)
+ case TypeApply(fun, targs) =>
+ typedDynamicApply(fun, nme.apply, targs)
+ case fun =>
+ typedDynamicApply(fun, nme.apply, Nil)
}
}
/** Translate a selection that does not typecheck according to the normal rules into a selectDynamic.
- * foo.bar ~~> foo.selectDynamic(bar)
+ * foo.bar ~~> foo.selectDynamic(bar)
+ * foo.bar[T0, ...] ~~> foo.selectDynamic[T0, ...](bar)
*
* Note: the inner part of the translation foo.bar(baz) = quux ~~> foo.selectDynamic(bar).update(baz, quux) is achieved
* through an existing transformation in typedAssign [foo.bar(baz) = quux ~~> foo.bar.update(baz, quux)].
*/
- def typedDynamicSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree =
- typedApply(coreDynamic(tree.qualifier, nme.selectDynamic, tree.name), pt)
+ def typedDynamicSelect(tree: untpd.Select, targs: List[Tree], pt: Type)(implicit ctx: Context): Tree =
+ typedApply(coreDynamic(tree.qualifier, nme.selectDynamic, tree.name, targs), pt)
/** Translate a selection that does not typecheck according to the normal rules into an updateDynamic.
* foo.bar = baz ~~> foo.updateDynamic(bar)(baz)
*/
- def typedDynamicAssign(qual: untpd.Tree, name: Name, rhs: untpd.Tree, pt: Type)(implicit ctx: Context): Tree =
- typedApply(untpd.Apply(coreDynamic(qual, nme.updateDynamic, name), rhs), pt)
+ def typedDynamicAssign(tree: untpd.Assign, pt: Type)(implicit ctx: Context): Tree = {
+ def typedDynamicAssign(qual: untpd.Tree, name: Name, targs: List[untpd.Tree]): Tree =
+ typedApply(untpd.Apply(coreDynamic(qual, nme.updateDynamic, name, targs), tree.rhs), pt)
+ tree.lhs match {
+ case Select(qual, name) if !isDynamicMethod(name) =>
+ typedDynamicAssign(qual, name, Nil)
+ case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
+ typedDynamicAssign(qual, name, targs)
+ case _ =>
+ ctx.error("reassignment to val", tree.pos)
+ tree.withType(ErrorType)
+ }
+ }
- private def coreDynamic(qual: untpd.Tree, dynName: Name, name: Name)(implicit ctx: Context): untpd.Apply =
- untpd.Apply(untpd.Select(qual, dynName), Literal(Constant(name.toString)))
+ private def coreDynamic(qual: untpd.Tree, dynName: Name, name: Name, targs: List[untpd.Tree])(implicit ctx: Context): untpd.Apply = {
+ val select = untpd.Select(qual, dynName)
+ val selectWithTypes =
+ if (targs.isEmpty) select
+ else untpd.TypeApply(select, targs)
+ untpd.Apply(selectWithTypes, Literal(Constant(name.toString)))
+ }
}
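The rewrite rules above, including the new type-argument forms, can be exercised with a small `scala.Dynamic` receiver. This is a hypothetical sketch; `selectDynamic`/`applyDynamic` are the standard member names the protocol expects.

    import scala.language.dynamics

    object DynDemo {
      class Rec extends Dynamic {
        def selectDynamic(name: String): String = s"select $name"
        def applyDynamic[T](name: String)(args: Any*): String = s"apply $name(${args.mkString(", ")})"
      }

      val r = new Rec
      val a = r.foo          // ~~> r.selectDynamic("foo")
      val b = r.bar(1, 2)    // ~~> r.applyDynamic("bar")(1, 2)
      val c = r.baz[Int](3)  // ~~> r.applyDynamic[Int]("baz")(3), the form this change adds
    }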
diff --git a/src/dotty/tools/dotc/typer/FrontEnd.scala b/src/dotty/tools/dotc/typer/FrontEnd.scala
index c5c6aec3c..e193b126a 100644
--- a/src/dotty/tools/dotc/typer/FrontEnd.scala
+++ b/src/dotty/tools/dotc/typer/FrontEnd.scala
@@ -57,7 +57,7 @@ class FrontEnd extends Phase {
case _ => NoSymbol
}
- private def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
+ protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
unit.isJava || firstTopLevelDef(unit.tpdTree :: Nil).isPrimitiveValueClass
override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala
index 0a3307140..2a1c18f7d 100644
--- a/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/src/dotty/tools/dotc/typer/Implicits.scala
@@ -801,14 +801,15 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) {
def updateMap(csyms: List[ClassSymbol], seen: Map[ClassSymbol, Int]): SearchHistory = csyms match {
case csym :: csyms1 =>
seen get csym match {
+ // proto complexity is >= the complexity recorded the last time it was seen → diverge
case Some(prevSize) if size >= prevSize => this
case _ => updateMap(csyms1, seen.updated(csym, size))
}
- case nil =>
- if (csyms.isEmpty) this
- else new SearchHistory(searchDepth + 1, seen)
+ case _ =>
+ new SearchHistory(searchDepth + 1, seen)
}
- updateMap(proto.classSymbols, seen)
+ if (proto.classSymbols.isEmpty) this
+ else updateMap(proto.classSymbols, seen)
}
}
}
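A hedged sketch (hypothetical types) of the divergence this bookkeeping guards against: each implicit step needs a strictly larger instance built from the same class symbol, so the recorded complexity never decreases and the search is reported as diverging rather than recursing forever.

    object DivergenceDemo {
      case class Wrap[T](value: T)

      // Searching for Wrap[Int] requires Wrap[Wrap[Int]], then Wrap[Wrap[Wrap[Int]]], ...
      implicit def deeper[T](implicit inner: Wrap[Wrap[T]]): Wrap[T] = Wrap(inner.value.value)

      // implicitly[Wrap[Int]]   // left commented out: it would be rejected as a diverging search
    }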
diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala
index 7c61f8c23..719e8d7fc 100644
--- a/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -78,7 +78,8 @@ object Inferencing {
def apply(x: Boolean, tp: Type): Boolean = tp.dealias match {
case _: WildcardType | _: ProtoType =>
false
- case tvar: TypeVar if !tvar.isInstantiated =>
+ case tvar: TypeVar
+ if !tvar.isInstantiated && ctx.typerState.constraint.contains(tvar) =>
force.appliesTo(tvar) && {
val direction = instDirection(tvar.origin)
if (direction != 0) {
diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala
index 698f7e9a9..d90f37860 100644
--- a/src/dotty/tools/dotc/typer/Namer.scala
+++ b/src/dotty/tools/dotc/typer/Namer.scala
@@ -414,6 +414,16 @@ class Namer { typer: Typer =>
case mdef: DefTree =>
val sym = enterSymbol(createSymbol(mdef))
setDocstring(sym, stat)
+
+ // add java enum constants
+ mdef match {
+ case vdef: ValDef if (isEnumConstant(vdef)) =>
+ val enumClass = sym.owner.linkedClass
+ if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
+ enumClass.addAnnotation(Annotation.makeChild(sym))
+ case _ =>
+ }
+
ctx
case stats: Thicket =>
for (tree <- stats.toList) {
@@ -425,8 +435,26 @@ class Namer { typer: Typer =>
ctx
}
+ /** Determines whether this field holds an enum constant.
+ * To qualify, the following conditions must be met:
+ * - The field's class has the ENUM flag set
+ * - The field's class extends java.lang.Enum
+ * - The field has the ENUM flag set
+ * - The field is static
+ * - The field is stable
+ */
+ def isEnumConstant(vd: ValDef)(implicit ctx: Context) = {
+ // val ownerHasEnumFlag =
+ // Necessary to check because scalac puts Java's static members into the companion object
+ // while Scala's enum constants live directly in the class.
+ // We don't check for clazz.superClass == JavaEnumClass, because this causes an illegal
+ // cyclic reference error. See the commit message for details.
+ // if (ctx.compilationUnit.isJava) ctx.owner.companionClass.is(Enum) else ctx.owner.is(Enum)
+ vd.mods.is(allOf(Enum, Stable, JavaStatic, JavaDefined)) // && ownerHasEnumFlag
+ }
+
def setDocstring(sym: Symbol, tree: Tree)(implicit ctx: Context) = tree match {
- case t: MemberDef => ctx.base.addDocstring(sym, t.rawComment)
+ case t: MemberDef => ctx.docbase.addDocstring(sym, t.rawComment)
case _ => ()
}
@@ -698,7 +726,7 @@ class Namer { typer: Typer =>
// the parent types are elaborated.
index(constr)
symbolOfTree(constr).ensureCompleted()
-
+
index(rest)(inClassContext(selfInfo))
val tparamAccessors = decls.filter(_ is TypeParamAccessor).toList
@@ -779,20 +807,27 @@ class Namer { typer: Typer =>
lazy val schema = paramFn(WildcardType)
val site = sym.owner.thisType
((NoType: Type) /: sym.owner.info.baseClasses.tail) { (tp, cls) =>
- val iRawInfo =
- cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema).info
- val iInstInfo = iRawInfo match {
- case iRawInfo: PolyType =>
- if (iRawInfo.paramNames.length == typeParams.length)
- iRawInfo.instantiate(typeParams map (_.typeRef))
+ def instantiatedResType(info: Type, tparams: List[Symbol], paramss: List[List[Symbol]]): Type = info match {
+ case info: PolyType =>
+ if (info.paramNames.length == typeParams.length)
+ instantiatedResType(info.instantiate(tparams.map(_.typeRef)), Nil, paramss)
else NoType
+ case info: MethodType =>
+ paramss match {
+ case params :: paramss1 if info.paramNames.length == params.length =>
+ instantiatedResType(info.instantiate(params.map(_.termRef)), tparams, paramss1)
+ case _ =>
+ NoType
+ }
case _ =>
- if (typeParams.isEmpty) iRawInfo
+ if (tparams.isEmpty && paramss.isEmpty) info.widenExpr
else NoType
}
- val iResType = iInstInfo.finalResultType.asSeenFrom(site, cls)
+ val iRawInfo =
+ cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema).info
+ val iResType = instantiatedResType(iRawInfo, typeParams, paramss).asSeenFrom(site, cls)
if (iResType.exists)
- typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inst: $iInstInfo, inherited: $iResType")
+ typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inherited: $iResType")
tp & iResType
}
}
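A hedged illustration of what registering Java enum constants as children enables downstream (real JDK enum, hypothetical user code): because each constant becomes a child annotation of the now AbstractSealed enum class, the exhaustivity checker can decompose the enum type.

    import java.time.DayOfWeek

    object EnumMatchDemo {
      def isWeekend(d: DayOfWeek): Boolean = d match {
        case DayOfWeek.SATURDAY | DayOfWeek.SUNDAY => true
        case _                                     => false
        // dropping the wildcard would let the checker warn about the five missing weekday constants
      }
    }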
diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala
index 767ccbe7d..f209c99be 100644
--- a/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -172,6 +172,9 @@ object ProtoTypes {
/** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */
private var myTypedArg: SimpleMap[untpd.Tree, Tree] = SimpleMap.Empty
+ /** A map recording the typer states in which arguments stored in myTypedArg were typed */
+ private var evalState: SimpleMap[untpd.Tree, TyperState] = SimpleMap.Empty
+
def isMatchedBy(tp: Type)(implicit ctx: Context) =
typer.isApplicable(tp, Nil, typedArgs, resultType)
@@ -179,17 +182,42 @@ object ProtoTypes {
if ((args eq this.args) && (resultType eq this.resultType) && (typer eq this.typer)) this
else new FunProto(args, resultType, typer)
- def argsAreTyped: Boolean = myTypedArgs.size == args.length
+ /** Forget the types of any arguments whose typing produced a constraint in a
+ * typer state that is not yet committed into that of the current context `ctx`.
+ * This is necessary to avoid "orphan" PolyParams that are referred to from
+ * type variables in the typed arguments, but that are not registered in the
+ * current constraint. A test case is pos/t1756.scala.
+ * @return True if all arguments have types (in particular, no types were forgotten).
+ */
+ def allArgTypesAreCurrent()(implicit ctx: Context): Boolean = {
+ evalState foreachBinding { (arg, tstate) =>
+ if (tstate.uncommittedAncestor.constraint ne ctx.typerState.constraint) {
+ typr.println(i"need to invalidate $arg / ${myTypedArg(arg)}, ${tstate.constraint}, current = ${ctx.typerState.constraint}")
+ myTypedArg = myTypedArg.remove(arg)
+ evalState = evalState.remove(arg)
+ }
+ }
+ myTypedArg.size == args.length
+ }
+
+ private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree)(implicit ctx: Context): Tree = {
+ var targ = myTypedArg(arg)
+ if (targ == null) {
+ targ = typerFn(arg)
+ if (!ctx.reporter.hasPending) {
+ myTypedArg = myTypedArg.updated(arg, targ)
+ evalState = evalState.updated(arg, ctx.typerState)
+ }
+ }
+ targ
+ }
/** The typed arguments. This takes any arguments already typed using
* `typedArg` into account.
*/
def typedArgs: List[Tree] = {
- if (!argsAreTyped)
- myTypedArgs = args mapconserve { arg =>
- val targ = myTypedArg(arg)
- if (targ != null) targ else typer.typed(arg)
- }
+ if (myTypedArgs.size != args.length)
+ myTypedArgs = args.mapconserve(cacheTypedArg(_, typer.typed(_)))
myTypedArgs
}
@@ -197,11 +225,7 @@ object ProtoTypes {
* used to avoid repeated typings of trees when backtracking.
*/
def typedArg(arg: untpd.Tree, formal: Type)(implicit ctx: Context): Tree = {
- var targ = myTypedArg(arg)
- if (targ == null) {
- targ = typer.typedUnadapted(arg, formal)
- if (!ctx.reporter.hasPending) myTypedArg = myTypedArg.updated(arg, targ)
- }
+ val targ = cacheTypedArg(arg, typer.typedUnadapted(_, formal))
typer.adapt(targ, formal, arg)
}
@@ -237,7 +261,6 @@ object ProtoTypes {
*/
class FunProtoTyped(args: List[tpd.Tree], resultType: Type, typer: Typer)(implicit ctx: Context) extends FunProto(args, resultType, typer)(ctx) {
override def typedArgs = args
- override def argsAreTyped = true
}
/** A prototype for implicitly inferred views:
diff --git a/src/dotty/tools/dotc/typer/RefChecks.scala b/src/dotty/tools/dotc/typer/RefChecks.scala
index 2838866fd..1f150c519 100644
--- a/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -525,7 +525,7 @@ object RefChecks {
subclassMsg(concreteSym, abstractSym)
else ""
- undefined(s"\n(Note that $pa does not match $pc$addendum)")
+ undefined(s"\n(Note that ${pa.show} does not match ${pc.show}$addendum)")
case xs =>
undefined(s"\n(The class implements a member with a different type: ${concrete.showDcl})")
}
diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala
index a6e2deb23..e1c9850d9 100644
--- a/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -168,7 +168,9 @@ trait TypeAssigner {
val d2 = pre.nonPrivateMember(name)
if (reallyExists(d2) && firstTry)
test(tpe.shadowed.withDenot(d2), false)
- else {
+ else if (pre.derivesFrom(defn.DynamicClass)) {
+ TryDynamicCallType
+ } else {
val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists)
val what = alts match {
case Nil =>
@@ -321,21 +323,30 @@ trait TypeAssigner {
case pt: PolyType =>
val paramNames = pt.paramNames
if (hasNamedArg(args)) {
- val argMap = new mutable.HashMap[Name, Type]
+ // Type arguments which are specified by name (immutable after this first loop)
+ val namedArgMap = new mutable.HashMap[Name, Type]
for (NamedArg(name, arg) <- args)
- if (argMap.contains(name))
+ if (namedArgMap.contains(name))
ctx.error("duplicate name", arg.pos)
else if (!paramNames.contains(name))
ctx.error(s"undefined parameter name, required: ${paramNames.mkString(" or ")}", arg.pos)
else
- argMap(name) = arg.tpe
+ namedArgMap(name) = arg.tpe
+
+ // Holds indexes of non-named typed arguments in paramNames
val gapBuf = new mutable.ListBuffer[Int]
- def nextPoly = {
- val idx = gapBuf.length
+ def nextPoly(idx: Int) = {
+ val newIndex = gapBuf.length
gapBuf += idx
- PolyParam(pt, idx)
+ // Re-index unassigned type arguments that remain after transformation
+ PolyParam(pt, newIndex)
}
- val normArgs = paramNames.map(pname => argMap.getOrElse(pname, nextPoly))
+
+ // Type parameters after naming assignment, conserving paramNames order
+ val normArgs: List[Type] = paramNames.zipWithIndex.map { case (pname, idx) =>
+ namedArgMap.getOrElse(pname, nextPoly(idx))
+ }
+
val transform = new TypeMap {
def apply(t: Type) = t match {
case PolyParam(`pt`, idx) => normArgs(idx)
@@ -347,19 +358,20 @@ trait TypeAssigner {
else {
val gaps = gapBuf.toList
pt.derivedPolyType(
- gaps.map(paramNames.filterNot(argMap.contains)),
+ gaps.map(paramNames),
gaps.map(idx => transform(pt.paramBounds(idx)).bounds),
resultType1)
}
}
else {
val argTypes = args.tpes
- if (sameLength(argTypes, paramNames)|| ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
+ if (sameLength(argTypes, paramNames) || ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
else wrongNumberOfArgs(fn.tpe, "type ", pt.paramNames.length, tree.pos)
}
case _ =>
errorType(i"${err.exprStr(fn)} does not take type parameters", tree.pos)
}
+
tree.withType(ownType)
}
@@ -383,8 +395,8 @@ trait TypeAssigner {
def assignType(tree: untpd.Closure, meth: Tree, target: Tree)(implicit ctx: Context) =
tree.withType(
- if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length)
- else target.tpe)
+ if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length)
+ else target.tpe)
def assignType(tree: untpd.CaseDef, body: Tree)(implicit ctx: Context) =
tree.withType(body.tpe)
@@ -488,7 +500,7 @@ trait TypeAssigner {
tree.withType(sym.nonMemberTermRef)
def assignType(tree: untpd.Annotated, annot: Tree, arg: Tree)(implicit ctx: Context) =
- tree.withType(AnnotatedType(arg.tpe, Annotation(annot)))
+ tree.withType(AnnotatedType(arg.tpe.widen, Annotation(annot)))
def assignType(tree: untpd.PackageDef, pid: Tree)(implicit ctx: Context) =
tree.withType(pid.symbol.valRef)
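The namedArgMap/gap handling above serves named type arguments; a hedged sketch, assuming the `f[Name = Type]` syntax this code path is written for is accepted by the front end:

    object NamedTypeArgsDemo {
      def pair[A, B](a: A, b: B): (A, B) = (a, b)

      // Hypothetical: only B is named, so A is left as a "gap" and re-indexed into a
      // fresh PolyParam, which is the bookkeeping gapBuf/normArgs performs above.
      // val p = pair[B = String](1, "one")

      val q = pair[Int, String](1, "one")   // fully positional form for comparison
    }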
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index 976f16289..3f8c421d9 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -363,12 +363,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val qual1 = typedExpr(tree.qualifier, selectionProto(tree.name, pt, this))
if (tree.name.isTypeName) checkStable(qual1.tpe, qual1.pos)
val select = typedSelect(tree, pt, qual1)
- pt match {
- case _: FunProto | AssignProto => select
- case _ =>
- if (select.tpe eq TryDynamicCallType) typedDynamicSelect(tree, pt)
- else select
- }
+ if (select.tpe ne TryDynamicCallType) select
+ else if (pt.isInstanceOf[PolyProto] || pt.isInstanceOf[FunProto] || pt == AssignProto) select
+ else typedDynamicSelect(tree, Nil, pt)
}
def asJavaSelectFromTypeTree(implicit ctx: Context): Tree = {
@@ -392,11 +389,17 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
- if (ctx.compilationUnit.isJava && tree.name.isTypeName) {
+ def selectWithFallback(fallBack: => Tree) =
+ tryEither(tryCtx => asSelect(tryCtx))((_, _) => fallBack)
+
+ if (ctx.compilationUnit.isJava && tree.name.isTypeName)
// SI-3120 Java uses the same syntax, A.B, to express selection from the
// value A and from the type A. We have to try both.
- tryEither(tryCtx => asSelect(tryCtx))((_, _) => asJavaSelectFromTypeTree(ctx))
- } else asSelect(ctx)
+ selectWithFallback(asJavaSelectFromTypeTree(ctx))
+ else if (tree.name == nme.withFilter && tree.getAttachment(desugar.MaybeFilter).isDefined)
+ selectWithFallback(typedSelect(untpd.cpy.Select(tree)(tree.qualifier, nme.filter), pt))
+ else
+ asSelect(ctx)
}
def typedSelectFromTypeTree(tree: untpd.SelectFromTypeTree, pt: Type)(implicit ctx: Context): Tree = track("typedSelectFromTypeTree") {
@@ -558,11 +561,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
reassignmentToVal
}
case TryDynamicCallType =>
- tree match {
- case Assign(Select(qual, name), rhs) if !isDynamicMethod(name) =>
- typedDynamicAssign(qual, name, rhs, pt)
- case _ => reassignmentToVal
- }
+ typedDynamicAssign(tree, pt)
case tpe =>
reassignmentToVal
}
@@ -787,7 +786,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
tree.selector match {
case EmptyTree =>
val (protoFormals, _) = decomposeProtoFunction(pt, 1)
- typed(desugar.makeCaseLambda(tree.cases, protoFormals.length) withPos tree.pos, pt)
+ val unchecked = pt <:< defn.PartialFunctionType
+ typed(desugar.makeCaseLambda(tree.cases, protoFormals.length, unchecked) withPos tree.pos, pt)
case _ =>
val sel1 = typedExpr(tree.selector)
val selType = widenForMatchSelector(
@@ -934,7 +934,11 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(implicit ctx: Context): SeqLiteral = track("typedSeqLiteral") {
- val proto1 = pt.elemType orElse WildcardType
+ val proto1 = pt.elemType match {
+ case NoType => WildcardType
+ case bounds: TypeBounds => WildcardType(bounds)
+ case elemtp => elemtp
+ }
val elems1 = tree.elems mapconserve (typed(_, proto1))
val proto2 = // the computed type of the `elemtpt` field
if (!tree.elemtpt.isEmpty) WildcardType
@@ -1111,8 +1115,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(implicit ctx: Context): Unit = {
// necessary to force annotation trees to be computed.
sym.annotations.foreach(_.tree)
+ val annotCtx = ctx.outersIterator.dropWhile(_.owner == sym).next
// necessary in order to mark the typed ahead annotations as definitely typed:
- untpd.modsDeco(mdef).mods.annotations.foreach(typedAnnotation)
+ untpd.modsDeco(mdef).mods.annotations.foreach(typedAnnotation(_)(annotCtx))
}
def typedAnnotation(annot: untpd.Tree)(implicit ctx: Context): Tree = track("typedAnnotation") {
@@ -1526,11 +1531,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val sel = typedSelect(untpd.Select(untpd.TypedSplice(tree), nme.apply), pt)
if (sel.tpe.isError) sel else adapt(sel, pt)
} { (failedTree, failedState) =>
- tryInsertImplicitOnQualifier(tree, pt) match {
- case Some(tree1) => adapt(tree1, pt)
- case none => fallBack(failedTree, failedState)
- }
- }
+ tryInsertImplicitOnQualifier(tree, pt).getOrElse(fallBack(failedTree, failedState))
+ }
/** If this tree is a select node `qual.name`, try to insert an implicit conversion
* `c` around `qual` so that `c(qual).name` conforms to `pt`. If that fails
@@ -1543,7 +1545,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
tryEither { implicit ctx =>
val qual1 = adaptInterpolated(qual, qualProto, EmptyTree)
if ((qual eq qual1) || ctx.reporter.hasErrors) None
- else Some(typedSelect(cpy.Select(tree)(untpd.TypedSplice(qual1), name), pt))
+ else Some(typed(cpy.Select(tree)(untpd.TypedSplice(qual1), name), pt))
} { (_, _) => None
}
case _ => None
@@ -1763,6 +1765,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
else
missingArgs
case _ =>
+ ctx.typeComparer.GADTused = false
if (ctx.mode is Mode.Pattern) {
tree match {
case _: RefTree | _: Literal if !isVarPattern(tree) =>
@@ -1771,7 +1774,15 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
tree
}
- else if (tree.tpe <:< pt) tree
+ else if (tree.tpe <:< pt)
+ if (ctx.typeComparer.GADTused && pt.isValueType)
+ // Insert an explicit cast, so that -Ycheck in later phases succeeds.
+ // I suspect, but am not 100% sure that this might affect inferred types,
+ // if the expected type is a supertype of the GADT bound. It would be good to come
+ // up with a test case for this.
+ tree.asInstance(pt)
+ else
+ tree
else if (wtp.isInstanceOf[MethodType]) missingArgs
else {
typr.println(i"adapt to subtype ${tree.tpe} !<:< $pt")
diff --git a/src/dotty/tools/dotc/util/DiffUtil.scala b/src/dotty/tools/dotc/util/DiffUtil.scala
index b28f36382..b7c77ad62 100644
--- a/src/dotty/tools/dotc/util/DiffUtil.scala
+++ b/src/dotty/tools/dotc/util/DiffUtil.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc.util
import scala.annotation.tailrec
-import difflib._
+import scala.collection.mutable
object DiffUtil {
@@ -13,9 +13,8 @@ object DiffUtil {
private final val ADDITION_COLOR = ANSI_GREEN
def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = {
- import scala.collection.JavaConversions._
- @tailrec def split(str: String, acc: List[String]): List[String] = {
+ @tailrec def splitTokens(str: String, acc: List[String] = Nil): List[String] = {
if (str == "") {
acc.reverse
} else {
@@ -30,38 +29,119 @@ object DiffUtil {
!Character.isMirrored(c) && !Character.isWhitespace(c)
}
}
- split(rest, token :: acc)
+ splitTokens(rest, token :: acc)
}
}
- val lines = split(code, Nil).toArray
- val diff = DiffUtils.diff(split(lastCode, Nil), lines.toList)
+ val tokens = splitTokens(code, Nil).toArray
+ val lastTokens = splitTokens(lastCode, Nil).toArray
- for (delta <- diff.getDeltas) {
- val pos = delta.getRevised.getPosition
- val endPos = pos + delta.getRevised.getLines.size - 1
+ val diff = hirschberg(lastTokens, tokens)
- delta.getType.toString match { // Issue #1355 forces us to use the toString
- case "INSERT" =>
- lines(pos) = ADDITION_COLOR + lines(pos)
- lines(endPos) = lines(endPos) + ANSI_DEFAULT
+ diff.collect {
+ case Unmodified(str) => str
+ case Inserted(str) => ADDITION_COLOR + str + ANSI_DEFAULT
+ case Modified(old, str) if printDiffDel => DELETION_COLOR + old + ADDITION_COLOR + str + ANSI_DEFAULT
+ case Modified(_, str) => ADDITION_COLOR + str + ANSI_DEFAULT
+ case Deleted(str) if printDiffDel => DELETION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+ }
- case "CHANGE" =>
- val old = if (!printDiffDel) "" else
- DELETION_COLOR + delta.getOriginal.getLines.mkString + ANSI_DEFAULT
- lines(pos) = old + ADDITION_COLOR + lines(pos)
- lines(endPos) = lines(endPos) + ANSI_DEFAULT
+ private sealed trait Patch
+ private final case class Unmodified(str: String) extends Patch
+ private final case class Modified(original: String, str: String) extends Patch
+ private final case class Deleted(str: String) extends Patch
+ private final case class Inserted(str: String) extends Patch
- case "DELETE" if printDiffDel =>
- val deleted = delta.getOriginal.getLines.mkString
- if (!deleted.forall(Character.isWhitespace)) {
- lines(pos) = DELETION_COLOR + deleted + ANSI_DEFAULT + lines(pos)
- }
+ private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = {
+ def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
+ if (x.isEmpty) {
+ builder += Inserted(y.mkString)
+ } else if (y.isEmpty) {
+ builder += Deleted(x.mkString)
+ } else if (x.length == 1 || y.length == 1) {
+ needlemanWunsch(x, y, builder)
+ } else {
+ val xlen = x.length
+ val xmid = xlen / 2
+ val ylen = y.length
+
+ val (x1, x2) = x.splitAt(xmid)
+ val leftScore = nwScore(x1, y)
+ val rightScore = nwScore(x2.reverse, y.reverse)
+ val scoreSum = (leftScore zip rightScore.reverse).map {
+ case (left, right) => left + right
+ }
+ val max = scoreSum.max
+ val ymid = scoreSum.indexOf(max)
- case _ =>
+ val (y1, y2) = y.splitAt(ymid)
+ build(x1, y1, builder)
+ build(x2, y2, builder)
}
}
+ val builder = Array.newBuilder[Patch]
+ build(a, b, builder)
+ builder.result()
+ }
+
+ private def nwScore(x: Array[String], y: Array[String]): Array[Int] = {
+ def ins(s: String) = -2
+ def del(s: String) = -2
+ def sub(s1: String, s2: String) = if (s1 == s2) 2 else -1
- lines.mkString
+ val score = Array.fill(x.length + 1, y.length + 1)(0)
+ for (j <- 1 to y.length)
+ score(0)(j) = score(0)(j - 1) + ins(y(j - 1))
+ for (i <- 1 to x.length) {
+ score(i)(0) = score(i - 1)(0) + del(x(i - 1))
+ for (j <- 1 to y.length) {
+ val scoreSub = score(i - 1)(j - 1) + sub(x(i - 1), y(j - 1))
+ val scoreDel = score(i - 1)(j) + del(x(i - 1))
+ val scoreIns = score(i)(j - 1) + ins(y(j - 1))
+ score(i)(j) = scoreSub max scoreDel max scoreIns
+ }
+ }
+ Array.tabulate(y.length + 1)(j => score(x.length)(j))
}
+
+ private def needlemanWunsch(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
+ def similarity(a: String, b: String) = if (a == b) 2 else -1
+ val d = 1
+ val score = Array.tabulate(x.length + 1, y.length + 1) { (i, j) =>
+ if (i == 0) d * j
+ else if (j == 0) d * i
+ else 0
+ }
+ for (i <- 1 to x.length) {
+ for (j <- 1 to y.length) {
+ val mtch = score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))
+ val delete = score(i - 1)(j) + d
+ val insert = score(i)(j - 1) + d
+ score(i)(j) = mtch max insert max delete
+ }
+ }
+
+ var alignment = List.empty[Patch]
+ var i = x.length
+ var j = y.length
+ while (i > 0 || j > 0) {
+ if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) {
+ val newHead =
+ if (x(i - 1) == y(j - 1)) Unmodified(x(i - 1))
+ else Modified(x(i - 1), y(j - 1))
+ alignment = newHead :: alignment
+ i = i - 1
+ j = j - 1
+ } else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) {
+ alignment = Deleted(x(i - 1)) :: alignment
+ i = i - 1
+ } else {
+ alignment = Inserted(y(j - 1)) :: alignment
+ j = j - 1
+ }
+ }
+ builder ++= alignment
+ }
+
}
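Note: the hunk above replaces the java-diff-utils line diff with a token-level Hirschberg diff. The split-point selection inside hirschberg can be exercised in isolation; the sketch below copies nwScore verbatim from the new code and combines the forward and reverse score rows exactly as hirschberg does to pick ymid. This is a standalone illustration only; the object name HirschbergSplitDemo and the sample token arrays are made up and are not part of the patch.

    object HirschbergSplitDemo {
      // Linear-space Needleman-Wunsch scoring row, copied from DiffUtil.nwScore above.
      def nwScore(x: Array[String], y: Array[String]): Array[Int] = {
        def ins(s: String) = -2
        def del(s: String) = -2
        def sub(s1: String, s2: String) = if (s1 == s2) 2 else -1
        val score = Array.fill(x.length + 1, y.length + 1)(0)
        for (j <- 1 to y.length)
          score(0)(j) = score(0)(j - 1) + ins(y(j - 1))
        for (i <- 1 to x.length) {
          score(i)(0) = score(i - 1)(0) + del(x(i - 1))
          for (j <- 1 to y.length) {
            val scoreSub = score(i - 1)(j - 1) + sub(x(i - 1), y(j - 1))
            val scoreDel = score(i - 1)(j) + del(x(i - 1))
            val scoreIns = score(i)(j - 1) + ins(y(j - 1))
            score(i)(j) = scoreSub max scoreDel max scoreIns
          }
        }
        Array.tabulate(y.length + 1)(j => score(x.length)(j))
      }

      def main(args: Array[String]): Unit = {
        // Two small token sequences, as splitTokens would produce them.
        val last = Array("val", " ", "x", " ", "=", " ", "1")
        val code = Array("val", " ", "y", " ", "=", " ", "1")
        val xmid = last.length / 2
        val (x1, x2) = last.splitAt(xmid)
        // Score the left half forwards and the right half backwards, then add;
        // the maximum of the sum is where hirschberg splits the second sequence.
        val scoreSum = (nwScore(x1, code) zip nwScore(x2.reverse, code.reverse).reverse)
          .map { case (l, r) => l + r }
        val ymid = scoreSum.indexOf(scoreSum.max)
        println(s"split at $ymid: '${code.take(ymid).mkString}' | '${code.drop(ymid).mkString}'")
      }
    }

The recursion then applies the same procedure to the (x1, y-prefix) and (x2, y-suffix) halves, falling back to needlemanWunsch once either side is down to a single token, as in the build helper above.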
diff --git a/src/scala/compat/java8/JFunction.java b/src/scala/compat/java8/JFunction.java
index 8e5a77d47..d68805d08 100644
--- a/src/scala/compat/java8/JFunction.java
+++ b/src/scala/compat/java8/JFunction.java
@@ -11,96 +11,183 @@ public final class JFunction {
private JFunction() {}
public static <R> scala.Function0<R> func(JFunction0<R> f) { return f; }
public static scala.Function0<BoxedUnit> proc(JProcedure0 p) { return p; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<BoxedUnit> procSpecialized(JFunction0$mcV$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Byte> funcSpecialized(JFunction0$mcB$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Short> funcSpecialized(JFunction0$mcS$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Integer> funcSpecialized(JFunction0$mcI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Long> funcSpecialized(JFunction0$mcJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Character> funcSpecialized(JFunction0$mcC$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Float> funcSpecialized(JFunction0$mcF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Double> funcSpecialized(JFunction0$mcD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Boolean> funcSpecialized(JFunction0$mcZ$sp f) { return f; }
public static <T1, R> scala.Function1<T1, R> func(JFunction1<T1, R> f) { return f; }
public static <T1> scala.Function1<T1, BoxedUnit> proc(JProcedure1<T1> p) { return p; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, BoxedUnit> procSpecialized(JFunction1$mcVI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Boolean> funcSpecialized(JFunction1$mcZI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Integer> funcSpecialized(JFunction1$mcII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Float> funcSpecialized(JFunction1$mcFI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Long> funcSpecialized(JFunction1$mcJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Double> funcSpecialized(JFunction1$mcDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, BoxedUnit> procSpecialized(JFunction1$mcVJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Boolean> funcSpecialized(JFunction1$mcZJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Integer> funcSpecialized(JFunction1$mcIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Float> funcSpecialized(JFunction1$mcFJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Long> funcSpecialized(JFunction1$mcJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Double> funcSpecialized(JFunction1$mcDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, BoxedUnit> procSpecialized(JFunction1$mcVF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Boolean> funcSpecialized(JFunction1$mcZF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Integer> funcSpecialized(JFunction1$mcIF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Float> funcSpecialized(JFunction1$mcFF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Long> funcSpecialized(JFunction1$mcJF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Double> funcSpecialized(JFunction1$mcDF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, BoxedUnit> procSpecialized(JFunction1$mcVD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Boolean> funcSpecialized(JFunction1$mcZD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Integer> funcSpecialized(JFunction1$mcID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Float> funcSpecialized(JFunction1$mcFD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Long> funcSpecialized(JFunction1$mcJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Double> funcSpecialized(JFunction1$mcDD$sp f) { return f; }
public static <T1, T2, R> scala.Function2<T1, T2, R> func(JFunction2<T1, T2, R> f) { return f; }
public static <T1, T2> scala.Function2<T1, T2, BoxedUnit> proc(JProcedure2<T1, T2> p) { return p; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, BoxedUnit> procSpecialized(JFunction2$mcVII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Boolean> funcSpecialized(JFunction2$mcZII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Integer> funcSpecialized(JFunction2$mcIII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Float> funcSpecialized(JFunction2$mcFII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Long> funcSpecialized(JFunction2$mcJII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Double> funcSpecialized(JFunction2$mcDII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, BoxedUnit> procSpecialized(JFunction2$mcVIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Boolean> funcSpecialized(JFunction2$mcZIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Integer> funcSpecialized(JFunction2$mcIIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Float> funcSpecialized(JFunction2$mcFIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Long> funcSpecialized(JFunction2$mcJIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Double> funcSpecialized(JFunction2$mcDIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, BoxedUnit> procSpecialized(JFunction2$mcVID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Boolean> funcSpecialized(JFunction2$mcZID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Integer> funcSpecialized(JFunction2$mcIID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Float> funcSpecialized(JFunction2$mcFID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Long> funcSpecialized(JFunction2$mcJID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Double> funcSpecialized(JFunction2$mcDID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, BoxedUnit> procSpecialized(JFunction2$mcVJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Boolean> funcSpecialized(JFunction2$mcZJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Integer> funcSpecialized(JFunction2$mcIJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Float> funcSpecialized(JFunction2$mcFJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Long> funcSpecialized(JFunction2$mcJJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Double> funcSpecialized(JFunction2$mcDJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, BoxedUnit> procSpecialized(JFunction2$mcVJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Boolean> funcSpecialized(JFunction2$mcZJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Integer> funcSpecialized(JFunction2$mcIJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Float> funcSpecialized(JFunction2$mcFJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Long> funcSpecialized(JFunction2$mcJJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Double> funcSpecialized(JFunction2$mcDJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, BoxedUnit> procSpecialized(JFunction2$mcVJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Boolean> funcSpecialized(JFunction2$mcZJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Integer> funcSpecialized(JFunction2$mcIJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Float> funcSpecialized(JFunction2$mcFJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Long> funcSpecialized(JFunction2$mcJJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Double> funcSpecialized(JFunction2$mcDJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, BoxedUnit> procSpecialized(JFunction2$mcVDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Boolean> funcSpecialized(JFunction2$mcZDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Integer> funcSpecialized(JFunction2$mcIDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Float> funcSpecialized(JFunction2$mcFDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Long> funcSpecialized(JFunction2$mcJDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Double> funcSpecialized(JFunction2$mcDDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, BoxedUnit> procSpecialized(JFunction2$mcVDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Boolean> funcSpecialized(JFunction2$mcZDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Integer> funcSpecialized(JFunction2$mcIDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Float> funcSpecialized(JFunction2$mcFDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Long> funcSpecialized(JFunction2$mcJDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Double> funcSpecialized(JFunction2$mcDDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, BoxedUnit> procSpecialized(JFunction2$mcVDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Boolean> funcSpecialized(JFunction2$mcZDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Integer> funcSpecialized(JFunction2$mcIDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Float> funcSpecialized(JFunction2$mcFDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Long> funcSpecialized(JFunction2$mcJDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Double> funcSpecialized(JFunction2$mcDDD$sp f) { return f; }
public static <T1, T2, T3, R> scala.Function3<T1, T2, T3, R> func(JFunction3<T1, T2, T3, R> f) { return f; }
public static <T1, T2, T3> scala.Function3<T1, T2, T3, BoxedUnit> proc(JProcedure3<T1, T2, T3> p) { return p; }
diff --git a/src/scala/compat/java8/JFunction1.java b/src/scala/compat/java8/JFunction1.java
index cbd896282..69176cdea 100644
--- a/src/scala/compat/java8/JFunction1.java
+++ b/src/scala/compat/java8/JFunction1.java
@@ -11,229 +11,303 @@ public interface JFunction1<T1, R> extends scala.Function1<T1, R> {
};
@Override
+ @SuppressWarnings("unchecked")
default <A> scala.Function1<T1, A> andThen(scala.Function1<R, A> g) {
return scala.Function1$class.andThen(this, g);
}
@Override
+ @SuppressWarnings("unchecked")
default <A> scala.Function1<A, R> compose(scala.Function1<A, T1> g) {
return scala.Function1$class.compose(this, g);
}
+ @SuppressWarnings("unchecked")
default void apply$mcVI$sp(int v1) {
apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZI$sp(int v1) {
return (Boolean) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcII$sp(int v1) {
return (Integer) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFI$sp(int v1) {
return (Float) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJI$sp(int v1) {
return (Long) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDI$sp(int v1) {
return (Double) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJ$sp(long v1) {
apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJ$sp(long v1) {
return (Boolean) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJ$sp(long v1) {
return (Integer) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJ$sp(long v1) {
return (Float) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJ$sp(long v1) {
return (Long) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJ$sp(long v1) {
return (Double) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVF$sp(float v1) {
apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZF$sp(float v1) {
return (Boolean) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIF$sp(float v1) {
return (Integer) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFF$sp(float v1) {
return (Float) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJF$sp(float v1) {
return (Long) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDF$sp(float v1) {
return (Double) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVD$sp(double v1) {
apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZD$sp(double v1) {
return (Boolean) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcID$sp(double v1) {
return (Integer) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFD$sp(double v1) {
return (Float) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJD$sp(double v1) {
return (Long) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDD$sp(double v1) {
return (Double) apply((T1) ((Double) v1));
}
-
+
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcII$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcIJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcIF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcID$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcII$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcIJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcIF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcID$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDD$sp(scala.Function1 g) {
return andThen(g);
}
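Note: the @SuppressWarnings("unchecked") annotations added throughout JFunction1 (and the other JFunctionN shims below) silence unchecked warnings coming mostly from two sources: the specialized apply$mc*$sp bridges box their primitive argument and cast the box to the erased type parameter T1, and the raw scala.Function1 types used by the compose$mc*$sp / andThen$mc*$sp bridges trigger unchecked conversions. A rough Scala analogue of one specialized bridge, for illustration only; the method name applyMcIISp is made up:

    // Hypothetical analogue of apply$mcII$sp above: box the primitive, cast the
    // box to the erased type parameter, apply the generic function, unbox the result.
    def applyMcIISp[T1](f: T1 => Any, v1: Int): Int =
      f(Int.box(v1).asInstanceOf[T1]).asInstanceOf[java.lang.Integer].intValue

    // e.g. applyMcIISp[Integer]((i: Integer) => i + 1, 41) == 42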
diff --git a/src/scala/compat/java8/JFunction10.java b/src/scala/compat/java8/JFunction10.java
index c4d190760..8519ac223 100644
--- a/src/scala/compat/java8/JFunction10.java
+++ b/src/scala/compat/java8/JFunction10.java
@@ -10,10 +10,12 @@ public interface JFunction10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> extends
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, R>>>>>>>>>> curried() {
return scala.Function10$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>, R> tupled() {
return scala.Function10$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction11.java b/src/scala/compat/java8/JFunction11.java
index faa352cf7..032ee40af 100644
--- a/src/scala/compat/java8/JFunction11.java
+++ b/src/scala/compat/java8/JFunction11.java
@@ -10,10 +10,12 @@ public interface JFunction11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> ex
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, R>>>>>>>>>>> curried() {
return scala.Function11$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>, R> tupled() {
return scala.Function11$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction12.java b/src/scala/compat/java8/JFunction12.java
index 1d67e25c9..3f3eff492 100644
--- a/src/scala/compat/java8/JFunction12.java
+++ b/src/scala/compat/java8/JFunction12.java
@@ -10,10 +10,12 @@ public interface JFunction12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, R>>>>>>>>>>>> curried() {
return scala.Function12$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>, R> tupled() {
return scala.Function12$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction13.java b/src/scala/compat/java8/JFunction13.java
index 7872108d1..2bed6ee97 100644
--- a/src/scala/compat/java8/JFunction13.java
+++ b/src/scala/compat/java8/JFunction13.java
@@ -10,10 +10,12 @@ public interface JFunction13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, R>>>>>>>>>>>>> curried() {
return scala.Function13$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>, R> tupled() {
return scala.Function13$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction14.java b/src/scala/compat/java8/JFunction14.java
index 34b45d634..77fac237d 100644
--- a/src/scala/compat/java8/JFunction14.java
+++ b/src/scala/compat/java8/JFunction14.java
@@ -10,10 +10,12 @@ public interface JFunction14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, R>>>>>>>>>>>>>> curried() {
return scala.Function14$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>, R> tupled() {
return scala.Function14$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction15.java b/src/scala/compat/java8/JFunction15.java
index 55e4607fe..25adc1679 100644
--- a/src/scala/compat/java8/JFunction15.java
+++ b/src/scala/compat/java8/JFunction15.java
@@ -10,10 +10,12 @@ public interface JFunction15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, R>>>>>>>>>>>>>>> curried() {
return scala.Function15$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>, R> tupled() {
return scala.Function15$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction16.java b/src/scala/compat/java8/JFunction16.java
index d2795ebf5..fda1ea0c7 100644
--- a/src/scala/compat/java8/JFunction16.java
+++ b/src/scala/compat/java8/JFunction16.java
@@ -10,10 +10,12 @@ public interface JFunction16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, R>>>>>>>>>>>>>>>> curried() {
return scala.Function16$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>, R> tupled() {
return scala.Function16$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction17.java b/src/scala/compat/java8/JFunction17.java
index 9ab7715d1..89cb1b312 100644
--- a/src/scala/compat/java8/JFunction17.java
+++ b/src/scala/compat/java8/JFunction17.java
@@ -10,10 +10,12 @@ public interface JFunction17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, R>>>>>>>>>>>>>>>>> curried() {
return scala.Function17$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>, R> tupled() {
return scala.Function17$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction18.java b/src/scala/compat/java8/JFunction18.java
index fba19eb38..428cee961 100644
--- a/src/scala/compat/java8/JFunction18.java
+++ b/src/scala/compat/java8/JFunction18.java
@@ -10,10 +10,12 @@ public interface JFunction18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, R>>>>>>>>>>>>>>>>>> curried() {
return scala.Function18$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>, R> tupled() {
return scala.Function18$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction19.java b/src/scala/compat/java8/JFunction19.java
index f56551431..95e47df7e 100644
--- a/src/scala/compat/java8/JFunction19.java
+++ b/src/scala/compat/java8/JFunction19.java
@@ -10,10 +10,12 @@ public interface JFunction19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, R>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function19$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>, R> tupled() {
return scala.Function19$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction2.java b/src/scala/compat/java8/JFunction2.java
index aa023a19f..bad1493bb 100644
--- a/src/scala/compat/java8/JFunction2.java
+++ b/src/scala/compat/java8/JFunction2.java
@@ -10,499 +10,663 @@ public interface JFunction2<T1, T2, R> extends scala.Function2<T1, T2, R> {
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, R>> curried() {
return scala.Function2$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple2<T1, T2>, R> tupled() {
return scala.Function2$class.tupled(this);
}
+ @SuppressWarnings("unchecked")
default void apply$mcVII$sp(int v1, int v2) {
apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZII$sp(int v1, int v2) {
return (Boolean) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIII$sp(int v1, int v2) {
return (Integer) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFII$sp(int v1, int v2) {
return (Float) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJII$sp(int v1, int v2) {
return (Long) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDII$sp(int v1, int v2) {
return (Double) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVIJ$sp(int v1, long v2) {
apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZIJ$sp(int v1, long v2) {
return (Boolean) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIIJ$sp(int v1, long v2) {
return (Integer) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFIJ$sp(int v1, long v2) {
return (Float) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJIJ$sp(int v1, long v2) {
return (Long) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDIJ$sp(int v1, long v2) {
return (Double) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVID$sp(int v1, double v2) {
apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZID$sp(int v1, double v2) {
return (Boolean) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIID$sp(int v1, double v2) {
return (Integer) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFID$sp(int v1, double v2) {
return (Float) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJID$sp(int v1, double v2) {
return (Long) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDID$sp(int v1, double v2) {
return (Double) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJI$sp(long v1, int v2) {
apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJI$sp(long v1, int v2) {
return (Boolean) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJI$sp(long v1, int v2) {
return (Integer) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJI$sp(long v1, int v2) {
return (Float) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJI$sp(long v1, int v2) {
return (Long) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJI$sp(long v1, int v2) {
return (Double) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJJ$sp(long v1, long v2) {
apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJJ$sp(long v1, long v2) {
return (Boolean) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJJ$sp(long v1, long v2) {
return (Integer) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJJ$sp(long v1, long v2) {
return (Float) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJJ$sp(long v1, long v2) {
return (Long) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJJ$sp(long v1, long v2) {
return (Double) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJD$sp(long v1, double v2) {
apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJD$sp(long v1, double v2) {
return (Boolean) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJD$sp(long v1, double v2) {
return (Integer) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJD$sp(long v1, double v2) {
return (Float) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJD$sp(long v1, double v2) {
return (Long) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJD$sp(long v1, double v2) {
return (Double) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVDI$sp(double v1, int v2) {
apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZDI$sp(double v1, int v2) {
return (Boolean) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIDI$sp(double v1, int v2) {
return (Integer) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFDI$sp(double v1, int v2) {
return (Float) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJDI$sp(double v1, int v2) {
return (Long) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDDI$sp(double v1, int v2) {
return (Double) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVDJ$sp(double v1, long v2) {
apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZDJ$sp(double v1, long v2) {
return (Boolean) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIDJ$sp(double v1, long v2) {
return (Integer) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFDJ$sp(double v1, long v2) {
return (Float) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJDJ$sp(double v1, long v2) {
return (Long) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDDJ$sp(double v1, long v2) {
return (Double) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVDD$sp(double v1, double v2) {
apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZDD$sp(double v1, double v2) {
return (Boolean) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIDD$sp(double v1, double v2) {
return (Integer) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFDD$sp(double v1, double v2) {
return (Float) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJDD$sp(double v1, double v2) {
return (Long) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDDD$sp(double v1, double v2) {
return (Double) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDDD$sp() {
return tupled();
}
diff --git a/src/scala/compat/java8/JFunction20.java b/src/scala/compat/java8/JFunction20.java
index dd517c349..ed30b41ef 100644
--- a/src/scala/compat/java8/JFunction20.java
+++ b/src/scala/compat/java8/JFunction20.java
@@ -10,10 +10,12 @@ public interface JFunction20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, scala.Function1<T20, R>>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function20$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>, R> tupled() {
return scala.Function20$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction21.java b/src/scala/compat/java8/JFunction21.java
index c2e19b30a..aa6da8084 100644
--- a/src/scala/compat/java8/JFunction21.java
+++ b/src/scala/compat/java8/JFunction21.java
@@ -10,10 +10,12 @@ public interface JFunction21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, scala.Function1<T20, scala.Function1<T21, R>>>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function21$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>, R> tupled() {
return scala.Function21$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction22.java b/src/scala/compat/java8/JFunction22.java
index 76aa230ea..532145157 100644
--- a/src/scala/compat/java8/JFunction22.java
+++ b/src/scala/compat/java8/JFunction22.java
@@ -10,10 +10,12 @@ public interface JFunction22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, scala.Function1<T20, scala.Function1<T21, scala.Function1<T22, R>>>>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function22$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>, R> tupled() {
return scala.Function22$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction3.java b/src/scala/compat/java8/JFunction3.java
index 938e10a05..731608b2e 100644
--- a/src/scala/compat/java8/JFunction3.java
+++ b/src/scala/compat/java8/JFunction3.java
@@ -10,10 +10,12 @@ public interface JFunction3<T1, T2, T3, R> extends scala.Function3<T1, T2, T3, R
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, R>>> curried() {
return scala.Function3$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple3<T1, T2, T3>, R> tupled() {
return scala.Function3$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction4.java b/src/scala/compat/java8/JFunction4.java
index 08687fbfd..9bff4c835 100644
--- a/src/scala/compat/java8/JFunction4.java
+++ b/src/scala/compat/java8/JFunction4.java
@@ -10,10 +10,12 @@ public interface JFunction4<T1, T2, T3, T4, R> extends scala.Function4<T1, T2, T
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, R>>>> curried() {
return scala.Function4$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple4<T1, T2, T3, T4>, R> tupled() {
return scala.Function4$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction5.java b/src/scala/compat/java8/JFunction5.java
index 84b9b97cb..a5566a5ba 100644
--- a/src/scala/compat/java8/JFunction5.java
+++ b/src/scala/compat/java8/JFunction5.java
@@ -10,10 +10,12 @@ public interface JFunction5<T1, T2, T3, T4, T5, R> extends scala.Function5<T1, T
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, R>>>>> curried() {
return scala.Function5$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple5<T1, T2, T3, T4, T5>, R> tupled() {
return scala.Function5$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction6.java b/src/scala/compat/java8/JFunction6.java
index 957d5470d..b30fb5f47 100644
--- a/src/scala/compat/java8/JFunction6.java
+++ b/src/scala/compat/java8/JFunction6.java
@@ -10,10 +10,12 @@ public interface JFunction6<T1, T2, T3, T4, T5, T6, R> extends scala.Function6<T
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, R>>>>>> curried() {
return scala.Function6$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple6<T1, T2, T3, T4, T5, T6>, R> tupled() {
return scala.Function6$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction7.java b/src/scala/compat/java8/JFunction7.java
index 51352aba6..4a4a92cba 100644
--- a/src/scala/compat/java8/JFunction7.java
+++ b/src/scala/compat/java8/JFunction7.java
@@ -10,10 +10,12 @@ public interface JFunction7<T1, T2, T3, T4, T5, T6, T7, R> extends scala.Functio
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, R>>>>>>> curried() {
return scala.Function7$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple7<T1, T2, T3, T4, T5, T6, T7>, R> tupled() {
return scala.Function7$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction8.java b/src/scala/compat/java8/JFunction8.java
index 5e0a776ff..825236e48 100644
--- a/src/scala/compat/java8/JFunction8.java
+++ b/src/scala/compat/java8/JFunction8.java
@@ -10,10 +10,12 @@ public interface JFunction8<T1, T2, T3, T4, T5, T6, T7, T8, R> extends scala.Fun
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, R>>>>>>>> curried() {
return scala.Function8$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>, R> tupled() {
return scala.Function8$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction9.java b/src/scala/compat/java8/JFunction9.java
index dc9b8e71c..d0e40c316 100644
--- a/src/scala/compat/java8/JFunction9.java
+++ b/src/scala/compat/java8/JFunction9.java
@@ -10,10 +10,12 @@ public interface JFunction9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> extends scala
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, R>>>>>>>>> curried() {
return scala.Function9$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>, R> tupled() {
return scala.Function9$class.tupled(this);
}
diff --git a/test/dotc/build.scala b/test/dotc/build.scala
index 2963b8f1e..a805af7e5 100644
--- a/test/dotc/build.scala
+++ b/test/dotc/build.scala
@@ -26,6 +26,5 @@ object build extends tests {
dotty // build output dir
val p = Runtime.getRuntime.exec(Array("jar", "cf", "dotty.jar", "-C", "out", "."))
p.waitFor()
- p
}
}
diff --git a/test/dotc/tests.scala b/test/dotc/tests.scala
index 31e74fa97..9f95a30c1 100644
--- a/test/dotc/tests.scala
+++ b/test/dotc/tests.scala
@@ -110,6 +110,7 @@ class tests extends CompilerTest {
@Test def rewrites = compileFile(posScala2Dir, "rewrites", "-rewrite" :: scala2mode)
@Test def pos_859 = compileFile(posSpecialDir, "i859", scala2mode)(allowDeepSubtypes)
+ @Test def pos_t8146a = compileFile(posSpecialDir, "t8146a")(allowDeepSubtypes)
@Test def pos_t5545 = {
// compile by hand in two batches, since junit lacks the infrastructure to
diff --git a/test/test/DottyDocParsingTests.scala b/test/test/DottyDocParsingTests.scala
index b09d048da..ed89c6114 100644
--- a/test/test/DottyDocParsingTests.scala
+++ b/test/test/DottyDocParsingTests.scala
@@ -14,7 +14,7 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(c: TypeDef)) =>
- assert(c.rawComment == None, "Should not have a comment, mainly used for exhaustive tests")
+ assert(c.rawComment.map(_.chrs) == None, "Should not have a comment, mainly used for exhaustive tests")
}
}
@@ -29,7 +29,7 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" =>
- checkDocString(t.rawComment, "/** Hello world! */")
+ checkDocString(t.rawComment.map(_.chrs), "/** Hello world! */")
}
}
@@ -44,7 +44,7 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t @ TypeDef(name, _))) if name.toString == "Class" =>
- checkDocString(t.rawComment, "/** Hello /* multiple open */ world! */")
+ checkDocString(t.rawComment.map(_.chrs), "/** Hello /* multiple open */ world! */")
}
}
@Test def multipleClassesInPackage = {
@@ -62,8 +62,8 @@ class DottyDocParsingTests extends DottyDocTest {
checkCompile("frontend", source) { (_, ctx) =>
ctx.compilationUnit.untpdTree match {
case PackageDef(_, Seq(c1 @ TypeDef(_,_), c2 @ TypeDef(_,_))) => {
- checkDocString(c1.rawComment, "/** Class1 docstring */")
- checkDocString(c2.rawComment, "/** Class2 docstring */")
+ checkDocString(c1.rawComment.map(_.chrs), "/** Class1 docstring */")
+ checkDocString(c2.rawComment.map(_.chrs), "/** Class2 docstring */")
}
}
}
@@ -77,7 +77,7 @@ class DottyDocParsingTests extends DottyDocTest {
""".stripMargin
checkFrontend(source) {
- case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment, "/** Class without package */")
+ case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.chrs), "/** Class without package */")
}
}
@@ -85,7 +85,7 @@ class DottyDocParsingTests extends DottyDocTest {
val source = "/** Trait docstring */\ntrait Trait"
checkFrontend(source) {
- case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment, "/** Trait docstring */")
+ case PackageDef(_, Seq(t @ TypeDef(_,_))) => checkDocString(t.rawComment.map(_.chrs), "/** Trait docstring */")
}
}
@@ -101,8 +101,8 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t1 @ TypeDef(_,_), t2 @ TypeDef(_,_))) => {
- checkDocString(t1.rawComment, "/** Trait1 docstring */")
- checkDocString(t2.rawComment, "/** Trait2 docstring */")
+ checkDocString(t1.rawComment.map(_.chrs), "/** Trait1 docstring */")
+ checkDocString(t2.rawComment.map(_.chrs), "/** Trait2 docstring */")
}
}
}
@@ -127,10 +127,10 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(t1 @ TypeDef(_,_), c2 @ TypeDef(_,_), cc3 @ TypeDef(_,_), _, ac4 @ TypeDef(_,_))) => {
- checkDocString(t1.rawComment, "/** Trait1 docstring */")
- checkDocString(c2.rawComment, "/** Class2 docstring */")
- checkDocString(cc3.rawComment, "/** CaseClass3 docstring */")
- checkDocString(ac4.rawComment, "/** AbstractClass4 docstring */")
+ checkDocString(t1.rawComment.map(_.chrs), "/** Trait1 docstring */")
+ checkDocString(c2.rawComment.map(_.chrs), "/** Class2 docstring */")
+ checkDocString(cc3.rawComment.map(_.chrs), "/** CaseClass3 docstring */")
+ checkDocString(ac4.rawComment.map(_.chrs), "/** AbstractClass4 docstring */")
}
}
}
@@ -147,9 +147,9 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(outer @ TypeDef(_, tpl @ Template(_,_,_,_)))) => {
- checkDocString(outer.rawComment, "/** Outer docstring */")
+ checkDocString(outer.rawComment.map(_.chrs), "/** Outer docstring */")
tpl.body match {
- case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */")
+ case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */")
case _ => assert(false, "Couldn't find inner class")
}
}
@@ -171,10 +171,10 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case PackageDef(_, Seq(o1 @ TypeDef(_, tpl @ Template(_,_,_,_)), o2 @ TypeDef(_,_))) => {
- checkDocString(o1.rawComment, "/** Outer1 docstring */")
- checkDocString(o2.rawComment, "/** Outer2 docstring */")
+ checkDocString(o1.rawComment.map(_.chrs), "/** Outer1 docstring */")
+ checkDocString(o2.rawComment.map(_.chrs), "/** Outer2 docstring */")
tpl.body match {
- case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */")
+ case (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */")
case _ => assert(false, "Couldn't find inner class")
}
}
@@ -196,9 +196,9 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case p @ PackageDef(_, Seq(o1: MemberDef[Untyped], o2: MemberDef[Untyped])) => {
assertEquals(o1.name.toString, "Object1")
- checkDocString(o1.rawComment, "/** Object1 docstring */")
+ checkDocString(o1.rawComment.map(_.chrs), "/** Object1 docstring */")
assertEquals(o2.name.toString, "Object2")
- checkDocString(o2.rawComment, "/** Object2 docstring */")
+ checkDocString(o2.rawComment.map(_.chrs), "/** Object2 docstring */")
}
}
}
@@ -223,12 +223,12 @@ class DottyDocParsingTests extends DottyDocTest {
checkFrontend(source) {
case p @ PackageDef(_, Seq(o1: ModuleDef, o2: ModuleDef)) => {
assert(o1.name.toString == "Object1")
- checkDocString(o1.rawComment, "/** Object1 docstring */")
+ checkDocString(o1.rawComment.map(_.chrs), "/** Object1 docstring */")
assert(o2.name.toString == "Object2")
- checkDocString(o2.rawComment, "/** Object2 docstring */")
+ checkDocString(o2.rawComment.map(_.chrs), "/** Object2 docstring */")
o2.impl.body match {
- case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment, "/** Inner docstring */")
+ case _ :: (inner @ TypeDef(_,_)) :: _ => checkDocString(inner.rawComment.map(_.chrs), "/** Inner docstring */")
case _ => assert(false, "Couldn't find inner class")
}
}
@@ -257,14 +257,14 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case PackageDef(_, Seq(p: ModuleDef)) => {
- checkDocString(p.rawComment, "/** Package object docstring */")
+ checkDocString(p.rawComment.map(_.chrs), "/** Package object docstring */")
p.impl.body match {
case (b: TypeDef) :: (t: TypeDef) :: (o: ModuleDef) :: Nil => {
- checkDocString(b.rawComment, "/** Boo docstring */")
- checkDocString(t.rawComment, "/** Trait docstring */")
- checkDocString(o.rawComment, "/** InnerObject docstring */")
- checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment, "/** InnerClass docstring */")
+ checkDocString(b.rawComment.map(_.chrs), "/** Boo docstring */")
+ checkDocString(t.rawComment.map(_.chrs), "/** Trait docstring */")
+ checkDocString(o.rawComment.map(_.chrs), "/** InnerObject docstring */")
+ checkDocString(o.impl.body.head.asInstanceOf[TypeDef].rawComment.map(_.chrs), "/** InnerClass docstring */")
}
case _ => assert(false, "Incorrect structure inside package object")
}
@@ -284,7 +284,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case PackageDef(_, Seq(c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Real comment */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Real comment */")
}
}
@@ -303,7 +303,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case PackageDef(_, Seq(c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Real comment */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Real comment */")
}
}
@@ -329,9 +329,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** val1 */")
- checkDocString(v2.rawComment, "/** val2 */")
- checkDocString(v3.rawComment, "/** val3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** val1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** val2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** val3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -361,9 +361,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** var1 */")
- checkDocString(v2.rawComment, "/** var2 */")
- checkDocString(v3.rawComment, "/** var3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** var1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** var2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** var3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -393,9 +393,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** def1 */")
- checkDocString(v2.rawComment, "/** def2 */")
- checkDocString(v3.rawComment, "/** def3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** def1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** def2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** def3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -425,9 +425,9 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) => {
o.impl.body match {
case (v1: MemberDef) :: (v2: MemberDef) :: (v3: MemberDef) :: Nil => {
- checkDocString(v1.rawComment, "/** type1 */")
- checkDocString(v2.rawComment, "/** type2 */")
- checkDocString(v3.rawComment, "/** type3 */")
+ checkDocString(v1.rawComment.map(_.chrs), "/** type1 */")
+ checkDocString(v2.rawComment.map(_.chrs), "/** type2 */")
+ checkDocString(v3.rawComment.map(_.chrs), "/** type3 */")
}
case _ => assert(false, "Incorrect structure inside object")
}
@@ -451,7 +451,7 @@ class DottyDocParsingTests extends DottyDocTest {
case PackageDef(_, Seq(o: ModuleDef)) =>
o.impl.body match {
case (foo: MemberDef) :: Nil =>
- expectNoDocString(foo.rawComment)
+ expectNoDocString(foo.rawComment.map(_.chrs))
case _ => assert(false, "Incorrect structure inside object")
}
}
@@ -468,7 +468,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case p @ PackageDef(_, Seq(_, c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Class1 */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Class1 */")
}
}
@@ -483,7 +483,7 @@ class DottyDocParsingTests extends DottyDocTest {
import dotty.tools.dotc.ast.untpd._
checkFrontend(source) {
case p @ PackageDef(_, Seq(c: TypeDef)) =>
- checkDocString(c.rawComment, "/** Class1 */")
+ checkDocString(c.rawComment.map(_.chrs), "/** Class1 */")
}
}
} /* End class */
diff --git a/tests/pending/pos/t3999b.scala b/tests/disabled/not-representable/pos/t3999b.scala
index 0f3f7d642..0f3f7d642 100644
--- a/tests/pending/pos/t3999b.scala
+++ b/tests/disabled/not-representable/pos/t3999b.scala
diff --git a/tests/run/t5544/Api_1.scala b/tests/disabled/not-representable/pos/t5544/Api_1.scala
index b4c92864d..30994fa07 100644
--- a/tests/run/t5544/Api_1.scala
+++ b/tests/disabled/not-representable/pos/t5544/Api_1.scala
@@ -1,3 +1,4 @@
+// Uses structural types; therefore not expressible in dotty
import scala.annotation.StaticAnnotation
class ann(val bar: Any) extends StaticAnnotation
diff --git a/tests/run/t5544/Test_2.scala b/tests/disabled/not-representable/pos/t5544/Test_2.scala
index ea9232221..ea9232221 100644
--- a/tests/run/t5544/Test_2.scala
+++ b/tests/disabled/not-representable/pos/t5544/Test_2.scala
diff --git a/tests/pending/pos/t7035.scala b/tests/disabled/not-representable/pos/t7035.scala
index f45bd0a87..b1ce66cc6 100644
--- a/tests/pending/pos/t7035.scala
+++ b/tests/disabled/not-representable/pos/t7035.scala
@@ -1,3 +1,5 @@
+// no longer works because dotty uses name-based pattern matching for case classes
+
case class Y(final var x: Int, final private var y: String, final val z1: Boolean, final private val z2: Any) {
import Test.{y => someY}
diff --git a/tests/pending/pos/t7228.scala b/tests/disabled/not-representable/pos/t7228.scala
index 5d936f652..525327857 100644
--- a/tests/pending/pos/t7228.scala
+++ b/tests/disabled/not-representable/pos/t7228.scala
@@ -1,3 +1,4 @@
+// no longer works because dotty does not have a concept of weak conformance
object AdaptWithWeaklyConformantType {
implicit class D(d: Double) { def double = d*2 }
diff --git a/tests/pending/pos/t8111.scala b/tests/disabled/not-representable/pos/t8111.scala
index 3f0e766ce..04a8e20de 100644
--- a/tests/pending/pos/t8111.scala
+++ b/tests/disabled/not-representable/pos/t8111.scala
@@ -1,3 +1,4 @@
+// structural types, cannot represent
trait T {
def crashy(ma: Any): Unit = {
diff --git a/tests/pending/run/t2337.scala b/tests/disabled/not-representable/t2337.scala
index edb574cba..9e3b8c555 100644
--- a/tests/pending/run/t2337.scala
+++ b/tests/disabled/not-representable/t2337.scala
@@ -1,4 +1,4 @@
-
+// Failure of autotupling in the presence of overloaded functions.
object Test {
def compare(first: Any, second: Any): Any = {
diff --git a/tests/pending/pos/t5604b/T_1.scala b/tests/disabled/not-testable/t5604b/T_1.scala
index 179dcb10c..179dcb10c 100644
--- a/tests/pending/pos/t5604b/T_1.scala
+++ b/tests/disabled/not-testable/t5604b/T_1.scala
diff --git a/tests/pending/pos/t5604b/T_2.scala b/tests/disabled/not-testable/t5604b/T_2.scala
index 179dcb10c..179dcb10c 100644
--- a/tests/pending/pos/t5604b/T_2.scala
+++ b/tests/disabled/not-testable/t5604b/T_2.scala
diff --git a/tests/pending/pos/t5604b/Test_1.scala b/tests/disabled/not-testable/t5604b/Test_1.scala
index f7c58ebe8..f7c58ebe8 100644
--- a/tests/pending/pos/t5604b/Test_1.scala
+++ b/tests/disabled/not-testable/t5604b/Test_1.scala
diff --git a/tests/pending/pos/t5604b/Test_2.scala b/tests/disabled/not-testable/t5604b/Test_2.scala
index f7c58ebe8..f7c58ebe8 100644
--- a/tests/pending/pos/t5604b/Test_2.scala
+++ b/tests/disabled/not-testable/t5604b/Test_2.scala
diff --git a/tests/pending/pos/t5604b/pack_1.scala b/tests/disabled/not-testable/t5604b/pack_1.scala
index f50d568bf..f50d568bf 100644
--- a/tests/pending/pos/t5604b/pack_1.scala
+++ b/tests/disabled/not-testable/t5604b/pack_1.scala
diff --git a/tests/pending/pos/t5954a/A_1.scala b/tests/disabled/not-testable/t5954a/A_1.scala
index 10ead0b1c..10ead0b1c 100644
--- a/tests/pending/pos/t5954a/A_1.scala
+++ b/tests/disabled/not-testable/t5954a/A_1.scala
diff --git a/tests/pending/pos/t5954a/B_2.scala b/tests/disabled/not-testable/t5954a/B_2.scala
index 10ead0b1c..10ead0b1c 100644
--- a/tests/pending/pos/t5954a/B_2.scala
+++ b/tests/disabled/not-testable/t5954a/B_2.scala
diff --git a/tests/pending/pos/t5954b/A_1.scala b/tests/disabled/not-testable/t5954b/A_1.scala
index 8465e8f8c..8465e8f8c 100644
--- a/tests/pending/pos/t5954b/A_1.scala
+++ b/tests/disabled/not-testable/t5954b/A_1.scala
diff --git a/tests/pending/pos/t5954b/B_2.scala b/tests/disabled/not-testable/t5954b/B_2.scala
index f7e4704b3..f7e4704b3 100644
--- a/tests/pending/pos/t5954b/B_2.scala
+++ b/tests/disabled/not-testable/t5954b/B_2.scala
diff --git a/tests/pending/pos/t5954c/A_1.scala b/tests/disabled/not-testable/t5954c/A_1.scala
index 29ad9547a..29ad9547a 100644
--- a/tests/pending/pos/t5954c/A_1.scala
+++ b/tests/disabled/not-testable/t5954c/A_1.scala
diff --git a/tests/pending/pos/t5954c/B_2.scala b/tests/disabled/not-testable/t5954c/B_2.scala
index 29ad9547a..29ad9547a 100644
--- a/tests/pending/pos/t5954c/B_2.scala
+++ b/tests/disabled/not-testable/t5954c/B_2.scala
diff --git a/tests/pending/pos/t5954d/A_1.scala b/tests/disabled/not-testable/t5954d/A_1.scala
index 8465e8f8c..8465e8f8c 100644
--- a/tests/pending/pos/t5954d/A_1.scala
+++ b/tests/disabled/not-testable/t5954d/A_1.scala
diff --git a/tests/pending/pos/t5954d/B_2.scala b/tests/disabled/not-testable/t5954d/B_2.scala
index a4aa2eb58..a4aa2eb58 100644
--- a/tests/pending/pos/t5954d/B_2.scala
+++ b/tests/disabled/not-testable/t5954d/B_2.scala
diff --git a/tests/pending/pos/t8134/A_1.scala b/tests/disabled/not-testable/t8134/A_1.scala
index 32bce003f..32bce003f 100644
--- a/tests/pending/pos/t8134/A_1.scala
+++ b/tests/disabled/not-testable/t8134/A_1.scala
diff --git a/tests/pending/pos/t8134/B_2.scala b/tests/disabled/not-testable/t8134/B_2.scala
index 32bce003f..32bce003f 100644
--- a/tests/pending/pos/t8134/B_2.scala
+++ b/tests/disabled/not-testable/t8134/B_2.scala
diff --git a/tests/pending/pos/depmet_implicit_oopsla_zipwith.scala b/tests/disabled/structural-type/pos/depmet_implicit_oopsla_zipwith.scala
index 83171f865..83171f865 100644
--- a/tests/pending/pos/depmet_implicit_oopsla_zipwith.scala
+++ b/tests/disabled/structural-type/pos/depmet_implicit_oopsla_zipwith.scala
diff --git a/tests/pending/pos/t8237b.scala b/tests/disabled/typetags/pos/t8237b.scala
index 52bb310e8..52bb310e8 100644
--- a/tests/pending/pos/t8237b.scala
+++ b/tests/disabled/typetags/pos/t8237b.scala
diff --git a/tests/neg/applydynamic_sip.check b/tests/neg/applydynamic_sip.check
new file mode 100644
index 000000000..1bd8304bf
--- /dev/null
+++ b/tests/neg/applydynamic_sip.check
@@ -0,0 +1,52 @@
+tests/neg/applydynamic_sip.scala:8: error: value applyDynamic is not a member of Dynamic(Test.qual)
+possible cause: maybe a wrong Dynamic method signature?
+ qual.sel(a, a2: _*) // error
+ ^
+tests/neg/applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter
+ qual.sel(arg = a, a2: _*) // error
+ ^
+tests/neg/applydynamic_sip.scala:10: error: applyDynamicNamed does not support passing a vararg parameter
+ qual.sel(arg, arg2 = "a2", a2: _*) // error
+ ^
+tests/neg/applydynamic_sip.scala:20: error: type mismatch:
+ found : String("sel")
+ required: Int
+ bad1.sel // error
+ ^
+tests/neg/applydynamic_sip.scala:21: error: type mismatch:
+ found : String("sel")
+ required: Int
+ bad1.sel(1) // error // error
+ ^
+tests/neg/applydynamic_sip.scala:21: error: method applyDynamic in class Bad1 does not take more parameters
+ bad1.sel(1) // error // error
+ ^
+tests/neg/applydynamic_sip.scala:22: error: type mismatch:
+ found : String("sel")
+ required: Int
+ bad1.sel(a = 1) // error // error
+ ^
+tests/neg/applydynamic_sip.scala:22: error: method applyDynamicNamed in class Bad1 does not take more parameters
+ bad1.sel(a = 1) // error // error
+ ^
+tests/neg/applydynamic_sip.scala:23: error: type mismatch:
+ found : String("sel")
+ required: Int
+ bad1.sel = 1 // error // error
+ ^
+tests/neg/applydynamic_sip.scala:23: error: method updateDynamic in class Bad1 does not take more parameters
+ bad1.sel = 1 // error // error
+ ^
+tests/neg/applydynamic_sip.scala:32: error: method selectDynamic in class Bad2 does not take parameters
+ bad2.sel // error
+ ^
+tests/neg/applydynamic_sip.scala:33: error: method applyDynamic in class Bad2 does not take parameters
+ bad2.sel(1) // error
+ ^
+tests/neg/applydynamic_sip.scala:34: error: method applyDynamicNamed in class Bad2 does not take parameters
+ bad2.sel(a = 1) // error
+ ^
+tests/neg/applydynamic_sip.scala:35: error: method updateDynamic in class Bad2 does not take parameters
+ bad2.sel = 1 // error
+ ^
+14 errors found
diff --git a/tests/untried/neg/applydynamic_sip.flags b/tests/neg/applydynamic_sip.flags
index 1141f9750..1141f9750 100644
--- a/tests/untried/neg/applydynamic_sip.flags
+++ b/tests/neg/applydynamic_sip.flags
diff --git a/tests/neg/applydynamic_sip.scala b/tests/neg/applydynamic_sip.scala
new file mode 100644
index 000000000..7b131e7ff
--- /dev/null
+++ b/tests/neg/applydynamic_sip.scala
@@ -0,0 +1,36 @@
+import scala.language.dynamics
+object Test extends App {
+ val qual: Dynamic = ???
+ val expr = "expr"
+ val a = "a"
+ val a2 = "a2"
+
+ qual.sel(a, a2: _*) // error
+ qual.sel(arg = a, a2: _*) // error
+ qual.sel(arg, arg2 = "a2", a2: _*) // error
+
+ class Bad1 extends Dynamic {
+ def selectDynamic(n: Int) = n
+ def applyDynamic(n: Int) = n
+ def applyDynamicNamed(n: Int) = n
+ def updateDynamic(n: Int) = n
+
+ }
+ val bad1 = new Bad1
+ bad1.sel // error
+ bad1.sel(1) // error // error
+ bad1.sel(a = 1) // error // error
+ bad1.sel = 1 // error // error
+
+ class Bad2 extends Dynamic {
+ def selectDynamic = 1
+ def applyDynamic = 1
+ def applyDynamicNamed = 1
+ def updateDynamic = 1
+ }
+ val bad2 = new Bad2
+ bad2.sel // error
+ bad2.sel(1) // error
+ bad2.sel(a = 1) // error
+ bad2.sel = 1 // error
+}
diff --git a/tests/neg/emptyCatch.scala b/tests/neg/emptyCatch.scala
new file mode 100644
index 000000000..60951d27a
--- /dev/null
+++ b/tests/neg/emptyCatch.scala
@@ -0,0 +1,3 @@
+object Test {
+ try {} catch {} // error: `catch` block does not contain a valid expression, try adding a case like - `case e: Exception =>` to the block
+}
diff --git a/tests/untried/neg/t6355b.check b/tests/neg/t6355b.check
index f827f07e5..fb73b9c42 100644
--- a/tests/untried/neg/t6355b.check
+++ b/tests/neg/t6355b.check
@@ -1,11 +1,11 @@
t6355b.scala:14: error: value applyDynamic is not a member of A
error after rewriting to x.<applyDynamic: error>("bippy")
possible cause: maybe a wrong Dynamic method signature?
- println(x.bippy(42))
+ println(x.bippy(42)) // error
^
t6355b.scala:15: error: value applyDynamic is not a member of A
error after rewriting to x.<applyDynamic: error>("bippy")
possible cause: maybe a wrong Dynamic method signature?
- println(x.bippy("42"))
+ println(x.bippy("42")) // error
^
two errors found
diff --git a/tests/untried/neg/t6355b.scala b/tests/neg/t6355b.scala
index 5f3c97cb0..bba3c4fdc 100644
--- a/tests/untried/neg/t6355b.scala
+++ b/tests/neg/t6355b.scala
@@ -11,7 +11,7 @@ class B(method: String) {
object Test {
def main(args: Array[String]): Unit = {
val x = new A
- println(x.bippy(42))
- println(x.bippy("42"))
+ println(x.bippy(42)) // error
+ println(x.bippy("42")) // error
}
}
diff --git a/tests/untried/neg/t6663.check b/tests/neg/t6663.check
index aa4faa4a4..aa4faa4a4 100644
--- a/tests/untried/neg/t6663.check
+++ b/tests/neg/t6663.check
diff --git a/tests/untried/neg/t6663.scala b/tests/neg/t6663.scala
index 4a358dfbc..aa4ab08ed 100644
--- a/tests/untried/neg/t6663.scala
+++ b/tests/neg/t6663.scala
@@ -13,7 +13,7 @@ object Test extends App {
// but, before fixing SI-6663, became
// C(42).selectDynamic("foo").get, ignoring
// the [String] type parameter
- var v = new C(42).foo[String].get :Int
+ var v = new C(42).foo[String].get :Int // error
println(v)
}
diff --git a/tests/untried/neg/t6920.check b/tests/neg/t6920.check
index ee4eafb83..8bfd16a5f 100644
--- a/tests/untried/neg/t6920.check
+++ b/tests/neg/t6920.check
@@ -1,6 +1,6 @@
t6920.scala:9: error: too many arguments for method applyDynamicNamed: (values: Seq[(String, Any)])String
error after rewriting to CompilerError.this.test.applyDynamicNamed("crushTheCompiler")(scala.Tuple2("a", 1), scala.Tuple2("b", 2))
possible cause: maybe a wrong Dynamic method signature?
- test.crushTheCompiler(a = 1, b = 2)
+ test.crushTheCompiler(a = 1, b = 2) // error
^
one error found
diff --git a/tests/untried/neg/t6920.scala b/tests/neg/t6920.scala
index 25dc7b3b6..9601ed8d2 100644
--- a/tests/untried/neg/t6920.scala
+++ b/tests/neg/t6920.scala
@@ -6,5 +6,5 @@ class DynTest extends Dynamic {
class CompilerError {
val test = new DynTest
- test.crushTheCompiler(a = 1, b = 2)
+ test.crushTheCompiler(a = 1, b = 2) // error
}
diff --git a/tests/pending/pos/t7239.scala b/tests/neg/t7239.scala
index 16e9d00f1..f3a379b4e 100644
--- a/tests/pending/pos/t7239.scala
+++ b/tests/neg/t7239.scala
@@ -1,3 +1,5 @@
+// Dotty rewrites only withFilter calls occurring in for expressions to filter calls.
+// So this test does not compile.
object Test {
def BrokenMethod(): HasFilter[(Int, String)] = ???
@@ -15,12 +17,12 @@ object Test {
(implicit F0: NoImplicit): HasWithFilter = ???
}
- BrokenMethod().withFilter(_ => true) // okay
- BrokenMethod().filter(_ => true) // okay
+ BrokenMethod().withFilter(_ => true) // error
+ BrokenMethod().filter(_ => true) // ok
locally {
import addWithFilter._
- BrokenMethod().withFilter((_: (Int, String)) => true) // okay
+ BrokenMethod().withFilter((_: (Int, String)) => true) // error
}
locally {
@@ -33,6 +35,6 @@ object Test {
// `(B => Boolean)`. Only later during pickling does the
// defensive check for erroneous types in the tree pick up
// the problem.
- BrokenMethod().withFilter(x => true) // erroneous or inaccessible type.
+ BrokenMethod().withFilter(x => true) // error
}
}
diff --git a/tests/pending/pos/t8002-nested-scope.scala b/tests/neg/t8002-nested-scope.scala
index a2088bce7..78a03ce66 100644
--- a/tests/pending/pos/t8002-nested-scope.scala
+++ b/tests/neg/t8002-nested-scope.scala
@@ -13,7 +13,7 @@ class C {
{
val a = 0
object C {
- new C().x
+ new C().x // error: cannot be accessed
}
}
}
diff --git a/tests/untried/neg/t8006.check b/tests/neg/t8006.check
index fbac26e3a..98207ba30 100644
--- a/tests/untried/neg/t8006.check
+++ b/tests/neg/t8006.check
@@ -1,6 +1,6 @@
t8006.scala:3: error: too many arguments for method applyDynamicNamed: (value: (String, Any))String
error after rewriting to X.this.d.applyDynamicNamed("meth")(scala.Tuple2("value1", 10), scala.Tuple2("value2", 100))
possible cause: maybe a wrong Dynamic method signature?
- d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed
+ d.meth(value1 = 10, value2 = 100) // error: two arguments here, but only one is allowed
^
one error found
diff --git a/tests/untried/neg/t8006.scala b/tests/neg/t8006.scala
index 8dc60697d..34946a659 100644
--- a/tests/untried/neg/t8006.scala
+++ b/tests/neg/t8006.scala
@@ -1,6 +1,6 @@
object X {
val d = new D
- d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed
+ d.meth(value1 = 10, value2 = 100) // error: two arguments here, but only one is allowed
}
import language.dynamics
class D extends Dynamic {
diff --git a/tests/patmat/NonAbstractSealed.check b/tests/patmat/NonAbstractSealed.check
new file mode 100644
index 000000000..9224ee370
--- /dev/null
+++ b/tests/patmat/NonAbstractSealed.check
@@ -0,0 +1,5 @@
+./tests/patmat/NonAbstractSealed.scala:6: warning: match may not be exhaustive.
+It would fail on the following input: _: A
+ (null: A) match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/NonAbstractSealed.scala b/tests/patmat/NonAbstractSealed.scala
new file mode 100644
index 000000000..ff2e90aee
--- /dev/null
+++ b/tests/patmat/NonAbstractSealed.scala
@@ -0,0 +1,10 @@
+sealed class A
+class B extends A
+class C extends A
+
+object Test {
+ (null: A) match {
+ case t: B =>
+ case t: C =>
+ }
+}
diff --git a/tests/patmat/TwoTrait.scala b/tests/patmat/TwoTrait.scala
new file mode 100644
index 000000000..b8e3402c5
--- /dev/null
+++ b/tests/patmat/TwoTrait.scala
@@ -0,0 +1,12 @@
+object Test {
+ sealed trait A
+ sealed trait B
+
+ abstract sealed class Parent
+ class Foo extends Parent with A with B
+ class Bar extends Parent with B with A
+
+ (null: A) match {
+ case _: B =>
+ }
+}
diff --git a/tests/patmat/aladdin1055/A.scala b/tests/patmat/aladdin1055/A.scala
new file mode 100644
index 000000000..862336e30
--- /dev/null
+++ b/tests/patmat/aladdin1055/A.scala
@@ -0,0 +1,6 @@
+object A {
+ sealed trait T { def f: Int }
+ class TT extends T { def f = 0 }
+
+ def foo = new T { def f = 1 } // local subclass of sealed trait T
+}
diff --git a/tests/patmat/aladdin1055/Test_1.scala.ignore b/tests/patmat/aladdin1055/Test_1.scala.ignore
new file mode 100644
index 000000000..39d9b1dc9
--- /dev/null
+++ b/tests/patmat/aladdin1055/Test_1.scala.ignore
@@ -0,0 +1,5 @@
+object Test {
+ def foo(t: A.T) = t match {
+ case a: A.TT => 0
+ }
+}
diff --git a/tests/patmat/aladdin1055/expected.check.ignore b/tests/patmat/aladdin1055/expected.check.ignore
new file mode 100644
index 000000000..a8024ad02
--- /dev/null
+++ b/tests/patmat/aladdin1055/expected.check.ignore
@@ -0,0 +1,5 @@
+./tests/patmat/aladdin1055/Test_1.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: (_ : this.<local child>)
+ def foo(t: A.T) = t match {
+ ^
+one warning found
diff --git a/tests/patmat/enum/Day.java b/tests/patmat/enum/Day.java
new file mode 100644
index 000000000..eedb9a72b
--- /dev/null
+++ b/tests/patmat/enum/Day.java
@@ -0,0 +1,4 @@
+public enum Day {
+ SUNDAY, MONDAY, TUESDAY, WEDNESDAY,
+ THURSDAY, FRIDAY, SATURDAY
+}
\ No newline at end of file
diff --git a/tests/patmat/enum/expected.check b/tests/patmat/enum/expected.check
new file mode 100644
index 000000000..b3dafa8bd
--- /dev/null
+++ b/tests/patmat/enum/expected.check
@@ -0,0 +1,9 @@
+./tests/patmat/enum/patmat-enum.scala:4: warning: match may not be exhaustive.
+It would fail on the following input: SATURDAY, FRIDAY, THURSDAY, SUNDAY
+ day match {
+ ^
+./tests/patmat/enum/patmat-enum.scala:15: warning: match may not be exhaustive.
+It would fail on the following input: SATURDAY, FRIDAY, THURSDAY
+ day match {
+ ^
+two warnings found
\ No newline at end of file
diff --git a/tests/patmat/enum/patmat-enum.scala b/tests/patmat/enum/patmat-enum.scala
new file mode 100644
index 000000000..ec5c90255
--- /dev/null
+++ b/tests/patmat/enum/patmat-enum.scala
@@ -0,0 +1,21 @@
+object Test1 {
+ val day: Day = ???
+
+ day match {
+ case Day.MONDAY => true
+ case Day.TUESDAY => true
+ case Day.WEDNESDAY => true
+ }
+}
+
+object Test2 {
+ import Day._
+ val day: Day = ???
+
+ day match {
+ case MONDAY => true
+ case TUESDAY => true
+ case WEDNESDAY => true
+ case SUNDAY => true
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/exhausting.check b/tests/patmat/exhausting.check
new file mode 100644
index 000000000..790b12334
--- /dev/null
+++ b/tests/patmat/exhausting.check
@@ -0,0 +1,25 @@
+./tests/patmat/exhausting.scala:21: warning: match may not be exhaustive.
+It would fail on the following input: List(_), List(_, _, _)
+ def fail1[T](xs: List[T]) = xs match {
+ ^
+./tests/patmat/exhausting.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: Nil
+ def fail2[T](xs: List[T]) = xs match {
+ ^
+./tests/patmat/exhausting.scala:32: warning: match may not be exhaustive.
+It would fail on the following input: List(_, _)
+ def fail3a(xs: List[Int]) = xs match {
+ ^
+./tests/patmat/exhausting.scala:39: warning: match may not be exhaustive.
+It would fail on the following input: Bar3
+ def fail3[T](x: Foo[T]) = x match {
+ ^
+./tests/patmat/exhausting.scala:44: warning: match may not be exhaustive.
+It would fail on the following input: (Bar2, Bar2)
+ def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
+ ^
+./tests/patmat/exhausting.scala:53: warning: match may not be exhaustive.
+It would fail on the following input: (Bar2, Bar2), (Bar2, Bar1), (Bar1, Bar3), (Bar1, Bar2)
+ def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
+ ^
+6 warnings found
diff --git a/tests/patmat/exhausting.scala b/tests/patmat/exhausting.scala
new file mode 100644
index 000000000..03e8198dd
--- /dev/null
+++ b/tests/patmat/exhausting.scala
@@ -0,0 +1,58 @@
+object Test {
+ sealed abstract class Foo[T]
+ case object Bar1 extends Foo[Int]
+ case object Bar2 extends Foo[String]
+ case object Bar3 extends Foo[Any]
+
+ def ex1[T](xs: List[T]) = xs match {
+ case ys: List[_] => "ok"
+ }
+ def ex2[T](xx: (Foo[T], Foo[T])) = xx match {
+ case (Bar1, Bar1) => ()
+ case (_, Bar1) => ()
+ case (_, Bar3) => ()
+ case (_, Bar2) => ()
+ }
+ def ex3[T](xx: (Foo[T], Foo[T])) = xx match {
+ case (_: Foo[_], _: Foo[_]) => ()
+ }
+
+ // fails for: ::(_, Nil), ::(_, ::(_, ::(_, _))), ...
+ def fail1[T](xs: List[T]) = xs match {
+ case Nil => "ok"
+ case x :: y :: Nil => "ok"
+ }
+
+ // fails for: Nil
+ def fail2[T](xs: List[T]) = xs match {
+ case _ :: _ => "ok"
+ }
+
+ // fails for: ::(<not in (2, 1)>, _)
+ def fail3a(xs: List[Int]) = xs match {
+ case 1 :: _ =>
+ case 2 :: _ =>
+ case Nil =>
+ }
+
+ // fails for: Bar3
+ def fail3[T](x: Foo[T]) = x match {
+ case Bar1 => "ok"
+ case Bar2 => "ok"
+ }
+ // fails for: (Bar2, Bar2)
+ def fail4[T <: AnyRef](xx: (Foo[T], Foo[T])) = xx match {
+ case (Bar1, Bar1) => ()
+ case (Bar2, Bar3) => ()
+ case (Bar3, _) => ()
+ }
+ // fails for: (Bar1, Bar2)
+ // fails for: (Bar1, Bar3)
+ // fails for: (Bar2, Bar1)
+ // fails for: (Bar2, Bar2)
+ def fail5[T](xx: (Foo[T], Foo[T])) = xx match {
+ case (Bar1, Bar1) => ()
+ case (Bar2, Bar3) => ()
+ case (Bar3, _) => ()
+ }
+}
diff --git a/tests/patmat/exhaustive_heuristics.scala b/tests/patmat/exhaustive_heuristics.scala
new file mode 100644
index 000000000..7d682f6aa
--- /dev/null
+++ b/tests/patmat/exhaustive_heuristics.scala
@@ -0,0 +1,26 @@
+// tests that exhaustivity checking doesn't give warnings (due to its heuristic rewrites kicking in or it backing off)
+object Test {
+ // List() => Nil
+ List(1) match {
+ case List() =>
+ case x :: xs =>
+ }
+
+ // we don't look into guards
+ val turnOffChecks = true
+ List(1) match {
+ case _ if turnOffChecks =>
+ }
+
+ // we back off when there are any user-defined extractors
+ // in fact this is exhaustive, but we pretend we don't know since List's unapplySeq is not special to the compiler
+  // to compensate for our ignorance, we back off
+  // well, in truth, we do rewrite List() to Nil, but otherwise we do nothing
+  // the full rewrite of List(a, b) to a :: b :: Nil, for example, is planned (but not sure it's a good idea)
+ List(true, false) match {
+ case List(_, _, _:_*) =>
+ case List(node, _:_*) =>
+ case Nil =>
+ }
+
+}
\ No newline at end of file
diff --git a/tests/patmat/for.scala b/tests/patmat/for.scala
new file mode 100644
index 000000000..ae9dcf65e
--- /dev/null
+++ b/tests/patmat/for.scala
@@ -0,0 +1,5 @@
+object Test {
+ def foo[A, B](l: List[(A, B)]): List[A] = {
+ for ((a, b) <- l) yield a
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/gadt.check b/tests/patmat/gadt.check
new file mode 100644
index 000000000..f2154fa60
--- /dev/null
+++ b/tests/patmat/gadt.check
@@ -0,0 +1,17 @@
+./tests/patmat/gadt.scala:13: warning: match may not be exhaustive.
+It would fail on the following input: IntLit(_)
+ def foo1b(x: Expr[Int]) = x match {
+ ^
+./tests/patmat/gadt.scala:22: warning: match may not be exhaustive.
+It would fail on the following input: Or(_, _)
+ def foo2b(x: Expr[Boolean]) = x match {
+ ^
+./tests/patmat/gadt.scala:45: warning: match may not be exhaustive.
+It would fail on the following input: BooleanLit(_), IntLit(_)
+ def foo4b(x: Expr[_]) = x match {
+ ^
+./tests/patmat/gadt.scala:55: warning: match may not be exhaustive.
+It would fail on the following input: Sum(_, _)
+ def foo5b[T <: Int](x: Expr[T]) = x match {
+ ^
+four warnings found
\ No newline at end of file
diff --git a/tests/patmat/gadt.scala b/tests/patmat/gadt.scala
new file mode 100644
index 000000000..0541ed61f
--- /dev/null
+++ b/tests/patmat/gadt.scala
@@ -0,0 +1,58 @@
+object Test {
+ sealed trait Expr[T]
+ case class IntLit(i: Int) extends Expr[Int]
+ case class BooleanLit(b: Boolean) extends Expr[Boolean]
+ case class Sum(l: Expr[Int], r: Expr[Int]) extends Expr[Int]
+ case class Or(l: Expr[Boolean], r: Expr[Boolean]) extends Expr[Boolean]
+
+ def foo1a(x: Expr[Int]) = x match {
+ case _: IntLit => true
+ case _: Sum => true
+ }
+
+ def foo1b(x: Expr[Int]) = x match {
+ case _: Sum => true
+ }
+
+ def foo2a(x: Expr[Boolean]) = x match {
+ case _: BooleanLit => true
+ case _: Or => true
+ }
+
+ def foo2b(x: Expr[Boolean]) = x match {
+ case _: BooleanLit => true
+ }
+
+ def foo3a(x: Expr[Boolean]) = x match {
+ case _: BooleanLit => true
+ case _: Or => true
+ // case _: Sum => true
+ }
+
+ def foo3b(x: Expr[Int]) = x match {
+ case _: IntLit => true
+ case _: Sum => true
+ // case _: Or => true
+ }
+
+ def foo4a(x: Expr[_]) = x match {
+ case _: IntLit => true
+ case _: Sum => true
+ case _: BooleanLit => true
+ case _: Or => true
+ }
+
+ def foo4b(x: Expr[_]) = x match {
+ case _: Sum => true
+ case _: Or => true
+ }
+
+ def foo5a[T <: Int](x: Expr[T]) = x match {
+ case _: IntLit => true
+ case _: Sum => true
+ }
+
+ def foo5b[T <: Int](x: Expr[T]) = x match {
+ case _: IntLit => true
+ }
+}
diff --git a/tests/patmat/gadt2.scala.ignore b/tests/patmat/gadt2.scala.ignore
new file mode 100644
index 000000000..80ba72c70
--- /dev/null
+++ b/tests/patmat/gadt2.scala.ignore
@@ -0,0 +1,14 @@
+sealed trait Nat[+T]
+case class Zero() extends Nat[Nothing]
+case class Succ[T]() extends Nat[T]
+
+sealed trait Vect[+N <: Nat[_], +T]
+case class VN[T]() extends Vect[Zero, T]
+case class VC[T, N <: Nat[_]](x: T, xs: Vect[N, T]) extends Vect[Succ[N], T]
+
+object Test {
+ def foo[N <: Nat[_], A, B](v1: Vect[N, A], v2: Vect[N, B]) = (v1, v2) match {
+ case (VN(), VN()) => 1
+ case (VC(x, xs), VC(y, ys)) => 2
+ }
+}
diff --git a/tests/patmat/gadt3.scala.ignore b/tests/patmat/gadt3.scala.ignore
new file mode 100644
index 000000000..c39416414
--- /dev/null
+++ b/tests/patmat/gadt3.scala.ignore
@@ -0,0 +1,10 @@
+sealed trait Expr[T]
+case class IntExpr(x: Int) extends Expr[Int]
+case class BooleanExpr(b: Boolean) extends Expr[Boolean]
+
+object Test {
+ def foo[T](x: Expr[T], y: Expr[T]) = (x, y) match {
+ case (IntExpr(_), IntExpr(_)) =>
+ case (BooleanExpr(_), BooleanExpr(_)) =>
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/i947.check b/tests/patmat/i947.check
new file mode 100644
index 000000000..5cce559c4
--- /dev/null
+++ b/tests/patmat/i947.check
@@ -0,0 +1,4 @@
+./tests/patmat/i947.scala:10: warning: unreachable code
+ case ys: List[d18383] => false
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/i947.scala b/tests/patmat/i947.scala
new file mode 100644
index 000000000..0f2d9e775
--- /dev/null
+++ b/tests/patmat/i947.scala
@@ -0,0 +1,16 @@
+object Test {
+
+ class c {
+
+ private var x: Int = 0
+
+ override def equals(other: Any) = other match {
+ case o: c => x == o.x
+ case xs: List[c] => false
+ case ys: List[d18383] => false
+ case _ => false
+ }
+
+
+ }
+}
diff --git a/tests/patmat/outer-ref-checks.scala b/tests/patmat/outer-ref-checks.scala
new file mode 100644
index 000000000..35983fe92
--- /dev/null
+++ b/tests/patmat/outer-ref-checks.scala
@@ -0,0 +1,106 @@
+import scala.annotation.unchecked.uncheckedVariance
+
+class Outer {
+ // A final class gets no outer ref, so we expect to see warnings where an outer ref check should be performed
+ final case class Inner(val s: String) // unchecked warning
+
+ def belongs(a: Any): Unit = a match {
+ case Inner(s) => // unchecked warning
+ case _ =>
+ }
+
+ def belongsStaticSameOuter(a: Inner): Unit = a match {
+ case Inner(s) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsOtherOuter(a: Outer#Inner): Unit = a match {
+ case Inner(s) => // unchecked warning
+ case O.Inner(s) => // unchecked warning
+ case _ =>
+ }
+}
+
+object O extends Outer {
+ def belongsStaticSameOuter2(a: Inner): Unit = a match {
+ case Inner(s) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsStaticSameOuter3(a: Inner): Unit = a match {
+ case _: Inner => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsStaticSameOuter4(a: Inner): Unit = a match {
+ case _: (Inner @uncheckedVariance) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+
+ def belongsOtherOuter2(a: Outer#Inner): Unit = a match {
+ case Inner(s) => // unchecked warning
+ case _ =>
+ }
+
+ def belongsOtherOuter3(a: Outer#Inner): Unit = a match {
+ case _: Inner => // unchecked warning
+ case _ =>
+ }
+
+ def belongsOtherOuter4(a: Outer#Inner): Unit = a match {
+    case _: (Inner @unchecked) => // warning suppressed
+ case _ =>
+ }
+
+ def belongsOtherOuter5(a: Outer#Inner): Unit = a match {
+ case _: (Inner @uncheckedVariance) => // unchecked warning
+ case _ =>
+ }
+
+ def nested: Unit = {
+ final case class I(s: String)
+
+ def check1(a: Any): Unit = a match {
+ case I(s) => // no need for outer check
+ case _ =>
+ }
+
+ def check2(a: I): Unit = a match {
+ case I(s) => // no need for outer check
+ // match is exhaustive, no default case needed
+ }
+ }
+}
+
+class O2 {
+ def nested: Unit = {
+ final case class I(s: String)
+
+ def check1(a: Any): Unit = a match {
+ case I(s) => // no need for outer check (is this correct?)
+ case _ =>
+ }
+
+ def check2(a: I): Unit = a match {
+ case I(s) => // no need for outer check (is this correct?)
+ // match is exhaustive, no default case needed
+ }
+ }
+}
+
+package p {
+ object T {
+ case class C(x: Int)
+ }
+}
+
+object U {
+ val T = p.T
+}
+
+class Test {
+ def m(a: Any) = a match {
+ case U.T.C(1) => 1 // used to warn
+ case _ => 1
+ }
+}
diff --git a/tests/patmat/partial-function.scala b/tests/patmat/partial-function.scala
new file mode 100644
index 000000000..f168489da
--- /dev/null
+++ b/tests/patmat/partial-function.scala
@@ -0,0 +1,12 @@
+sealed abstract class TA
+sealed abstract class TB extends TA
+case object B extends TB
+case object B2 extends TB
+
+case class CC(i: Int, tb: TB)
+
+object Test {
+ def foo: PartialFunction[CC, Unit] = {
+ case CC(_, B) => ()
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/patmat-adt.check b/tests/patmat/patmat-adt.check
new file mode 100644
index 000000000..f4e1ce369
--- /dev/null
+++ b/tests/patmat/patmat-adt.check
@@ -0,0 +1,21 @@
+./tests/patmat/patmat-adt.scala:7: warning: match may not be exhaustive.
+It would fail on the following input: Bad(Good(_)), Good(Bad(_))
+ def foo1a(x: Odd) = x match { // warning: Good(_: Bad), Bad(_: Good)
+ ^
+./tests/patmat/patmat-adt.scala:19: warning: match may not be exhaustive.
+It would fail on the following input: Some(_)
+ def foo2(x: Option[Int]) = x match { // warning: Some(_: Int)
+ ^
+./tests/patmat/patmat-adt.scala:24: warning: match may not be exhaustive.
+It would fail on the following input: (None, Some(_)), (_, Some(_))
+ def foo3a[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, Some(_))
+ ^
+./tests/patmat/patmat-adt.scala:29: warning: match may not be exhaustive.
+It would fail on the following input: (None, None), (Some(_), Some(_))
+ def foo3b[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, None)
+ ^
+./tests/patmat/patmat-adt.scala:50: warning: match may not be exhaustive.
+It would fail on the following input: LetL(BooleanLit), LetL(IntLit)
+ def foo5(tree: Tree) : Any = tree match {
+ ^
+5 warnings found
\ No newline at end of file
diff --git a/tests/patmat/patmat-adt.scala b/tests/patmat/patmat-adt.scala
new file mode 100644
index 000000000..e7eac4e4a
--- /dev/null
+++ b/tests/patmat/patmat-adt.scala
@@ -0,0 +1,58 @@
+object PatmatADT {
+ abstract sealed class Odd(x: Odd)
+
+ case class Good(x: Odd) extends Odd(x)
+ case class Bad(x: Odd) extends Odd(x)
+
+ def foo1a(x: Odd) = x match { // warning: Good(_: Bad), Bad(_: Good)
+ case Good(_: Good) => false
+ case Bad(_: Bad) => false
+ }
+
+ def foo1b(x: Odd) = x match {
+ case Good(_: Good) => false
+ case Bad(_: Bad) => false
+ case Good(_: Bad) => false
+ case Bad(_: Good) => false
+ }
+
+ def foo2(x: Option[Int]) = x match { // warning: Some(_: Int)
+ case Some(_: Double) => true
+ case None => true
+ }
+
+ def foo3a[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, Some(_))
+ case (Some(_), None) => true
+ case (None, None) => true
+ }
+
+ def foo3b[T](x: Option[T]) = (x, x) match { // warning: (Some(_), Some(_)), (None, None)
+ case (Some(_), None) => true
+ case (None, Some(_)) => true
+ }
+
+ sealed trait Base
+ case class Foo() extends Base
+
+ def foo4(x: Base) = x match {
+ case Foo() =>
+ }
+
+ sealed abstract class CL3Literal
+ case object IntLit extends CL3Literal
+ case object CharLit extends CL3Literal
+ case object BooleanLit extends CL3Literal
+
+
+ sealed abstract class Tree
+ case class LetL(value: CL3Literal) extends Tree
+
+ def foo5(tree: Tree) : Any = tree match {
+ case LetL(CharLit) =>
+ }
+
+ def foo6[T](l: List[T]): Boolean = l match {
+ case x::xs => true
+ case Nil => false
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/patmat-extractor.scala b/tests/patmat/patmat-extractor.scala
new file mode 100644
index 000000000..02fde96dc
--- /dev/null
+++ b/tests/patmat/patmat-extractor.scala
@@ -0,0 +1,17 @@
+sealed trait Node
+case class NodeA(i: Int) extends Node
+case class NodeB(b: Boolean) extends Node
+case class NodeC(s: String) extends Node
+
+object Node {
+ def unapply(node: Node): Option[(Node, Node)] = ???
+}
+
+// currently scalac can't do anything with the following
+// it's possible to do better in our case
+object Test {
+  def foo(x: Node): Boolean = x match { // non-exhaustive
+ case Node(NodeA(_), NodeB(_)) => true
+ case Node(NodeA(4), NodeB(false)) => true // unreachable code
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/patmat-indent.check b/tests/patmat/patmat-indent.check
new file mode 100644
index 000000000..3a76e0a95
--- /dev/null
+++ b/tests/patmat/patmat-indent.check
@@ -0,0 +1,13 @@
+./tests/patmat/patmat-indent.scala:9: warning: match may not be exhaustive.
+It would fail on the following input: Nil
+ def foo1a[T](l: List[T]) = l match {
+ ^
+./tests/patmat/patmat-indent.scala:23: warning: match may not be exhaustive.
+It would fail on the following input: _: Boolean
+ def foo2(b: Boolean) = b match {
+ ^
+./tests/patmat/patmat-indent.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: _: Int
+ def foo3(x: Int) = x match {
+ ^
+three warnings found
\ No newline at end of file
diff --git a/tests/patmat/patmat-indent.scala b/tests/patmat/patmat-indent.scala
new file mode 100644
index 000000000..ef25bb2c7
--- /dev/null
+++ b/tests/patmat/patmat-indent.scala
@@ -0,0 +1,30 @@
+object Test {
+ val Nil = scala.Nil
+ val X = 5
+
+ object Inner {
+ val Y = false
+ }
+
+ def foo1a[T](l: List[T]) = l match {
+ case x::xs => false
+ }
+
+ def foo1b[T](l: List[T]) = l match {
+ case Nil => true
+ case x::xs => false
+ }
+
+ def foo1c[T](l: List[T]) = l match {
+ case Test.Nil => true
+ case x::xs => false
+ }
+
+ def foo2(b: Boolean) = b match {
+ case Inner.Y => false
+ }
+
+ def foo3(x: Int) = x match {
+ case X => 0
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/patmat-ortype.check b/tests/patmat/patmat-ortype.check
new file mode 100644
index 000000000..2291da251
--- /dev/null
+++ b/tests/patmat/patmat-ortype.check
@@ -0,0 +1,13 @@
+./tests/patmat/patmat-ortype.scala:8: warning: match may not be exhaustive.
+It would fail on the following input: _: String
+ def foo2a(x: Int | Double | String) = x match { // _: String not matched
+ ^
+./tests/patmat/patmat-ortype.scala:18: warning: match may not be exhaustive.
+It would fail on the following input: Some(_: String), None
+ def foo3(x: Option[Int | Double | String]) = x match { // warning: None, Some(_: String) not matched
+ ^
+./tests/patmat/patmat-ortype.scala:36: warning: match may not be exhaustive.
+It would fail on the following input: Some(_: String)
+ def foo5b(x: Option[Int | Double | String]) = x match { // warning: Some(_: String) not matched
+ ^
+three warnings found
\ No newline at end of file
diff --git a/tests/patmat/patmat-ortype.scala b/tests/patmat/patmat-ortype.scala
new file mode 100644
index 000000000..c7419acd3
--- /dev/null
+++ b/tests/patmat/patmat-ortype.scala
@@ -0,0 +1,40 @@
+object PatmatOrType {
+
+ def foo1(x: Int | Double) = x match {
+ case _: Int => true
+ case _: Double => true
+ }
+
+ def foo2a(x: Int | Double | String) = x match { // _: String not matched
+ case _: Int => true
+ case _: Double => true
+ }
+
+ def foo2b(x: Int | Double | String) = x match {
+ case _: Int => true
+ case _: (Double | String) => true
+ }
+
+ def foo3(x: Option[Int | Double | String]) = x match { // warning: None, Some(_: String) not matched
+ case Some(_: Int) => true
+ case Some(_: Double) => true
+ }
+
+ def foo4(x: Option[Int | Double | String]) = x match {
+ case Some(_: Int) => true
+ case Some(_: Double) => true
+ case Some(_: String) => true
+ case None => false
+ }
+
+ def foo5a(x: Option[Int | Double | String]) = x match {
+ case Some(_: (Int | Double)) => true
+ case Some(_: String) => true
+ case None => false
+ }
+
+ def foo5b(x: Option[Int | Double | String]) = x match { // warning: Some(_: String) not matched
+ case Some(_: (Int | Double)) => true
+ case None => false
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/patmatexhaust-huge.check b/tests/patmat/patmatexhaust-huge.check
new file mode 100644
index 000000000..06cac90bd
--- /dev/null
+++ b/tests/patmat/patmatexhaust-huge.check
@@ -0,0 +1,5 @@
+./tests/patmat/patmatexhaust-huge.scala:404: warning: match may not be exhaustive.
+It would fail on the following input: C397, C392
+ def f(c: C): Int = c match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/patmatexhaust-huge.scala b/tests/patmat/patmatexhaust-huge.scala
new file mode 100644
index 000000000..c4008b995
--- /dev/null
+++ b/tests/patmat/patmatexhaust-huge.scala
@@ -0,0 +1,806 @@
+abstract sealed trait C
+case object C1 extends C
+case object C2 extends C
+case object C3 extends C
+case object C4 extends C
+case object C5 extends C
+case object C6 extends C
+case object C7 extends C
+case object C8 extends C
+case object C9 extends C
+case object C10 extends C
+case object C11 extends C
+case object C12 extends C
+case object C13 extends C
+case object C14 extends C
+case object C15 extends C
+case object C16 extends C
+case object C17 extends C
+case object C18 extends C
+case object C19 extends C
+case object C20 extends C
+case object C21 extends C
+case object C22 extends C
+case object C23 extends C
+case object C24 extends C
+case object C25 extends C
+case object C26 extends C
+case object C27 extends C
+case object C28 extends C
+case object C29 extends C
+case object C30 extends C
+case object C31 extends C
+case object C32 extends C
+case object C33 extends C
+case object C34 extends C
+case object C35 extends C
+case object C36 extends C
+case object C37 extends C
+case object C38 extends C
+case object C39 extends C
+case object C40 extends C
+case object C41 extends C
+case object C42 extends C
+case object C43 extends C
+case object C44 extends C
+case object C45 extends C
+case object C46 extends C
+case object C47 extends C
+case object C48 extends C
+case object C49 extends C
+case object C50 extends C
+case object C51 extends C
+case object C52 extends C
+case object C53 extends C
+case object C54 extends C
+case object C55 extends C
+case object C56 extends C
+case object C57 extends C
+case object C58 extends C
+case object C59 extends C
+case object C60 extends C
+case object C61 extends C
+case object C62 extends C
+case object C63 extends C
+case object C64 extends C
+case object C65 extends C
+case object C66 extends C
+case object C67 extends C
+case object C68 extends C
+case object C69 extends C
+case object C70 extends C
+case object C71 extends C
+case object C72 extends C
+case object C73 extends C
+case object C74 extends C
+case object C75 extends C
+case object C76 extends C
+case object C77 extends C
+case object C78 extends C
+case object C79 extends C
+case object C80 extends C
+case object C81 extends C
+case object C82 extends C
+case object C83 extends C
+case object C84 extends C
+case object C85 extends C
+case object C86 extends C
+case object C87 extends C
+case object C88 extends C
+case object C89 extends C
+case object C90 extends C
+case object C91 extends C
+case object C92 extends C
+case object C93 extends C
+case object C94 extends C
+case object C95 extends C
+case object C96 extends C
+case object C97 extends C
+case object C98 extends C
+case object C99 extends C
+case object C100 extends C
+case object C101 extends C
+case object C102 extends C
+case object C103 extends C
+case object C104 extends C
+case object C105 extends C
+case object C106 extends C
+case object C107 extends C
+case object C108 extends C
+case object C109 extends C
+case object C110 extends C
+case object C111 extends C
+case object C112 extends C
+case object C113 extends C
+case object C114 extends C
+case object C115 extends C
+case object C116 extends C
+case object C117 extends C
+case object C118 extends C
+case object C119 extends C
+case object C120 extends C
+case object C121 extends C
+case object C122 extends C
+case object C123 extends C
+case object C124 extends C
+case object C125 extends C
+case object C126 extends C
+case object C127 extends C
+case object C128 extends C
+case object C129 extends C
+case object C130 extends C
+case object C131 extends C
+case object C132 extends C
+case object C133 extends C
+case object C134 extends C
+case object C135 extends C
+case object C136 extends C
+case object C137 extends C
+case object C138 extends C
+case object C139 extends C
+case object C140 extends C
+case object C141 extends C
+case object C142 extends C
+case object C143 extends C
+case object C144 extends C
+case object C145 extends C
+case object C146 extends C
+case object C147 extends C
+case object C148 extends C
+case object C149 extends C
+case object C150 extends C
+case object C151 extends C
+case object C152 extends C
+case object C153 extends C
+case object C154 extends C
+case object C155 extends C
+case object C156 extends C
+case object C157 extends C
+case object C158 extends C
+case object C159 extends C
+case object C160 extends C
+case object C161 extends C
+case object C162 extends C
+case object C163 extends C
+case object C164 extends C
+case object C165 extends C
+case object C166 extends C
+case object C167 extends C
+case object C168 extends C
+case object C169 extends C
+case object C170 extends C
+case object C171 extends C
+case object C172 extends C
+case object C173 extends C
+case object C174 extends C
+case object C175 extends C
+case object C176 extends C
+case object C177 extends C
+case object C178 extends C
+case object C179 extends C
+case object C180 extends C
+case object C181 extends C
+case object C182 extends C
+case object C183 extends C
+case object C184 extends C
+case object C185 extends C
+case object C186 extends C
+case object C187 extends C
+case object C188 extends C
+case object C189 extends C
+case object C190 extends C
+case object C191 extends C
+case object C192 extends C
+case object C193 extends C
+case object C194 extends C
+case object C195 extends C
+case object C196 extends C
+case object C197 extends C
+case object C198 extends C
+case object C199 extends C
+case object C200 extends C
+case object C201 extends C
+case object C202 extends C
+case object C203 extends C
+case object C204 extends C
+case object C205 extends C
+case object C206 extends C
+case object C207 extends C
+case object C208 extends C
+case object C209 extends C
+case object C210 extends C
+case object C211 extends C
+case object C212 extends C
+case object C213 extends C
+case object C214 extends C
+case object C215 extends C
+case object C216 extends C
+case object C217 extends C
+case object C218 extends C
+case object C219 extends C
+case object C220 extends C
+case object C221 extends C
+case object C222 extends C
+case object C223 extends C
+case object C224 extends C
+case object C225 extends C
+case object C226 extends C
+case object C227 extends C
+case object C228 extends C
+case object C229 extends C
+case object C230 extends C
+case object C231 extends C
+case object C232 extends C
+case object C233 extends C
+case object C234 extends C
+case object C235 extends C
+case object C236 extends C
+case object C237 extends C
+case object C238 extends C
+case object C239 extends C
+case object C240 extends C
+case object C241 extends C
+case object C242 extends C
+case object C243 extends C
+case object C244 extends C
+case object C245 extends C
+case object C246 extends C
+case object C247 extends C
+case object C248 extends C
+case object C249 extends C
+case object C250 extends C
+case object C251 extends C
+case object C252 extends C
+case object C253 extends C
+case object C254 extends C
+case object C255 extends C
+case object C256 extends C
+case object C257 extends C
+case object C258 extends C
+case object C259 extends C
+case object C260 extends C
+case object C261 extends C
+case object C262 extends C
+case object C263 extends C
+case object C264 extends C
+case object C265 extends C
+case object C266 extends C
+case object C267 extends C
+case object C268 extends C
+case object C269 extends C
+case object C270 extends C
+case object C271 extends C
+case object C272 extends C
+case object C273 extends C
+case object C274 extends C
+case object C275 extends C
+case object C276 extends C
+case object C277 extends C
+case object C278 extends C
+case object C279 extends C
+case object C280 extends C
+case object C281 extends C
+case object C282 extends C
+case object C283 extends C
+case object C284 extends C
+case object C285 extends C
+case object C286 extends C
+case object C287 extends C
+case object C288 extends C
+case object C289 extends C
+case object C290 extends C
+case object C291 extends C
+case object C292 extends C
+case object C293 extends C
+case object C294 extends C
+case object C295 extends C
+case object C296 extends C
+case object C297 extends C
+case object C298 extends C
+case object C299 extends C
+case object C300 extends C
+case object C301 extends C
+case object C302 extends C
+case object C303 extends C
+case object C304 extends C
+case object C305 extends C
+case object C306 extends C
+case object C307 extends C
+case object C308 extends C
+case object C309 extends C
+case object C310 extends C
+case object C311 extends C
+case object C312 extends C
+case object C313 extends C
+case object C314 extends C
+case object C315 extends C
+case object C316 extends C
+case object C317 extends C
+case object C318 extends C
+case object C319 extends C
+case object C320 extends C
+case object C321 extends C
+case object C322 extends C
+case object C323 extends C
+case object C324 extends C
+case object C325 extends C
+case object C326 extends C
+case object C327 extends C
+case object C328 extends C
+case object C329 extends C
+case object C330 extends C
+case object C331 extends C
+case object C332 extends C
+case object C333 extends C
+case object C334 extends C
+case object C335 extends C
+case object C336 extends C
+case object C337 extends C
+case object C338 extends C
+case object C339 extends C
+case object C340 extends C
+case object C341 extends C
+case object C342 extends C
+case object C343 extends C
+case object C344 extends C
+case object C345 extends C
+case object C346 extends C
+case object C347 extends C
+case object C348 extends C
+case object C349 extends C
+case object C350 extends C
+case object C351 extends C
+case object C352 extends C
+case object C353 extends C
+case object C354 extends C
+case object C355 extends C
+case object C356 extends C
+case object C357 extends C
+case object C358 extends C
+case object C359 extends C
+case object C360 extends C
+case object C361 extends C
+case object C362 extends C
+case object C363 extends C
+case object C364 extends C
+case object C365 extends C
+case object C366 extends C
+case object C367 extends C
+case object C368 extends C
+case object C369 extends C
+case object C370 extends C
+case object C371 extends C
+case object C372 extends C
+case object C373 extends C
+case object C374 extends C
+case object C375 extends C
+case object C376 extends C
+case object C377 extends C
+case object C378 extends C
+case object C379 extends C
+case object C380 extends C
+case object C381 extends C
+case object C382 extends C
+case object C383 extends C
+case object C384 extends C
+case object C385 extends C
+case object C386 extends C
+case object C387 extends C
+case object C388 extends C
+case object C389 extends C
+case object C390 extends C
+case object C391 extends C
+case object C392 extends C
+case object C393 extends C
+case object C394 extends C
+case object C395 extends C
+case object C396 extends C
+case object C397 extends C
+case object C398 extends C
+case object C399 extends C
+case object C400 extends C
+
+object M {
+ def f(c: C): Int = c match {
+ case C1 => 1
+ case C2 => 2
+ case C3 => 3
+ case C4 => 4
+ case C5 => 5
+ case C6 => 6
+ case C7 => 7
+ case C8 => 8
+ case C9 => 9
+ case C10 => 10
+ case C11 => 11
+ case C12 => 12
+ case C13 => 13
+ case C14 => 14
+ case C15 => 15
+ case C16 => 16
+ case C17 => 17
+ case C18 => 18
+ case C19 => 19
+ case C20 => 20
+ case C21 => 21
+ case C22 => 22
+ case C23 => 23
+ case C24 => 24
+ case C25 => 25
+ case C26 => 26
+ case C27 => 27
+ case C28 => 28
+ case C29 => 29
+ case C30 => 30
+ case C31 => 31
+ case C32 => 32
+ case C33 => 33
+ case C34 => 34
+ case C35 => 35
+ case C36 => 36
+ case C37 => 37
+ case C38 => 38
+ case C39 => 39
+ case C40 => 40
+ case C41 => 41
+ case C42 => 42
+ case C43 => 43
+ case C44 => 44
+ case C45 => 45
+ case C46 => 46
+ case C47 => 47
+ case C48 => 48
+ case C49 => 49
+ case C50 => 50
+ case C51 => 51
+ case C52 => 52
+ case C53 => 53
+ case C54 => 54
+ case C55 => 55
+ case C56 => 56
+ case C57 => 57
+ case C58 => 58
+ case C59 => 59
+ case C60 => 60
+ case C61 => 61
+ case C62 => 62
+ case C63 => 63
+ case C64 => 64
+ case C65 => 65
+ case C66 => 66
+ case C67 => 67
+ case C68 => 68
+ case C69 => 69
+ case C70 => 70
+ case C71 => 71
+ case C72 => 72
+ case C73 => 73
+ case C74 => 74
+ case C75 => 75
+ case C76 => 76
+ case C77 => 77
+ case C78 => 78
+ case C79 => 79
+ case C80 => 80
+ case C81 => 81
+ case C82 => 82
+ case C83 => 83
+ case C84 => 84
+ case C85 => 85
+ case C86 => 86
+ case C87 => 87
+ case C88 => 88
+ case C89 => 89
+ case C90 => 90
+ case C91 => 91
+ case C92 => 92
+ case C93 => 93
+ case C94 => 94
+ case C95 => 95
+ case C96 => 96
+ case C97 => 97
+ case C98 => 98
+ case C99 => 99
+ case C100 => 100
+ case C101 => 101
+ case C102 => 102
+ case C103 => 103
+ case C104 => 104
+ case C105 => 105
+ case C106 => 106
+ case C107 => 107
+ case C108 => 108
+ case C109 => 109
+ case C110 => 110
+ case C111 => 111
+ case C112 => 112
+ case C113 => 113
+ case C114 => 114
+ case C115 => 115
+ case C116 => 116
+ case C117 => 117
+ case C118 => 118
+ case C119 => 119
+ case C120 => 120
+ case C121 => 121
+ case C122 => 122
+ case C123 => 123
+ case C124 => 124
+ case C125 => 125
+ case C126 => 126
+ case C127 => 127
+ case C128 => 128
+ case C129 => 129
+ case C130 => 130
+ case C131 => 131
+ case C132 => 132
+ case C133 => 133
+ case C134 => 134
+ case C135 => 135
+ case C136 => 136
+ case C137 => 137
+ case C138 => 138
+ case C139 => 139
+ case C140 => 140
+ case C141 => 141
+ case C142 => 142
+ case C143 => 143
+ case C144 => 144
+ case C145 => 145
+ case C146 => 146
+ case C147 => 147
+ case C148 => 148
+ case C149 => 149
+ case C150 => 150
+ case C151 => 151
+ case C152 => 152
+ case C153 => 153
+ case C154 => 154
+ case C155 => 155
+ case C156 => 156
+ case C157 => 157
+ case C158 => 158
+ case C159 => 159
+ case C160 => 160
+ case C161 => 161
+ case C162 => 162
+ case C163 => 163
+ case C164 => 164
+ case C165 => 165
+ case C166 => 166
+ case C167 => 167
+ case C168 => 168
+ case C169 => 169
+ case C170 => 170
+ case C171 => 171
+ case C172 => 172
+ case C173 => 173
+ case C174 => 174
+ case C175 => 175
+ case C176 => 176
+ case C177 => 177
+ case C178 => 178
+ case C179 => 179
+ case C180 => 180
+ case C181 => 181
+ case C182 => 182
+ case C183 => 183
+ case C184 => 184
+ case C185 => 185
+ case C186 => 186
+ case C187 => 187
+ case C188 => 188
+ case C189 => 189
+ case C190 => 190
+ case C191 => 191
+ case C192 => 192
+ case C193 => 193
+ case C194 => 194
+ case C195 => 195
+ case C196 => 196
+ case C197 => 197
+ case C198 => 198
+ case C199 => 199
+ case C200 => 200
+ case C201 => 201
+ case C202 => 202
+ case C203 => 203
+ case C204 => 204
+ case C205 => 205
+ case C206 => 206
+ case C207 => 207
+ case C208 => 208
+ case C209 => 209
+ case C210 => 210
+ case C211 => 211
+ case C212 => 212
+ case C213 => 213
+ case C214 => 214
+ case C215 => 215
+ case C216 => 216
+ case C217 => 217
+ case C218 => 218
+ case C219 => 219
+ case C220 => 220
+ case C221 => 221
+ case C222 => 222
+ case C223 => 223
+ case C224 => 224
+ case C225 => 225
+ case C226 => 226
+ case C227 => 227
+ case C228 => 228
+ case C229 => 229
+ case C230 => 230
+ case C231 => 231
+ case C232 => 232
+ case C233 => 233
+ case C234 => 234
+ case C235 => 235
+ case C236 => 236
+ case C237 => 237
+ case C238 => 238
+ case C239 => 239
+ case C240 => 240
+ case C241 => 241
+ case C242 => 242
+ case C243 => 243
+ case C244 => 244
+ case C245 => 245
+ case C246 => 246
+ case C247 => 247
+ case C248 => 248
+ case C249 => 249
+ case C250 => 250
+ case C251 => 251
+ case C252 => 252
+ case C253 => 253
+ case C254 => 254
+ case C255 => 255
+ case C256 => 256
+ case C257 => 257
+ case C258 => 258
+ case C259 => 259
+ case C260 => 260
+ case C261 => 261
+ case C262 => 262
+ case C263 => 263
+ case C264 => 264
+ case C265 => 265
+ case C266 => 266
+ case C267 => 267
+ case C268 => 268
+ case C269 => 269
+ case C270 => 270
+ case C271 => 271
+ case C272 => 272
+ case C273 => 273
+ case C274 => 274
+ case C275 => 275
+ case C276 => 276
+ case C277 => 277
+ case C278 => 278
+ case C279 => 279
+ case C280 => 280
+ case C281 => 281
+ case C282 => 282
+ case C283 => 283
+ case C284 => 284
+ case C285 => 285
+ case C286 => 286
+ case C287 => 287
+ case C288 => 288
+ case C289 => 289
+ case C290 => 290
+ case C291 => 291
+ case C292 => 292
+ case C293 => 293
+ case C294 => 294
+ case C295 => 295
+ case C296 => 296
+ case C297 => 297
+ case C298 => 298
+ case C299 => 299
+ case C300 => 300
+ case C301 => 301
+ case C302 => 302
+ case C303 => 303
+ case C304 => 304
+ case C305 => 305
+ case C306 => 306
+ case C307 => 307
+ case C308 => 308
+ case C309 => 309
+ case C310 => 310
+ case C311 => 311
+ case C312 => 312
+ case C313 => 313
+ case C314 => 314
+ case C315 => 315
+ case C316 => 316
+ case C317 => 317
+ case C318 => 318
+ case C319 => 319
+ case C320 => 320
+ case C321 => 321
+ case C322 => 322
+ case C323 => 323
+ case C324 => 324
+ case C325 => 325
+ case C326 => 326
+ case C327 => 327
+ case C328 => 328
+ case C329 => 329
+ case C330 => 330
+ case C331 => 331
+ case C332 => 332
+ case C333 => 333
+ case C334 => 334
+ case C335 => 335
+ case C336 => 336
+ case C337 => 337
+ case C338 => 338
+ case C339 => 339
+ case C340 => 340
+ case C341 => 341
+ case C342 => 342
+ case C343 => 343
+ case C344 => 344
+ case C345 => 345
+ case C346 => 346
+ case C347 => 347
+ case C348 => 348
+ case C349 => 349
+ case C350 => 350
+ case C351 => 351
+ case C352 => 352
+ case C353 => 353
+ case C354 => 354
+ case C355 => 355
+ case C356 => 356
+ case C357 => 357
+ case C358 => 358
+ case C359 => 359
+ case C360 => 360
+ case C361 => 361
+ case C362 => 362
+ case C363 => 363
+ case C364 => 364
+ case C365 => 365
+ case C366 => 366
+ case C367 => 367
+ case C368 => 368
+ case C369 => 369
+ case C370 => 370
+ case C371 => 371
+ case C372 => 372
+ case C373 => 373
+ case C374 => 374
+ case C375 => 375
+ case C376 => 376
+ case C377 => 377
+ case C378 => 378
+ case C379 => 379
+ case C380 => 380
+ case C381 => 381
+ case C382 => 382
+ case C383 => 383
+ case C384 => 384
+ case C385 => 385
+ case C386 => 386
+ case C387 => 387
+ case C388 => 388
+ case C389 => 389
+ case C390 => 390
+ case C391 => 391
+// case C392 => 392
+ case C393 => 393
+ case C394 => 394
+ case C395 => 395
+ case C396 => 396
+// case C397 => 397
+ case C398 => 398
+ case C399 => 399
+ case C400 => 400
+ }
+}
diff --git a/tests/patmat/patmatexhaust.check b/tests/patmat/patmatexhaust.check
new file mode 100644
index 000000000..ef2b578d6
--- /dev/null
+++ b/tests/patmat/patmatexhaust.check
@@ -0,0 +1,33 @@
+./tests/patmat/patmatexhaust.scala:7: warning: match may not be exhaustive.
+It would fail on the following input: Baz
+ def ma1(x:Foo) = x match {
+ ^
+./tests/patmat/patmatexhaust.scala:11: warning: match may not be exhaustive.
+It would fail on the following input: Bar(_)
+ def ma2(x:Foo) = x match {
+ ^
+./tests/patmat/patmatexhaust.scala:23: warning: match may not be exhaustive.
+It would fail on the following input: (Qult(), Qult()), (Kult(_), Kult(_))
+ def ma3(x:Mult) = (x,x) match { // not exhaustive
+ ^
+./tests/patmat/patmatexhaust.scala:49: warning: match may not be exhaustive.
+It would fail on the following input: _: Gp
+ def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included
+ ^
+./tests/patmat/patmatexhaust.scala:75: warning: match may not be exhaustive.
+It would fail on the following input: _: B
+ def ma9(x: B) = x match {
+ ^
+./tests/patmat/patmatexhaust.scala:100: warning: match may not be exhaustive.
+It would fail on the following input: _: C1
+ def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
+ ^
+./tests/patmat/patmatexhaust.scala:114: warning: match may not be exhaustive.
+It would fail on the following input: D2(), D1
+ def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
+ ^
+./tests/patmat/patmatexhaust.scala:126: warning: match may not be exhaustive.
+It would fail on the following input: _: C1
+ def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
+ ^
+8 warnings found
\ No newline at end of file
diff --git a/tests/patmat/patmatexhaust.scala b/tests/patmat/patmatexhaust.scala
new file mode 100644
index 000000000..26f0c12a9
--- /dev/null
+++ b/tests/patmat/patmatexhaust.scala
@@ -0,0 +1,131 @@
+class TestSealedExhaustive { // compile only
+ sealed abstract class Foo
+
+ case class Bar(x:Int) extends Foo
+ case object Baz extends Foo
+
+ def ma1(x:Foo) = x match {
+ case Bar(_) => // not exhaustive
+ }
+
+ def ma2(x:Foo) = x match {
+ case Baz => // not exhaustive
+ }
+
+ sealed abstract class Mult
+ case class Kult(s:Mult) extends Mult
+ case class Qult() extends Mult
+
+ def ma33(x:Kult) = x match { // exhaustive
+ case Kult(_) => // exhaustive
+ }
+
+ def ma3(x:Mult) = (x,x) match { // not exhaustive
+ case (Kult(_), Qult()) => // Kult missing
+ //case (Kult(_), Kult(_)) =>
+ case (Qult(), Kult(_)) => // Qult missing
+ //case (Qult(), Qult()) =>
+ }
+
+ def ma3u(x:Mult) = ((x,x) : @unchecked) match { // not exhaustive, but not checked!
+ case (Kult(_), Qult()) =>
+ case (Qult(), Kult(_)) =>
+ }
+
+ sealed abstract class Deep
+
+ case object Ga extends Deep
+ sealed class Gp extends Deep
+ case object Gu extends Gp
+
+ def zma3(x:Deep) = x match { // exhaustive!
+ case _ =>
+ }
+ def zma4(x:Deep) = x match { // exhaustive!
+ case Ga =>
+ case _ =>
+ }
+
+ def ma4(x:Deep) = x match { // missing cases: Gu, Gp which is not abstract so must be included
+ case Ga =>
+ }
+
+ def ma5(x:Deep) = x match {
+ case Gu =>
+ case _ if 1 == 0 =>
+ case Ga =>
+ }
+
+ def ma6() = List(1,2) match { // give up
+ case List(1,2) =>
+ case x :: xs =>
+ }
+
+ def ma7() = List(1,2) match { //exhaustive
+ case 1::2::Nil =>
+ case _ =>
+ }
+
+ sealed class B
+ case class B1() extends B
+ case object B2 extends B
+ def ma8(x: B) = x match {
+ case _: B => true
+ }
+ def ma9(x: B) = x match {
+ case B1() => true // missing B, which is not abstract so must be included
+ case B2 => true
+ }
+
+ object ob1 {
+ sealed abstract class C
+ sealed abstract class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // exhaustive: abstract sealed C1 is dead end.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+
+ object ob2 {
+ sealed abstract class C
+ abstract class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 is not sealed.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+ object ob3 {
+ sealed abstract class C
+ sealed abstract class C1 extends C
+ object D1 extends C1
+ case class D2() extends C1
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 has subclasses.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+ object ob4 {
+ sealed abstract class C
+ sealed class C1 extends C
+ object C2 extends C
+ case class C3() extends C
+ case object C4 extends C
+
+ def ma10(x: C) = x match { // not exhaustive: C1 is not abstract.
+ case C3() => true
+ case C2 | C4 => true
+ }
+ }
+}
diff --git a/tests/patmat/sealed-java-enums.check b/tests/patmat/sealed-java-enums.check
new file mode 100644
index 000000000..ed93d3d40
--- /dev/null
+++ b/tests/patmat/sealed-java-enums.check
@@ -0,0 +1,5 @@
+./tests/patmat/sealed-java-enums.scala:5: warning: match may not be exhaustive.
+It would fail on the following input: TERMINATED, TIMED_WAITING, BLOCKED
+ def f(state: State) = state match {
+ ^
+one warning found
diff --git a/tests/patmat/sealed-java-enums.scala b/tests/patmat/sealed-java-enums.scala
new file mode 100644
index 000000000..2daf93f30
--- /dev/null
+++ b/tests/patmat/sealed-java-enums.scala
@@ -0,0 +1,10 @@
+import java.lang.Thread.State
+import java.lang.Thread.State._
+
+object Test {
+ def f(state: State) = state match {
+ case NEW | WAITING => true
+ case RUNNABLE => false
+ // and I forget the rest
+ }
+}
diff --git a/tests/patmat/t1056.scala b/tests/patmat/t1056.scala
new file mode 100644
index 000000000..68f1ff273
--- /dev/null
+++ b/tests/patmat/t1056.scala
@@ -0,0 +1,5 @@
+object Test {
+ type T = PartialFunction[String,String]
+ def g(h: T) = ()
+ g({case s: String => s})
+}
diff --git a/tests/patmat/t2425.scala b/tests/patmat/t2425.scala
new file mode 100644
index 000000000..477d5467a
--- /dev/null
+++ b/tests/patmat/t2425.scala
@@ -0,0 +1,15 @@
+trait B
+class D extends B
+object Test extends App {
+ def foo[T](bar: T) = {
+ bar match {
+ case _: Array[Array[_]] => println("array 2d")
+ case _: Array[_] => println("array 1d")
+ case _ => println("something else")
+ }
+ }
+ foo(Array.fill(10)(2))
+ foo(Array.fill(10, 10)(2))
+ foo(Array.fill(10, 10, 10)(2))
+ foo(List(1, 2, 3))
+}
diff --git a/tests/patmat/t2442/MyEnum.java b/tests/patmat/t2442/MyEnum.java
new file mode 100644
index 000000000..3ffbbb31b
--- /dev/null
+++ b/tests/patmat/t2442/MyEnum.java
@@ -0,0 +1,3 @@
+public enum MyEnum {
+ ONE, TWO, THREE;
+}
\ No newline at end of file
diff --git a/tests/patmat/t2442/MySecondEnum.java b/tests/patmat/t2442/MySecondEnum.java
new file mode 100644
index 000000000..0f841286d
--- /dev/null
+++ b/tests/patmat/t2442/MySecondEnum.java
@@ -0,0 +1,6 @@
+public enum MySecondEnum {
+ RED(1), BLUE(2) { public void foo() {} };
+ MySecondEnum(int i) {}
+
+ public void foo() {}
+}
\ No newline at end of file
diff --git a/tests/patmat/t2442/expected.check b/tests/patmat/t2442/expected.check
new file mode 100644
index 000000000..33110ce43
--- /dev/null
+++ b/tests/patmat/t2442/expected.check
@@ -0,0 +1,9 @@
+./tests/patmat/t2442/t2442.scala:4: warning: match may not be exhaustive.
+It would fail on the following input: THREE
+ def f(e: MyEnum) = e match {
+ ^
+./tests/patmat/t2442/t2442.scala:11: warning: match may not be exhaustive.
+It would fail on the following input: BLUE
+ def g(e: MySecondEnum) = e match {
+ ^
+two warnings found
diff --git a/tests/patmat/t2442/t2442.scala b/tests/patmat/t2442/t2442.scala
new file mode 100644
index 000000000..b0a0f3cd4
--- /dev/null
+++ b/tests/patmat/t2442/t2442.scala
@@ -0,0 +1,15 @@
+class Test {
+ import MyEnum._
+
+ def f(e: MyEnum) = e match {
+ case ONE => println("one")
+ case TWO => println("two")
+ // missing case --> exhaustivity warning!
+ }
+
+ import MySecondEnum._
+ def g(e: MySecondEnum) = e match {
+ case RED => println("red")
+ // missing case --> exhaustivity warning!
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t3097.scala b/tests/patmat/t3097.scala
new file mode 100644
index 000000000..3ff61b3c7
--- /dev/null
+++ b/tests/patmat/t3097.scala
@@ -0,0 +1,35 @@
+sealed trait ISimpleValue
+
+sealed trait IListValue extends ISimpleValue {
+ def items: List[IAtomicValue[_]]
+}
+
+sealed trait IAtomicValue[O] extends ISimpleValue {
+ def data: O
+}
+
+sealed trait IAbstractDoubleValue[O] extends IAtomicValue[O] {
+}
+
+sealed trait IDoubleValue extends IAbstractDoubleValue[Double]
+
+case class ListValue(val items: List[IAtomicValue[_]]) extends IListValue
+
+class DoubleValue(val data: Double) extends IDoubleValue {
+ def asDouble = data
+}
+
+object Test {
+
+ /**
+ * @param args the command line arguments
+ */
+ def main(args: Array[String]): Unit = {
+ val v: ISimpleValue = new DoubleValue(1)
+ v match {
+ case m: IListValue => println("list")
+ case a: IAtomicValue[_] => println("atomic")
+ }
+
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t3098/a.scala b/tests/patmat/t3098/a.scala
new file mode 100644
index 000000000..57a103c7a
--- /dev/null
+++ b/tests/patmat/t3098/a.scala
@@ -0,0 +1,6 @@
+// Traits.scala
+sealed trait T
+
+trait A extends T
+trait B extends T
+trait C extends T
diff --git a/tests/patmat/t3098/b.scala b/tests/patmat/t3098/b.scala
new file mode 100644
index 000000000..84a1f9f6f
--- /dev/null
+++ b/tests/patmat/t3098/b.scala
@@ -0,0 +1,8 @@
+// Test.scala
+object Test {
+ def f = (null: T) match {
+ case _: A => println("A")
+ case _: B => println("B")
+ // no C
+ }
+}
diff --git a/tests/patmat/t3098/expected.check b/tests/patmat/t3098/expected.check
new file mode 100644
index 000000000..331904111
--- /dev/null
+++ b/tests/patmat/t3098/expected.check
@@ -0,0 +1,5 @@
+./tests/patmat/t3098/b.scala:3: warning: match may not be exhaustive.
+It would fail on the following input: _: C
+ def f = (null: T) match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t3111.check b/tests/patmat/t3111.check
new file mode 100644
index 000000000..46ff0a6a9
--- /dev/null
+++ b/tests/patmat/t3111.check
@@ -0,0 +1,8 @@
+./tests/patmat/t3111.scala:4: warning: match may not be exhaustive.
+It would fail on the following input: false
+ bool match {
+ ^
+./tests/patmat/t3111.scala:11: warning: unreachable code
+ case _ => "cats and dogs living together... mass hysteria!"
+ ^
+two warnings found
\ No newline at end of file
diff --git a/tests/patmat/t3111.scala b/tests/patmat/t3111.scala
new file mode 100644
index 000000000..8f2bc5a27
--- /dev/null
+++ b/tests/patmat/t3111.scala
@@ -0,0 +1,13 @@
+object Test {
+ val bool: Boolean = false
+
+ bool match {
+ case true => "true!"
+ }
+
+ bool match {
+ case true => "true!"
+ case false => "false!"
+ case _ => "cats and dogs living together... mass hysteria!"
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t3163.check b/tests/patmat/t3163.check
new file mode 100644
index 000000000..3da94e2c2
--- /dev/null
+++ b/tests/patmat/t3163.check
@@ -0,0 +1,5 @@
+./tests/patmat/t3163.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: _: AnyVal
+ def foo(x : AnyVal) = x match {case b : Boolean => "It's a bool"}
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t3163.scala b/tests/patmat/t3163.scala
new file mode 100644
index 000000000..2e0f2c1d9
--- /dev/null
+++ b/tests/patmat/t3163.scala
@@ -0,0 +1,3 @@
+object Test {
+ def foo(x : AnyVal) = x match {case b : Boolean => "It's a bool"}
+}
\ No newline at end of file
diff --git a/tests/patmat/t3683.scala b/tests/patmat/t3683.scala
new file mode 100644
index 000000000..44be9d6c6
--- /dev/null
+++ b/tests/patmat/t3683.scala
@@ -0,0 +1,19 @@
+sealed trait Foo
+sealed trait Bar extends Foo
+sealed trait W[T >: Bar <: Foo]
+sealed case class X() extends W[Foo]
+sealed case class Y() extends W[Bar]
+sealed case class Z[T >: Bar <: Foo](
+ z1: W[T]
+) extends W[T]
+
+object Main {
+ def func(w: W[Bar]): Int = {
+ w match {
+ // Error if I include it, warning if I do not!
+ // case X() => 2
+ case Y() => 1
+ case Z(z) => func(z)
+ }
+ }
+}
diff --git a/tests/patmat/t3683a.check b/tests/patmat/t3683a.check
new file mode 100644
index 000000000..df5e691c6
--- /dev/null
+++ b/tests/patmat/t3683a.check
@@ -0,0 +1,5 @@
+./tests/patmat/t3683a.scala:14: warning: match may not be exhaustive.
+It would fail on the following input: XX()
+ w match {
+ ^
+one warning found
diff --git a/tests/patmat/t3683a.scala b/tests/patmat/t3683a.scala
new file mode 100644
index 000000000..6d1915213
--- /dev/null
+++ b/tests/patmat/t3683a.scala
@@ -0,0 +1,20 @@
+sealed trait Foo
+sealed trait Bar extends Foo
+sealed trait W[T >: Bar <: Foo]
+case class X() extends W[Foo]
+case class XX() extends W[Bar]
+case class Y() extends W[Bar]
+case class Z[T >: Bar <: Foo](
+ z1: W[T]
+) extends W[T]
+
+object Main {
+ // should warn for not including XX()
+ def f1(w: W[Bar]): Int = {
+ w match {
+ // case XX() => 2
+ case Y() => 1
+ case Z(z) => f1(z)
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t4020.scala b/tests/patmat/t4020.scala
new file mode 100644
index 000000000..f97646019
--- /dev/null
+++ b/tests/patmat/t4020.scala
@@ -0,0 +1,25 @@
+class A {
+ sealed trait Foo
+}
+
+object a1 extends A {
+ case class Foo1(i: Int) extends Foo
+}
+
+object a2 extends A {
+ case class Foo2(i: Int) extends Foo
+}
+
+class B {
+ def mthd(foo: a2.Foo) = {
+ foo match {
+ case a2.Foo2(i) => i
+
+ // Note: This case is impossible. In fact, scalac
+ // will (correctly) report an error if it is uncommented,
+ // but a warning if it is commented.
+
+ // case a1.Foo1(i) => i
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t4333.scala.ignore b/tests/patmat/t4333.scala.ignore
new file mode 100644
index 000000000..07d105c74
--- /dev/null
+++ b/tests/patmat/t4333.scala.ignore
@@ -0,0 +1,7 @@
+object Enum extends Enumeration { val A, B, C = Value }
+
+object Test {
+ def foo(v : Enum.Value) = v match {
+ case Enum.B => println("B")
+ }
+}
diff --git a/tests/patmat/t4408.check b/tests/patmat/t4408.check
new file mode 100644
index 000000000..53bfe1c2c
--- /dev/null
+++ b/tests/patmat/t4408.check
@@ -0,0 +1,5 @@
+./tests/patmat/t4408.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: List(_, _, _)
+ def printList(in: List[String]): Unit = in match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t4408.scala b/tests/patmat/t4408.scala
new file mode 100644
index 000000000..419b66369
--- /dev/null
+++ b/tests/patmat/t4408.scala
@@ -0,0 +1,16 @@
+object Test {
+ def printList(in: List[String]): Unit = in match {
+ case Nil => Unit
+
+ case (s: String) :: Nil =>
+ println(s)
+
+ case head :: (s: String) :: Nil =>
+ printList(head :: Nil)
+ for(i <- head){
+ print(i)
+ }
+ println
+ println(s)
+ }
+}
diff --git a/tests/patmat/t4526.check b/tests/patmat/t4526.check
new file mode 100644
index 000000000..b577cbc0c
--- /dev/null
+++ b/tests/patmat/t4526.check
@@ -0,0 +1,13 @@
+./tests/patmat/t4526.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: _: Int
+ def foo(a: Int) = a match {
+ ^
+./tests/patmat/t4526.scala:7: warning: match may not be exhaustive.
+It would fail on the following input: (_, _)
+ def bar(a: (Int, Int)) = a match {
+ ^
+./tests/patmat/t4526.scala:12: warning: match may not be exhaustive.
+It would fail on the following input: (false, false), (true, true)
+ def baz(a: (Boolean, Boolean)) = a match {
+ ^
+three warnings found
\ No newline at end of file
diff --git a/tests/patmat/t4526.scala b/tests/patmat/t4526.scala
new file mode 100644
index 000000000..d531c6b34
--- /dev/null
+++ b/tests/patmat/t4526.scala
@@ -0,0 +1,16 @@
+object Test{
+ def foo(a: Int) = a match {
+ case 5 => "Five!"
+ case 42 => "The answer."
+ }
+
+ def bar(a: (Int, Int)) = a match {
+ case (5, 5) => "Two fives!"
+ case (42, 21) => "The answer and a half."
+ }
+
+ def baz(a: (Boolean, Boolean)) = a match {
+ case (true, false) => "tf"
+ case (false, true) => "ft"
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t4691.check b/tests/patmat/t4691.check
new file mode 100644
index 000000000..4d2c24506
--- /dev/null
+++ b/tests/patmat/t4691.check
@@ -0,0 +1,5 @@
+./tests/patmat/t4691.scala:15: warning: match may not be exhaustive.
+It would fail on the following input: NodeType2(_)
+ def test (x: Node) = x match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t4691.scala b/tests/patmat/t4691.scala
new file mode 100644
index 000000000..bfaa61670
--- /dev/null
+++ b/tests/patmat/t4691.scala
@@ -0,0 +1,18 @@
+sealed trait Node
+
+class NodeType1 (val a:Int) extends Node
+class NodeType2 (val b:Int) extends Node
+
+object NodeType1 {
+ def unapply (x : NodeType1) : Some[Int] = Some(x.a)
+}
+
+object NodeType2 {
+ def unapply (x : NodeType2) : Some[Int] = Some(x.b)
+}
+
+object Test {
+ def test (x: Node) = x match {
+ case NodeType1(a) => "got node type 1 " + a
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t4691_exhaust_extractor.check b/tests/patmat/t4691_exhaust_extractor.check
new file mode 100644
index 000000000..e7d1e17f9
--- /dev/null
+++ b/tests/patmat/t4691_exhaust_extractor.check
@@ -0,0 +1,13 @@
+./tests/patmat/t4691_exhaust_extractor.scala:17: warning: match may not be exhaustive.
+It would fail on the following input: _: Bar3
+ def f1(x: Foo) = x match {
+ ^
+./tests/patmat/t4691_exhaust_extractor.scala:23: warning: match may not be exhaustive.
+It would fail on the following input: _: Bar3
+ def f2(x: Foo) = x match {
+ ^
+./tests/patmat/t4691_exhaust_extractor.scala:29: warning: match may not be exhaustive.
+It would fail on the following input: _: Bar3
+ def f3(x: Foo) = x match {
+ ^
+three warnings found
diff --git a/tests/patmat/t4691_exhaust_extractor.scala b/tests/patmat/t4691_exhaust_extractor.scala
new file mode 100644
index 000000000..c68c33d65
--- /dev/null
+++ b/tests/patmat/t4691_exhaust_extractor.scala
@@ -0,0 +1,33 @@
+sealed trait Foo
+class Bar1 extends Foo
+class Bar2 extends Foo
+class Bar3 extends Foo
+
+// these extractors are known to always succeed as they return a Some
+object Baz1 {
+ def unapply(x: Bar1): Some[Int] = Some(1)
+}
+object Baz2 {
+ def unapply(x: Bar2): Some[Int] = Some(2)
+}
+
+
+object Test {
+ // warning: missing Bar3
+ def f1(x: Foo) = x match {
+ case _: Bar1 => 1
+ case _: Bar2 => 2
+ }
+
+ // warning: missing Bar3
+ def f2(x: Foo) = x match {
+ case _: Bar1 => 1
+ case Baz2(x) => x
+ }
+
+ // warning: missing Bar3
+ def f3(x: Foo) = x match {
+ case Baz1(x) => x
+ case Baz2(x) => x
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t5440.check b/tests/patmat/t5440.check
new file mode 100644
index 000000000..0780d6529
--- /dev/null
+++ b/tests/patmat/t5440.check
@@ -0,0 +1,5 @@
+./tests/patmat/t5440.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: (Nil, List(_)), (List(_), Nil)
+ def merge(list1: List[Long], list2: List[Long]): Boolean = (list1, list2) match {
+ ^
+one warning found
diff --git a/tests/patmat/t5440.scala b/tests/patmat/t5440.scala
new file mode 100644
index 000000000..6721b0562
--- /dev/null
+++ b/tests/patmat/t5440.scala
@@ -0,0 +1,6 @@
+object Test {
+ def merge(list1: List[Long], list2: List[Long]): Boolean = (list1, list2) match {
+ case (hd1::_, hd2::_) => true
+ case (Nil, Nil) => true
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t5968.scala b/tests/patmat/t5968.scala
new file mode 100644
index 000000000..14cc903c8
--- /dev/null
+++ b/tests/patmat/t5968.scala
@@ -0,0 +1,7 @@
+object Test {
+ object X
+ def f(e: Either[Int, X.type]) = e match {
+ case Left(i) => i
+ case Right(X) => 0
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t6008.scala b/tests/patmat/t6008.scala
new file mode 100644
index 000000000..c42e9c5a5
--- /dev/null
+++ b/tests/patmat/t6008.scala
@@ -0,0 +1,5 @@
+object Test {
+ def x(in: (Int, Boolean)) = in match {
+ case (i: Int, b: Boolean) => 3
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t6146.scala b/tests/patmat/t6146.scala
new file mode 100644
index 000000000..b5bde826b
--- /dev/null
+++ b/tests/patmat/t6146.scala
@@ -0,0 +1,60 @@
+// No unreachable or exhaustiveness warnings, please.
+
+//
+// The reported bug
+//
+
+trait AxisCompanion {
+ sealed trait Format
+ object Format {
+ case object Decimal extends Format
+ case object Integer extends Format
+ // Gives an unrelated warning: The outer reference in this type test cannot be checked at run time.
+ //final case class Time( hours: Boolean = false, millis: Boolean = true ) extends Format
+ }
+}
+object Axis extends AxisCompanion
+class Axis {
+ import Axis._
+ def test( f: Format ) = f match {
+ case Format.Integer => "Int"
+ // case Format.Time( hours, millis ) => "Time"
+ case Format.Decimal => "Dec"
+ }
+}
+
+
+//
+// Some tricksier variations
+//
+
+trait T1[X] {
+ trait T2[Y] {
+ sealed trait Format
+ object Format {
+ case object Decimal extends Format
+ case object Integer extends Format
+ }
+ }
+}
+
+object O1 extends T1[Any] {
+ object O2 extends T2[Any] {
+
+ }
+}
+
+case object Shorty extends O1.O2.Format
+
+class Test1 {
+ import O1.O2._
+ val FI: Format.Integer.type = Format.Integer
+ def test( f: Format ) = {
+ val ff: f.type = f
+ ff match {
+ case FI => "Int"
+ case Format.Decimal => "Dec"
+ case Shorty => "Sho"
+ }
+ }
+}
diff --git a/tests/patmat/t6420.check b/tests/patmat/t6420.check
new file mode 100644
index 000000000..c62b33d18
--- /dev/null
+++ b/tests/patmat/t6420.check
@@ -0,0 +1,5 @@
+./tests/patmat/t6420.scala:5: warning: match may not be exhaustive.
+It would fail on the following input: (Nil, _), (List(_, _), _), (Nil, Nil), (Nil, List(_, _)), (List(_, _), Nil), (List(_, _), List(_, _)), (_, Nil), (_, List(_, _))
+ def foo(x: List[Boolean], y: List[Boolean]) = (x,y) match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t6420.scala b/tests/patmat/t6420.scala
new file mode 100644
index 000000000..80c0f90f6
--- /dev/null
+++ b/tests/patmat/t6420.scala
@@ -0,0 +1,11 @@
+object Test {
+ val c0 = false
+ val c1 = true
+
+ def foo(x: List[Boolean], y: List[Boolean]) = (x,y) match {
+ case (`c0`::x, `c0`::y) => x
+ case (`c0`::x, `c1`::y) => y
+ case (`c1`::x, `c0`::y) => y
+ case (`c1`::x, `c1`::y) => x
+ }
+}
diff --git a/tests/patmat/t6450.scala b/tests/patmat/t6450.scala
new file mode 100644
index 000000000..157f1ce81
--- /dev/null
+++ b/tests/patmat/t6450.scala
@@ -0,0 +1,9 @@
+sealed abstract class FoundNode[T]
+// case class A[T](x: T) extends FoundNode[T]
+
+object Foo {
+ val v: (Some[_], FoundNode[_]) = (???, ???)
+ v match {
+ case (x: Some[t], _) =>
+ }
+}
diff --git a/tests/patmat/t6582_exhaust_big.check b/tests/patmat/t6582_exhaust_big.check
new file mode 100644
index 000000000..c244e5ba5
--- /dev/null
+++ b/tests/patmat/t6582_exhaust_big.check
@@ -0,0 +1,5 @@
+./tests/patmat/t6582_exhaust_big.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: Z.Z11()
+ def foo(z: Z) = z match {
+ ^
+one warning found
diff --git a/tests/patmat/t6582_exhaust_big.scala b/tests/patmat/t6582_exhaust_big.scala
new file mode 100644
index 000000000..dd639eb56
--- /dev/null
+++ b/tests/patmat/t6582_exhaust_big.scala
@@ -0,0 +1,32 @@
+sealed abstract class Z
+object Z {
+ object Z0 extends Z
+ case class Z1() extends Z
+ object Z2 extends Z
+ case class Z3() extends Z
+ object Z4 extends Z
+ case class Z5() extends Z
+ object Z6 extends Z
+ case class Z7() extends Z
+ object Z8 extends Z
+ case class Z9() extends Z
+ object Z10 extends Z
+ case class Z11() extends Z
+ object Z12 extends Z
+ case class Z13() extends Z
+ object Z14 extends Z
+ case class Z15() extends Z
+ object Z16 extends Z
+ case class Z17() extends Z
+ object Z18 extends Z
+ case class Z19() extends Z
+}
+
+object Test {
+ import Z._
+ def foo(z: Z) = z match {
+ case Z0 | Z1() | Z2 | Z3() | Z4 | Z5() | Z6 | Z7() | Z8 | Z9() |
+ Z10 | Z12 | Z13() | Z14 | Z15() | Z16 | Z17() | Z18 | Z19()
+ =>
+ }
+}
diff --git a/tests/patmat/t6818.scala b/tests/patmat/t6818.scala
new file mode 100644
index 000000000..2334095c4
--- /dev/null
+++ b/tests/patmat/t6818.scala
@@ -0,0 +1,11 @@
+object Test {
+ type Id[X] = X
+
+ def foo(x:Id[Option[Int]]) = x match {
+ case Some(n) => "foo"
+ case None => "bar"
+ }
+
+ foo(Some(3)) // "foo"
+ foo(None) // "bar"
+}
\ No newline at end of file
diff --git a/tests/patmat/t7020.check b/tests/patmat/t7020.check
new file mode 100644
index 000000000..c091535ae
--- /dev/null
+++ b/tests/patmat/t7020.check
@@ -0,0 +1,17 @@
+./tests/patmat/t7020.scala:3: warning: match may not be exhaustive.
+It would fail on the following input: List(_, _)
+ List(5) match {
+ ^
+./tests/patmat/t7020.scala:10: warning: match may not be exhaustive.
+It would fail on the following input: List(_, _)
+ List(5) match {
+ ^
+./tests/patmat/t7020.scala:17: warning: match may not be exhaustive.
+It would fail on the following input: List(_, _)
+ List(5) match {
+ ^
+./tests/patmat/t7020.scala:24: warning: match may not be exhaustive.
+It would fail on the following input: List(_, _)
+ List(5) match {
+ ^
+four warnings found
diff --git a/tests/patmat/t7020.scala b/tests/patmat/t7020.scala
new file mode 100644
index 000000000..cc5421bab
--- /dev/null
+++ b/tests/patmat/t7020.scala
@@ -0,0 +1,30 @@
+object Test {
+ // warning was non-deterministic
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+
+ List(5) match {
+ case 1 :: Nil | 2 :: Nil =>
+ case (x@(4 | 5 | 6)) :: Nil =>
+ case 7 :: Nil =>
+ case Nil =>
+ }
+}
diff --git a/tests/patmat/t7206.scala.ignore b/tests/patmat/t7206.scala.ignore
new file mode 100644
index 000000000..0133f1808
--- /dev/null
+++ b/tests/patmat/t7206.scala.ignore
@@ -0,0 +1,19 @@
+object E extends Enumeration {
+ val V = Value
+}
+
+sealed case class C(e: E.Value)
+
+class Test {
+ def foo(c: C) = {
+ c match {
+ case C(E.V) => {}
+ }
+ }
+
+ def foo2(e: E.Value) = {
+ e match {
+ case E.V => {}
+ }
+ }
+}
diff --git a/tests/patmat/t7285.check b/tests/patmat/t7285.check
new file mode 100644
index 000000000..703706cdc
--- /dev/null
+++ b/tests/patmat/t7285.check
@@ -0,0 +1,13 @@
+./tests/patmat/t7285.scala:15: warning: match may not be exhaustive.
+It would fail on the following input: (Up, Down)
+ (d1, d2) match {
+ ^
+./tests/patmat/t7285.scala:33: warning: match may not be exhaustive.
+It would fail on the following input: Down
+ (d1) match {
+ ^
+./tests/patmat/t7285.scala:51: warning: match may not be exhaustive.
+It would fail on the following input: (Base.Up, Base.Down)
+ (d1, d2) match {
+ ^
+three warnings found
\ No newline at end of file
diff --git a/tests/patmat/t7285.scala b/tests/patmat/t7285.scala
new file mode 100644
index 000000000..d40df7fe8
--- /dev/null
+++ b/tests/patmat/t7285.scala
@@ -0,0 +1,55 @@
+sealed abstract class Base
+
+
+object Test1 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ def foo(d1: Base, d2: Base) =
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Up) => true
+ }
+ }
+}
+
+object Test2 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ def foo(d1: Base, d2: Base) =
+ (d1) match {
+ case Test2.Base.Up => false
+ }
+ }
+}
+
+
+object Test4 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base
+
+ case object Up extends Base
+ }
+
+ import Test4.Base._
+ def foo(d1: Base, d2: Base) =
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Test4.Base.Up) => true
+ }
+}
diff --git a/tests/patmat/t7285a.scala b/tests/patmat/t7285a.scala
new file mode 100644
index 000000000..49f6b663b
--- /dev/null
+++ b/tests/patmat/t7285a.scala
@@ -0,0 +1,83 @@
+sealed abstract class Base
+
+object Test {
+ case object Up extends Base
+
+ def foo(d1: Base) =
+ d1 match {
+ case Up =>
+ }
+
+ // Sealed subtype: ModuleTypeRef <empty>.this.Test.Up.type
+ // Pattern: UniqueThisType Test.this.type
+}
+
+
+object Test1 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ def foo(d1: Base, d2: Base) =
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Up) => true
+ case (Up, Down) => false
+ }
+ }
+}
+
+object Test2 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ def foo(d1: Base, d2: Base) =
+ (d1) match {
+ case Up | Down => false
+ }
+ }
+}
+
+object Test3 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base
+
+ def foo(d1: Base, d2: Base) =
+ (d1, d2) match {
+ case (Down, Down) => false
+ }
+ }
+}
+
+object Test4 {
+ sealed abstract class Base
+
+ object Base {
+ case object Down extends Base {
+ }
+
+ case object Up extends Base {
+ }
+
+ }
+ import Test4.Base._
+ def foo(d1: Base, d2: Base) =
+ (d1, d2) match {
+ case (Up, Up) | (Down, Down) => false
+ case (Down, Test4.Base.Up) => true
+ case (Up, Down) => false
+ }
+}
diff --git a/tests/patmat/t7298.scala b/tests/patmat/t7298.scala
new file mode 100644
index 000000000..6fba5e120
--- /dev/null
+++ b/tests/patmat/t7298.scala
@@ -0,0 +1,11 @@
+sealed trait Bool
+
+object Bool {
+ case object FALSE extends Bool
+ case object TRUE extends Bool
+
+ def show(b: Bool) = b match {
+ case FALSE => "1"
+ case TRUE => "2"
+ }
+}
diff --git a/tests/patmat/t7353.scala b/tests/patmat/t7353.scala
new file mode 100644
index 000000000..7a8fea115
--- /dev/null
+++ b/tests/patmat/t7353.scala
@@ -0,0 +1,11 @@
+sealed trait EthernetType
+
+object EthernetType {
+ final case object Gigabit extends EthernetType
+ final case object FastEthernet extends EthernetType
+
+ final def toInt(t: EthernetType) = t match {
+ case Gigabit => 1
+ case FastEthernet => 2
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t7437.scala b/tests/patmat/t7437.scala
new file mode 100644
index 000000000..b0c5dff7c
--- /dev/null
+++ b/tests/patmat/t7437.scala
@@ -0,0 +1,17 @@
+sealed trait IntegralNumber
+sealed trait FiniteNumber extends IntegralNumber
+
+object IntegralNumber {
+
+ sealed abstract class BaseNumber extends IntegralNumber
+ sealed abstract class NonFinite extends BaseNumber
+ object NaN extends NonFinite
+ sealed abstract class FiniteNumberImpl[N](val value: N) extends BaseNumber with FiniteNumber
+ sealed class IntNumber(value: Int) extends FiniteNumberImpl[Int](value)
+
+ def test(t: IntNumber, o: IntegralNumber) = o match {
+ case NaN => -1
+ case o: IntNumber => t.value.compare(o.value)
+ }
+
+}
\ No newline at end of file
diff --git a/tests/patmat/t7466.check b/tests/patmat/t7466.check
new file mode 100644
index 000000000..8e575f6a2
--- /dev/null
+++ b/tests/patmat/t7466.check
@@ -0,0 +1,5 @@
+./tests/patmat/t7466.scala:8: warning: match may not be exhaustive.
+It would fail on the following input: (_, _)
+ (b1, b2) match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t7466.scala b/tests/patmat/t7466.scala
new file mode 100644
index 000000000..a74bf4ee2
--- /dev/null
+++ b/tests/patmat/t7466.scala
@@ -0,0 +1,17 @@
+object Test extends App {
+ val Yes1 = true
+ val Yes2 = true
+ val No1 = false
+ val No2 = false
+
+ def test(b1: Boolean, b2: Boolean) = {
+ (b1, b2) match {
+ case (No1, No2) => println("1")
+ case (No1, Yes2) => println("2")
+ case (Yes1, No2) => println("3")
+ case (Yes1, Yes2) => println("4")
+ }
+ }
+
+ test(No1, Yes2)
+}
\ No newline at end of file
diff --git a/tests/patmat/t7631.check b/tests/patmat/t7631.check
new file mode 100644
index 000000000..ede3703e2
--- /dev/null
+++ b/tests/patmat/t7631.check
@@ -0,0 +1,5 @@
+./tests/patmat/t7631.scala:8: warning: match may not be exhaustive.
+It would fail on the following input: TestB()
+ val x = input match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t7631.scala b/tests/patmat/t7631.scala
new file mode 100644
index 000000000..13e74183f
--- /dev/null
+++ b/tests/patmat/t7631.scala
@@ -0,0 +1,11 @@
+sealed trait Test
+case class TestA() extends Test
+case class TestB() extends Test
+
+object Tester {
+ val input : Test = TestA()
+ val num = 3
+ val x = input match {
+ case TestA() if num == 3 => 2
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t7669.check b/tests/patmat/t7669.check
new file mode 100644
index 000000000..2804dbf5c
--- /dev/null
+++ b/tests/patmat/t7669.check
@@ -0,0 +1,5 @@
+./tests/patmat/t7669.scala:10: warning: match may not be exhaustive.
+It would fail on the following input: NotHandled(_)
+ def exhausto(expr: Expr): Unit = expr match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t7669.scala b/tests/patmat/t7669.scala
new file mode 100644
index 000000000..3aa74129e
--- /dev/null
+++ b/tests/patmat/t7669.scala
@@ -0,0 +1,14 @@
+object Test {
+
+ sealed abstract class Expr
+ // Change type of `arg` to `Any` and the exhaustiveness warning
+ // is issued below
+ case class Op(arg: Expr) extends Expr
+ case class NotHandled(num: Double) extends Expr
+
+
+ def exhausto(expr: Expr): Unit = expr match {
+ case Op(Op(_)) =>
+ case Op(_) =>
+ }
+}
diff --git a/tests/patmat/t7746.check b/tests/patmat/t7746.check
new file mode 100644
index 000000000..be4c53570
--- /dev/null
+++ b/tests/patmat/t7746.check
@@ -0,0 +1,5 @@
+./tests/patmat/t7746.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: Some(_), None
+ def f[T](x: Option[T]) = x match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t7746.scala b/tests/patmat/t7746.scala
new file mode 100644
index 000000000..91f3823a4
--- /dev/null
+++ b/tests/patmat/t7746.scala
@@ -0,0 +1,5 @@
+object Test {
+ def f[T](x: Option[T]) = x match {
+ case Some(Some(5)) => true
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t8068.scala b/tests/patmat/t8068.scala
new file mode 100644
index 000000000..9837b7381
--- /dev/null
+++ b/tests/patmat/t8068.scala
@@ -0,0 +1,14 @@
+trait K[A] {
+ sealed trait T
+ case class C(x: Int) extends T
+ case object O extends T
+}
+
+object Hello {
+ def f[A](k: K[A])(t: k.T) = {
+ t match {
+ case k.C(x) => ???
+ case k.O => ???
+ }
+ }
+}
diff --git a/tests/patmat/t8178.check b/tests/patmat/t8178.check
new file mode 100644
index 000000000..963845f53
--- /dev/null
+++ b/tests/patmat/t8178.check
@@ -0,0 +1,13 @@
+./tests/patmat/t8178.scala:6: warning: match may not be exhaustive.
+It would fail on the following input: FailsChild2(_)
+ f match {
+ ^
+./tests/patmat/t8178.scala:14: warning: match may not be exhaustive.
+It would fail on the following input: VarArgs1(_)
+ f match {
+ ^
+./tests/patmat/t8178.scala:27: warning: match may not be exhaustive.
+It would fail on the following input: SeqArgs2(_)
+ f match {
+ ^
+three warnings found
\ No newline at end of file
diff --git a/tests/patmat/t8178.scala b/tests/patmat/t8178.scala
new file mode 100644
index 000000000..4fb39955b
--- /dev/null
+++ b/tests/patmat/t8178.scala
@@ -0,0 +1,33 @@
+sealed trait Fails
+case class VarArgs1(a: String*) extends Fails
+case class FailsChild2(a: Seq[String]) extends Fails
+object FailsTest {
+ def matchOnVarArgsFirstFails(f: Fails) = {
+ f match {
+ case VarArgs1(_) => ???
+ // BUG: Without this line we should get a non-exhaustive match compiler error.
+ //case FailsChild2(_) => ???
+ }
+ }
+
+ def matchOnSeqArgsFirstWorks(f: Fails) = {
+ f match {
+ case FailsChild2(_) => ???
+ // Without this line, the compiler reports a "match may not be exhaustive" error as expected.
+ // case VarArgs1(_) => ???
+ }
+ }
+}
+
+sealed trait Works
+case class SeqArgs1(a: Seq[String]) extends Works
+case class SeqArgs2(a: Seq[String]) extends Works
+object WorksTest {
+ def matcher(f: Works) = {
+ f match {
+ case SeqArgs1(_) => ???
+ // Without this line, the compiler reports a "match may not be exhaustive" error as expected.
+ // case SeqArgs2(_) => ???
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t8412.check b/tests/patmat/t8412.check
new file mode 100644
index 000000000..b82b33999
--- /dev/null
+++ b/tests/patmat/t8412.check
@@ -0,0 +1,5 @@
+./tests/patmat/t8412.scala:7: warning: match may not be exhaustive.
+It would fail on the following input: Lit(_)
+ tree match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t8412.scala b/tests/patmat/t8412.scala
new file mode 100644
index 000000000..f4b2b6090
--- /dev/null
+++ b/tests/patmat/t8412.scala
@@ -0,0 +1,14 @@
+sealed trait Tree
+case class Let(sth: List[Any]) extends Tree
+case class Lit(sth: Any) extends Tree
+
+object Test {
+ def wroong(tree: Tree) =
+ tree match {
+ case Let(_ :: rest) =>
+ ???
+ case Let(Nil) =>
+ ???
+ // no warning for missing Lit(_) in 2.10
+ }
+}
diff --git a/tests/patmat/t8430.check b/tests/patmat/t8430.check
new file mode 100644
index 000000000..4493062bf
--- /dev/null
+++ b/tests/patmat/t8430.check
@@ -0,0 +1,5 @@
+./tests/patmat/t8430.scala:15: warning: match may not be exhaustive.
+It would fail on the following input: LetF, LetC, LetP, LetL(UnitLit), LetL(BooleanLit), LetL(IntLit)
+ def transform(tree: Tree) : Any = tree match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t8430.scala b/tests/patmat/t8430.scala
new file mode 100644
index 000000000..ccd4585d9
--- /dev/null
+++ b/tests/patmat/t8430.scala
@@ -0,0 +1,19 @@
+sealed trait CL3Literal
+case object IntLit extends CL3Literal
+case object CharLit extends CL3Literal
+case object BooleanLit extends CL3Literal
+case object UnitLit extends CL3Literal
+
+
+sealed trait Tree
+case class LetL(value: CL3Literal) extends Tree
+case object LetP extends Tree
+case object LetC extends Tree
+case object LetF extends Tree
+
+object Test {
+ def transform(tree: Tree) : Any = tree match {
+ case LetL(CharLit) =>
+ ???
+ }
+}
diff --git a/tests/patmat/t8511.check b/tests/patmat/t8511.check
new file mode 100644
index 000000000..df07d019a
--- /dev/null
+++ b/tests/patmat/t8511.check
@@ -0,0 +1,5 @@
+./tests/patmat/t8511.scala:18: warning: match may not be exhaustive.
+It would fail on the following input: Baz(), Bar(_)
+ private def logic(head: Expr): String = head match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t8511.scala b/tests/patmat/t8511.scala
new file mode 100644
index 000000000..bc7f64713
--- /dev/null
+++ b/tests/patmat/t8511.scala
@@ -0,0 +1,25 @@
+sealed trait Expr
+final case class Foo(other: Option[String]) extends Expr
+final case class Bar(someConstant: String) extends Expr
+final case class Baz() extends Expr
+final case class EatsExhaustiveWarning(other: Reference) extends Expr
+
+sealed trait Reference {
+ val value: String
+}
+
+object Reference {
+ def unapply(reference: Reference): Option[(String)] = {
+ Some(reference.value)
+ }
+}
+
+object EntryPoint {
+ private def logic(head: Expr): String = head match {
+ case Foo(_) =>
+ ???
+ // Commenting this line only causes the exhaustive search warning to be emitted
+ case EatsExhaustiveWarning(Reference(text)) =>
+ ???
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t8546.scala b/tests/patmat/t8546.scala
new file mode 100644
index 000000000..c39d749b4
--- /dev/null
+++ b/tests/patmat/t8546.scala
@@ -0,0 +1,49 @@
+package test
+
+class F1() {
+ private sealed abstract class T
+ private case class A(m: Int) extends T
+ private case class B() extends T
+ private case object C extends T
+
+ // No warnings here
+ private def foo(t: T) = t match {
+ case A(m) => println("A:" + m)
+ case B() => println("B")
+ case C => println("C")
+ }
+
+ def test(m: Int): Unit = {
+ foo(A(m))
+ foo(B())
+ foo(C)
+ }
+}
+
+class F2[M]() {
+ private sealed abstract class T
+ private case class A(m: M) extends T
+ private case class B() extends T
+ private case object C extends T
+
+ // match may not be exhaustive. It would fail on the following input: C
+ private def foo(t: T) = t match {
+ case A(m) => println("A:" + m)
+ case B() => println("B")
+ case C => println("C")
+ }
+
+ def test(m: M): Unit = {
+ foo(A(m))
+ foo(B())
+ foo(C)
+ }
+
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ new F1().test(1)
+ new F2[Int]().test(1)
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t8606.scala b/tests/patmat/t8606.scala
new file mode 100644
index 000000000..9388c9f02
--- /dev/null
+++ b/tests/patmat/t8606.scala
@@ -0,0 +1,18 @@
+class Cl[T] {
+
+ sealed trait A {
+ def foo = this match {
+ case AObj => 0
+ case BObj => 0
+ case ACls(x) => 0
+ case BCls(x) => 0
+ }
+ }
+
+ case object AObj extends A
+ case class ACls(x: Int) extends A
+
+ sealed trait B extends A
+ case object BObj extends B
+ case class BCls(x: Int) extends B
+}
diff --git a/tests/patmat/t8700a/Bar.scala b/tests/patmat/t8700a/Bar.scala
new file mode 100644
index 000000000..33ad8e987
--- /dev/null
+++ b/tests/patmat/t8700a/Bar.scala
@@ -0,0 +1,9 @@
+object Bar {
+ def bar1(foo: Foo) = foo match {
+ case Foo.A => 1
+ }
+
+ def bar2(foo: Baz) = foo match {
+ case Baz.A => 1
+ }
+}
diff --git a/tests/patmat/t8700a/Baz.java b/tests/patmat/t8700a/Baz.java
new file mode 100644
index 000000000..49f15e121
--- /dev/null
+++ b/tests/patmat/t8700a/Baz.java
@@ -0,0 +1,11 @@
+public enum Baz {
+ A {
+ public void baz1() {}
+ },
+ B {
+ public void baz1() {}
+ };
+
+ public abstract void baz1();
+ public void baz2() {}
+}
diff --git a/tests/patmat/t8700a/Foo.java b/tests/patmat/t8700a/Foo.java
new file mode 100644
index 000000000..cc8e9daf1
--- /dev/null
+++ b/tests/patmat/t8700a/Foo.java
@@ -0,0 +1,4 @@
+public enum Foo {
+ A,
+ B
+}
diff --git a/tests/patmat/t8700a/expected.check b/tests/patmat/t8700a/expected.check
new file mode 100644
index 000000000..83f1c5a9e
--- /dev/null
+++ b/tests/patmat/t8700a/expected.check
@@ -0,0 +1,9 @@
+./tests/patmat/t8700a/Bar.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: B
+ def bar1(foo: Foo) = foo match {
+ ^
+./tests/patmat/t8700a/Bar.scala:6: warning: match may not be exhaustive.
+It would fail on the following input: B
+ def bar2(foo: Baz) = foo match {
+ ^
+two warnings found
diff --git a/tests/patmat/t9129.check b/tests/patmat/t9129.check
new file mode 100644
index 000000000..aa722a61a
--- /dev/null
+++ b/tests/patmat/t9129.check
@@ -0,0 +1,5 @@
+./tests/patmat/t9129.scala:21: warning: match may not be exhaustive.
+It would fail on the following input: Two(B2, A2), Two(_, A2)
+ def foo(c: C): Unit = c match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t9129.scala b/tests/patmat/t9129.scala
new file mode 100644
index 000000000..89f08f0ac
--- /dev/null
+++ b/tests/patmat/t9129.scala
@@ -0,0 +1,29 @@
+object Test {
+
+ sealed abstract class A
+
+ case object A1 extends A
+
+ case object A2 extends A
+
+ sealed abstract class B
+
+ case object B1 extends B
+
+ case object B2 extends B
+
+ sealed abstract class C
+
+ final case class One(a: A, b: B) extends C
+
+ final case class Two(b: B, a: A) extends C
+
+ def foo(c: C): Unit = c match {
+ case One(A1, B1) =>
+ case One(A2, B1) =>
+ case One(A1, B2) =>
+ case One(A2, B2) =>
+ case Two(B1, A1) =>
+ case Two(B2, A1) =>
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t9232.check b/tests/patmat/t9232.check
new file mode 100644
index 000000000..c3957c0ff
--- /dev/null
+++ b/tests/patmat/t9232.check
@@ -0,0 +1,5 @@
+./tests/patmat/t9232.scala:13: warning: match may not be exhaustive.
+It would fail on the following input: Node2()
+ def transformTree(tree: Tree): Any = tree match {
+ ^
+one warning found
diff --git a/tests/patmat/t9232.scala b/tests/patmat/t9232.scala
new file mode 100644
index 000000000..975ec58db
--- /dev/null
+++ b/tests/patmat/t9232.scala
@@ -0,0 +1,16 @@
+final class Foo(val value: Int)
+
+object Foo {
+ def unapplySeq(foo: Foo): Some[Seq[Int]] = Some(List(foo.value))
+ // def unapply(foo: Foo): Some[Int] = Some(foo.value)
+}
+
+sealed trait Tree
+case class Node1(foo: Foo) extends Tree
+case class Node2() extends Tree
+
+object Test {
+ def transformTree(tree: Tree): Any = tree match {
+ case Node1(Foo(1)) => ???
+ }
+}
diff --git a/tests/patmat/t9289.check b/tests/patmat/t9289.check
new file mode 100644
index 000000000..5240988e2
--- /dev/null
+++ b/tests/patmat/t9289.check
@@ -0,0 +1,9 @@
+./tests/patmat/t9289.scala:9: warning: match may not be exhaustive.
+It would fail on the following input: module.LetR()
+ def patmat(tree: module.Tree) = tree match {
+ ^
+./tests/patmat/t9289.scala:20: warning: match may not be exhaustive.
+It would fail on the following input: module.LetR()
+ def patmat(tree: module.Tree) = tree match {
+ ^
+two warnings found
\ No newline at end of file
diff --git a/tests/patmat/t9289.scala b/tests/patmat/t9289.scala
new file mode 100644
index 000000000..714a4a0e3
--- /dev/null
+++ b/tests/patmat/t9289.scala
@@ -0,0 +1,28 @@
+trait Module {
+ sealed trait Tree
+
+ case class LetL() extends Tree
+ case class LetR() extends Tree
+}
+
+class Patmat[T <: Module](val module: T) {
+ def patmat(tree: module.Tree) = tree match {
+ case module.LetL() => ???
+ }
+
+ def exhaust(tree: module.Tree) = tree match {
+ case module.LetL() => ???
+ case module.LetR() => ???
+ }
+}
+
+class Patmat2(val module: Module) {
+ def patmat(tree: module.Tree) = tree match {
+ case module.LetL() => ???
+ }
+
+ def exhaust(tree: module.Tree) = tree match {
+ case module.LetL() => ???
+ case module.LetR() => ???
+ }
+}
diff --git a/tests/patmat/t9351.check b/tests/patmat/t9351.check
new file mode 100644
index 000000000..03b94c2c0
--- /dev/null
+++ b/tests/patmat/t9351.check
@@ -0,0 +1,13 @@
+./tests/patmat/t9351.scala:8: warning: match may not be exhaustive.
+It would fail on the following input: _: A
+ a match {
+ ^
+./tests/patmat/t9351.scala:17: warning: match may not be exhaustive.
+It would fail on the following input: (_, _), (_, None), (_, Some(_))
+ (a, o) match {
+ ^
+./tests/patmat/t9351.scala:28: warning: match may not be exhaustive.
+It would fail on the following input: (_, _)
+ (a, b) match {
+ ^
+three warnings found
\ No newline at end of file
diff --git a/tests/patmat/t9351.scala b/tests/patmat/t9351.scala
new file mode 100644
index 000000000..9b9bd4312
--- /dev/null
+++ b/tests/patmat/t9351.scala
@@ -0,0 +1,35 @@
+trait A {}
+case object B extends A {}
+case object C extends A {}
+
+class X {
+ def good = {
+ val a: A = B
+ a match {
+ case B =>
+ case C =>
+ }
+ }
+
+ def bad = {
+ val a: A = B
+ val o: Option[Int] = None
+ (a, o) match {
+ case (B, None) =>
+ case (B, Some(_)) =>
+ case (C, None) =>
+ case (C, Some(_)) =>
+ }
+ }
+
+ def alsoGood = {
+ val a: A = B
+ val b: A = C
+ (a, b) match {
+ case (B, B) =>
+ case (B, C) =>
+ case (C, B) =>
+ case (C, C) =>
+ }
+ }
+}
diff --git a/tests/patmat/t9398.check b/tests/patmat/t9398.check
new file mode 100644
index 000000000..0efbf231d
--- /dev/null
+++ b/tests/patmat/t9398.check
@@ -0,0 +1,5 @@
+./tests/patmat/t9398.scala:11: warning: match may not be exhaustive.
+It would fail on the following input: CC(_, B2)
+ case CC(_, B) => ()
+ ^
+one warning found
diff --git a/tests/patmat/t9398.scala b/tests/patmat/t9398.scala
new file mode 100644
index 000000000..6d4d6bd3b
--- /dev/null
+++ b/tests/patmat/t9398.scala
@@ -0,0 +1,13 @@
+sealed abstract class TA
+sealed abstract class TB extends TA
+case object B extends TB
+case object B2 extends TB
+
+case class CC(i: Int, tb: TB)
+
+object Test {
+ // Should warn that CC(_, B2) isn't matched
+ def foo: CC => Unit = {
+ case CC(_, B) => ()
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t9399.scala b/tests/patmat/t9399.scala
new file mode 100644
index 000000000..89dbedd96
--- /dev/null
+++ b/tests/patmat/t9399.scala
@@ -0,0 +1,16 @@
+sealed abstract class TA
+sealed abstract class TB extends TA
+case object A extends TA
+case object B extends TB
+
+sealed trait C
+case class CTA(id: Int, da: TA) extends C
+case class CTB(id: Int, da: TB) extends C
+
+object Test {
+ val test: C => Unit = {
+ case CTA(_, A) =>
+ case CTA(_, B) =>
+ case CTB(_, B) =>
+ }
+}
diff --git a/tests/patmat/t9411a.scala b/tests/patmat/t9411a.scala
new file mode 100644
index 000000000..d5264663e
--- /dev/null
+++ b/tests/patmat/t9411a.scala
@@ -0,0 +1,27 @@
+object OhNoes {
+
+ sealed trait F
+ sealed abstract class FA extends F
+ sealed abstract class FB extends F
+
+ case object FA1 extends FA
+ case object FB1 extends FB
+ case object FB2 extends FB
+
+ sealed trait G
+ case object G1 extends G
+ case object G2 extends G
+
+ sealed trait H
+ case class H1(a: FB, b: G) extends H
+ case class H2(a: F) extends H
+
+ val demo: H => Unit = {
+ case H1(FB1, G1) =>
+ case H1(FB2, G2) =>
+ case H2(_: FB) =>
+ case H2(_: FA) =>
+ case H1(FB1, G2) =>
+ case H1(FB2, G1) =>
+ }
+}
diff --git a/tests/patmat/t9411b.scala b/tests/patmat/t9411b.scala
new file mode 100644
index 000000000..6888ba938
--- /dev/null
+++ b/tests/patmat/t9411b.scala
@@ -0,0 +1,36 @@
+object OhNoes {
+
+ sealed trait F
+ sealed abstract class FA extends F
+ sealed abstract class FB extends F
+
+ case object FA1 extends FA
+ case object FB1 extends FB
+ case object FB2 extends FB
+
+ sealed trait G
+ case object G1 extends G
+ case object G2 extends G
+
+ sealed trait H
+ case class H1(a: FB, b: G) extends H
+ case class H2(b: F) extends H
+
+ val demo: H => Unit = {
+ case H1(FB1, G1) =>
+ case H1(FB2, G2) =>
+ case H2(_: FB) =>
+ case H2(_: FA) =>
+ case H1(FB1, G2) =>
+ case H1(FB2, G1) =>
+ }
+
+ val demo2: H => Unit = {
+ case H2(_: FA) =>
+ case H2(_: FB) =>
+ case H1(FB1, G1) =>
+ case H1(FB2, G1) =>
+ case H1(FB1, G2) =>
+ case H1(FB2, G2) =>
+ }
+}
diff --git a/tests/patmat/t9573.check b/tests/patmat/t9573.check
new file mode 100644
index 000000000..4ec379161
--- /dev/null
+++ b/tests/patmat/t9573.check
@@ -0,0 +1,5 @@
+./tests/patmat/t9573.scala:9: warning: match may not be exhaustive.
+It would fail on the following input: Horse(_)
+ x match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t9573.scala b/tests/patmat/t9573.scala
new file mode 100644
index 000000000..2a32c2599
--- /dev/null
+++ b/tests/patmat/t9573.scala
@@ -0,0 +1,13 @@
+class Foo {
+
+ def foo = {
+ abstract sealed class Animal
+ case class Goat(age: Int) extends Animal
+ case class Horse(age: Int) extends Animal
+
+ val x: Animal = Goat(1)
+ x match {
+ case Goat(_) => println("a goat")
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t9630.scala b/tests/patmat/t9630.scala
new file mode 100644
index 000000000..c846faa99
--- /dev/null
+++ b/tests/patmat/t9630.scala
@@ -0,0 +1,21 @@
+sealed trait OpError
+sealed trait RequestErrorType
+sealed trait ProcessingErrorType
+
+final case class InvalidEndpoint(reason: String) extends RequestErrorType
+final case class InvalidParameters(reason: String) extends RequestErrorType
+
+final case class InvalidFormat(response: String) extends ProcessingErrorType
+final case class EntityNotFound(id: Long) extends ProcessingErrorType
+
+final case class RequestError(errorType: RequestErrorType) extends OpError
+final case class ProcessingError(errorType: ProcessingErrorType) extends OpError
+
+object Test{
+ def printMatches(error: OpError): Unit = error match {
+ case RequestError(InvalidEndpoint(reason)) => //print something
+ case RequestError(InvalidParameters(reason)) => //print something
+ case ProcessingError(InvalidFormat(format)) => //print something
+ case ProcessingError(EntityNotFound(entityId)) => //print something
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t9657.check b/tests/patmat/t9657.check
new file mode 100644
index 000000000..d3e2ec73f
--- /dev/null
+++ b/tests/patmat/t9657.check
@@ -0,0 +1,17 @@
+./tests/patmat/t9657.scala:29: warning: match may not be exhaustive.
+It would fail on the following input: Bus(_)
+ def refuel2[P <: Petrol.type](vehicle: Vehicle {type A = P} ): Vehicle = vehicle match {
+ ^
+./tests/patmat/t9657.scala:38: warning: match may not be exhaustive.
+It would fail on the following input: Bus(_)
+ def foo2(vehicle: Vehicle {type A <: Petrol.type} ): Vehicle = vehicle match {
+ ^
+./tests/patmat/t9657.scala:49: warning: match may not be exhaustive.
+It would fail on the following input: Bus(_)
+ def bar2(vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match {
+ ^
+./tests/patmat/t9657.scala:58: warning: match may not be exhaustive.
+It would fail on the following input: Bus(_)
+ def qux2[P <: Petrol.type](vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match {
+ ^
+four warnings found
\ No newline at end of file
diff --git a/tests/patmat/t9657.scala b/tests/patmat/t9657.scala
new file mode 100644
index 000000000..f9769574e
--- /dev/null
+++ b/tests/patmat/t9657.scala
@@ -0,0 +1,62 @@
+sealed trait PowerSource
+
+case object Petrol extends PowerSource
+
+case object Pedal extends PowerSource
+
+sealed abstract class Vehicle {
+ type A <: PowerSource
+}
+
+case object Bicycle extends Vehicle {
+ type A = Pedal.type
+}
+
+case class Bus(fuel: Int) extends Vehicle {
+ type A = Petrol.type
+}
+
+case class Car(fuel: Int) extends Vehicle {
+ type A = Petrol.type
+}
+
+class Test {
+ def refuel[P <: Petrol.type](vehicle: Vehicle {type A = P} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ case Bus(_) => Bus(100)
+ }
+
+ def refuel2[P <: Petrol.type](vehicle: Vehicle {type A = P} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ }
+
+ def foo1(vehicle: Vehicle {type A <: Petrol.type} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ case Bus(_) => Bus(100)
+ }
+
+ def foo2(vehicle: Vehicle {type A <: Petrol.type} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ }
+
+ type P = Petrol.type
+
+ def bar1(vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ case Bus(_) => Bus(100)
+ }
+
+ def bar2(vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ }
+
+ def qux1[P <: Petrol.type](vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ case Bus(_) => Bus(100)
+ }
+
+ def qux2[P <: Petrol.type](vehicle: Vehicle {type A <: P} ): Vehicle = vehicle match {
+ case Car(_) => Car(100)
+ }
+
+}
diff --git a/tests/patmat/t9672.check b/tests/patmat/t9672.check
new file mode 100644
index 000000000..3284d1df1
--- /dev/null
+++ b/tests/patmat/t9672.check
@@ -0,0 +1,5 @@
+./tests/patmat/t9672.scala:22: warning: match may not be exhaustive.
+It would fail on the following input: SimpleExpr.IntExpr(_)
+ def func(expr: Expr) = expr match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t9672.scala b/tests/patmat/t9672.scala
new file mode 100644
index 000000000..fe068f3d5
--- /dev/null
+++ b/tests/patmat/t9672.scala
@@ -0,0 +1,28 @@
+trait Hierarchy {
+ sealed trait Expr
+}
+trait If {
+ this: Hierarchy =>
+ case class If(cond: Expr, yes: Expr, no: Expr) extends Expr
+}
+trait Word {
+ this: Hierarchy =>
+ case class Word(name: String) extends Expr
+}
+trait IntExpr {
+ this: Hierarchy =>
+ case class IntExpr(value : Int) extends Expr
+}
+
+object SimpleExpr extends Hierarchy with If with Word with IntExpr
+//object OtherExpr extends Hierarchy with If with IntExpr
+
+object Demo extends App {
+ import SimpleExpr._
+ def func(expr: Expr) = expr match {
+ case If(cond, yes, no) => cond
+ case Word(name) => name
+ // compiler should emit warning "missing case statement"
+ // emits the wrong warning "unreachable code"
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t9677.check b/tests/patmat/t9677.check
new file mode 100644
index 000000000..f1e1817cb
--- /dev/null
+++ b/tests/patmat/t9677.check
@@ -0,0 +1,4 @@
+./tests/patmat/t9677.scala:20: warning: unreachable code
+ case path: A => println("Not root")
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t9677.scala b/tests/patmat/t9677.scala
new file mode 100644
index 000000000..1e9b1df5e
--- /dev/null
+++ b/tests/patmat/t9677.scala
@@ -0,0 +1,23 @@
+sealed abstract class Base
+
+sealed trait A extends Base
+
+object A {
+
+ case object Root extends Base
+
+ def apply(param: String): A = {
+ new A {}
+ }
+}
+
+object ExhaustiveMatchWarning {
+
+ def test: Unit = {
+ val b: Base = A("blabla")
+ b match {
+ case A.Root => println("Root")
+ case path: A => println("Not root")
+ }
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/t9779.check b/tests/patmat/t9779.check
new file mode 100644
index 000000000..0e0d8d5f4
--- /dev/null
+++ b/tests/patmat/t9779.check
@@ -0,0 +1,5 @@
+./tests/patmat/t9779.scala:10: warning: match may not be exhaustive.
+It would fail on the following input: _: a.Elem
+ private def toLuaValue(eX: a.Elem[_]): String = eX match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/t9779.scala b/tests/patmat/t9779.scala
new file mode 100644
index 000000000..9c418b0b1
--- /dev/null
+++ b/tests/patmat/t9779.scala
@@ -0,0 +1,13 @@
+trait Elems {
+ sealed class Elem[A] extends Dummy
+
+ val UnitElement: Elem[Unit]
+
+ trait Dummy
+}
+
+class BadMatch[A <: Elems](a: A) {
+ private def toLuaValue(eX: a.Elem[_]): String = eX match {
+ case a.UnitElement => "" // type mismatch
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/try.scala b/tests/patmat/try.scala
new file mode 100644
index 000000000..d7df24ee0
--- /dev/null
+++ b/tests/patmat/try.scala
@@ -0,0 +1,5 @@
+object Test {
+ try 2/0 catch {
+ case e: Exception =>
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/tuple.scala b/tests/patmat/tuple.scala
new file mode 100644
index 000000000..f33a5cfec
--- /dev/null
+++ b/tests/patmat/tuple.scala
@@ -0,0 +1,5 @@
+object Test {
+ (4, (4, 6)) match {
+ case (x, (y, z)) => true
+ }
+}
\ No newline at end of file
diff --git a/tests/patmat/virtpatmat_apply.check b/tests/patmat/virtpatmat_apply.check
new file mode 100644
index 000000000..d10d82165
--- /dev/null
+++ b/tests/patmat/virtpatmat_apply.check
@@ -0,0 +1,5 @@
+./tests/patmat/virtpatmat_apply.scala:2: warning: match may not be exhaustive.
+It would fail on the following input: List(_)
+ List(1, 2, 3) match {
+ ^
+one warning found
\ No newline at end of file
diff --git a/tests/patmat/virtpatmat_apply.scala b/tests/patmat/virtpatmat_apply.scala
new file mode 100644
index 000000000..646d15f90
--- /dev/null
+++ b/tests/patmat/virtpatmat_apply.scala
@@ -0,0 +1,7 @@
+object Test {
+ List(1, 2, 3) match {
+ case Nil => println("FAIL")
+ case x :: y :: xs if xs.length == 2 => println("FAIL")
+ case x :: y :: xs if xs.length == 1 => println("OK "+ y)
+ }
+}
diff --git a/tests/patmat/virtpatmat_exhaust_compound.check b/tests/patmat/virtpatmat_exhaust_compound.check
new file mode 100644
index 000000000..72e034068
--- /dev/null
+++ b/tests/patmat/virtpatmat_exhaust_compound.check
@@ -0,0 +1,15 @@
+virtpatmat_exhaust_compound.scala:14: warning: match may not be exhaustive.
+It would fail on the following inputs: O1, O2, O4
+ a match {
+ ^
+virtpatmat_exhaust_compound.scala:18: warning: match may not be exhaustive.
+It would fail on the following input: O4
+ def t1(a: Product with Base with Base2) = a match {
+ ^
+virtpatmat_exhaust_compound.scala:22: warning: match may not be exhaustive.
+It would fail on the following input: O2
+ def t2(a: Product with Base { def foo: Int }) = a match {
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/tests/patmat/virtpatmat_exhaust_compound.scala.ignore b/tests/patmat/virtpatmat_exhaust_compound.scala.ignore
new file mode 100644
index 000000000..4ff04dd06
--- /dev/null
+++ b/tests/patmat/virtpatmat_exhaust_compound.scala.ignore
@@ -0,0 +1,29 @@
+sealed trait Base
+case object O1 extends Base
+case object O2 extends Base {
+ def foo: Int = 0
+}
+
+sealed trait Base2
+case object O3 extends Base2
+
+case object O4 extends Base with Base2
+
+object Test {
+ val a /*: Product with Serializable with Base */ = if (true) O1 else O2
+ a match {
+ case null =>
+ }
+
+ def t1(a: Product with Base with Base2) = a match {
+ case null => // O1..O3 should *not* be possible here
+ }
+
+ def t2(a: Product with Base { def foo: Int }) = a match {
+ case null => // O2 in the domain
+ }
+
+ def t3(a: Product with Base { def bar: Int }) = a match {
+ case null => // nothing in the domain
+ }
+}
diff --git a/tests/patmat/virtpatmat_reach_sealed_unsealed.check b/tests/patmat/virtpatmat_reach_sealed_unsealed.check
new file mode 100644
index 000000000..ef5ec1a00
--- /dev/null
+++ b/tests/patmat/virtpatmat_reach_sealed_unsealed.check
@@ -0,0 +1,11 @@
+./tests/patmat/virtpatmat_reach_sealed_unsealed.scala:16: warning: match may not be exhaustive.
+It would fail on the following input: false
+ (true: Boolean) match { case true => } // not exhaustive, but reachable
+ ^
+./tests/patmat/virtpatmat_reach_sealed_unsealed.scala:18: warning: unreachable code
+ (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable
+ ^
+./tests/patmat/virtpatmat_reach_sealed_unsealed.scala:19: warning: unreachable code
+ (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable
+ ^
+three warnings found
diff --git a/tests/patmat/virtpatmat_reach_sealed_unsealed.scala b/tests/patmat/virtpatmat_reach_sealed_unsealed.scala
new file mode 100644
index 000000000..13911dbd7
--- /dev/null
+++ b/tests/patmat/virtpatmat_reach_sealed_unsealed.scala
@@ -0,0 +1,21 @@
+sealed abstract class X
+sealed case class A(x: Int) extends X
+
+// test reachability on mixed sealed / non-sealed matches
+object Test extends App {
+ val B: X = A(0)
+ val C: X = A(1)
+
+ // all cases are reachable and the match is exhaustive
+ (C: X) match {
+ case B =>
+ case C =>
+ case A(_) =>
+ }
+
+ (true: Boolean) match { case true => } // not exhaustive, but reachable
+ (true: Boolean) match { case true => case false => } // exhaustive, reachable
+ (true: Boolean) match { case true => case false => case _ => } // exhaustive, last case is unreachable
+ (true: Boolean) match { case true => case false => case _: Boolean => } // exhaustive, last case is unreachable
+ (true: Boolean) match { case true => case false => case _: Any => } // exhaustive, last case is unreachable
+}
\ No newline at end of file
diff --git a/tests/pending/neg/i533/Compat.scala b/tests/pending/neg/i533/Compat.scala
new file mode 100644
index 000000000..16613cf5b
--- /dev/null
+++ b/tests/pending/neg/i533/Compat.scala
@@ -0,0 +1,7 @@
+object Compat {
+ def main(args: Array[String]): Unit = {
+ val x = new Array[Int](1)
+ x(0) = 10
+ println(JA.get(x))
+ }
+}
diff --git a/tests/pending/neg/i533/JA.java b/tests/pending/neg/i533/JA.java
new file mode 100644
index 000000000..92421e5b1
--- /dev/null
+++ b/tests/pending/neg/i533/JA.java
@@ -0,0 +1,5 @@
+class JA {
+ public static <T> T get(T[] arr) {
+ return arr[0];
+ }
+}
\ No newline at end of file
diff --git a/tests/pending/pos/contraImplicits.scala b/tests/pending/pos/contraImplicits.scala
deleted file mode 100644
index c4d659615..000000000
--- a/tests/pending/pos/contraImplicits.scala
+++ /dev/null
@@ -1,18 +0,0 @@
-import scala.reflect._
-// this needs to be fleshed out further
-class Contra[-T]
-
-object Test {
- def getParam[T](c: Contra[T])(implicit ct: ClassTag[T]): Unit = {
- println(ct)
- ct
- }
- def f[T](x: Contra[T]): Contra[T] = x
-
- def main(args: Array[String]): Unit = {
- val x = f(new Contra[Int])
- val y: Contra[Int] = x
- getParam(new Contra[Int])
- }
-}
-
diff --git a/tests/pending/pos/depmet_implicit_norm_ret.scala b/tests/pending/pos/depmet_implicit_norm_ret.scala
index 85be750b4..42bfb9fe1 100644
--- a/tests/pending/pos/depmet_implicit_norm_ret.scala
+++ b/tests/pending/pos/depmet_implicit_norm_ret.scala
@@ -17,6 +17,8 @@ object Test{
}
}
+ import ZipWith._
+
trait ZipWith[S] {
type T
def zipWith : S => T = sys.error("")
@@ -24,6 +26,9 @@ object Test{
// bug: inferred return type = (Stream[A]) => java.lang.Object with Test.ZipWith[B]{type T = Stream[B]}#T
// this seems incompatible with vvvvvvvvvvvvvvvvvvvvvv -- #3731
- def map[A,B](f : A => B) /* : Stream[A] => Stream[B]*/ = ZipWith(f)
- val tst: Stream[Int] = map{x: String => x.length}(Stream("a"))
+ def map1[A,B](f : A => B) = ZipWith(f)(SuccZipWith) // this typechecks but fails in -Ycheck:first
+ val tst1: Stream[Int] = map1[String, Int]{x: String => x.length}.apply(Stream("a"))
+
+ def map2[A,B](f : A => B) = ZipWith(f) // this finds ZeroZipWith where scalac finds SuccZipWith and fails typechecking in the next line.
+ val tst2: Stream[Int] = map2{x: String => x.length}.apply(Stream("a"))
}
diff --git a/tests/pending/pos/depsel.scala b/tests/pending/pos/depsel.scala
deleted file mode 100644
index 2cec4349e..000000000
--- a/tests/pending/pos/depsel.scala
+++ /dev/null
@@ -1,14 +0,0 @@
-// demonstrates selection on non-path types. Needs to be fleshed out to
-// become a real test.
-object Test {
-
- class C {
- type T
- val f: T => T = ???
- }
-
- var x = new C
- val y = x.f
-
-
-}
diff --git a/tests/pending/pos/exponential-spec.scala b/tests/pending/pos/exponential-spec.scala
index 54515c1d2..26c9ab85e 100644
--- a/tests/pending/pos/exponential-spec.scala
+++ b/tests/pending/pos/exponential-spec.scala
@@ -23,7 +23,7 @@ object Test {
compose f[T] // 8s
compose f[T] // 11s
compose f[T] // 17s
- compose f[T] // 29s
+/* compose f[T] // 29s
compose f[T] // 54s
compose f[T]
compose f[T]
@@ -42,6 +42,6 @@ object Test {
compose f[T]
compose f[T]
compose f[T]
- compose f[T]
+ compose f[T]*/
)(exp)
}
diff --git a/tests/pending/pos/generic-sigs.flags b/tests/pending/pos/generic-sigs.flags
deleted file mode 100644
index e8fb65d50..000000000
--- a/tests/pending/pos/generic-sigs.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/infersingle.flags b/tests/pending/pos/infersingle.flags
deleted file mode 100644
index e1b37447c..000000000
--- a/tests/pending/pos/infersingle.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xexperimental
\ No newline at end of file
diff --git a/tests/pending/pos/isApplicableSafe.scala b/tests/pending/pos/isApplicableSafe.scala
deleted file mode 100644
index b4cacbf28..000000000
--- a/tests/pending/pos/isApplicableSafe.scala
+++ /dev/null
@@ -1,8 +0,0 @@
-class A {
- // Any of Array[List[Symbol]], List[Array[Symbol]], or List[List[Symbol]] compile.
- var xs: Array[Array[Symbol]] = _
- var ys: Array[Map[Symbol, Set[Symbol]]] = _
-
- xs = Array(Array())
- ys = Array(Map(), Map())
-}
diff --git a/tests/pending/pos/setter-not-implicit.scala b/tests/pending/pos/setter-not-implicit.scala
deleted file mode 100644
index 9bfffc2ce..000000000
--- a/tests/pending/pos/setter-not-implicit.scala
+++ /dev/null
@@ -1,3 +0,0 @@
-object O {
- implicit var x: Int = 0
-}
diff --git a/tests/pending/pos/stable.scala b/tests/pending/pos/stable.scala
deleted file mode 100644
index 267a36fe5..000000000
--- a/tests/pending/pos/stable.scala
+++ /dev/null
@@ -1,11 +0,0 @@
-trait Base {
- val x: Int;
- val y: Int;
- var z: Int;
-}
-
-class Sub() extends Base {
- val x: Int = 1;
- val y: Int = 2;
- var z: Int = 3;
-}
diff --git a/tests/pending/pos/t1756.scala b/tests/pending/pos/t1756.scala
deleted file mode 100644
index 34bf273ab..000000000
--- a/tests/pending/pos/t1756.scala
+++ /dev/null
@@ -1,59 +0,0 @@
-
-/**
-This is a tricky issue which has to do with the fact that too much conflicting
-type information is propagated into a single implicit search, where the intended
-solution applies two implicit searches.
-
-Roughly, in x + x * y, the first x is first typed as Poly[A]. That
-means the x * y is then typed as Poly[A]. Then the second x is typed
-as Poly[A], then y is typed as Poly[Poly[A]]. The application x * y
-fails, so the coef2poly implicit conversion is applied to x. That
-means we look for an implicit conversion from type Poly[A] to type
-?{val *(x$1: ?>: Poly[Poly[A]] <: Any): Poly[A]}. Note that the result
-type Poly[A] is propagated into the implicit search. Poly[A] comes as
-expected type from x+, because the lhs x is still typed as a Poly[A].
-This means that the argument of the implicit conversion is typechecked
-with expected type A with Poly[A]. And no solution is found.
-
-To solve this, I added a fallback scheme similar to implicit arguments:
-When an implicit view that adds a method matching given arguments and result
-type fails, try again without the result type.
-
-However, troubles are not yet over. We now get an oprhan poly param C when pickling
-and, if typr printer and -Ylog:front is on, an infinite type of the form
-
- mu x. Ring[LazyRef(x) & A]
-*/
-trait Ring[T <: Ring[T]] {
- def +(that: T): T
- def *(that: T): T
-}
-
-class A extends Ring[A] {
- def +(that: A) = new A
- def *(that: A) = new A
-}
-
-class Poly[C <: Ring[C]](val c: C) extends Ring[Poly[C]] {
- def +(that: Poly[C]) = new Poly(this.c + that.c)
- def *(that: Poly[C]) = new Poly(this.c*that.c)
-}
-
-object Test extends App {
-
- implicit def coef2poly[C <: Ring[C]](c: C): Poly[C] = new Poly(c)
-
- val a = new A
- val x = new Poly(new A)
-
- println(x + a) // works
- println(a + x) // works
-
- val y = new Poly(new Poly(new A))
-
- println(x + y*x) // works
- println(x*y + x) // works
- println(y*x + x) // works
-
- println(x + x*y) // failed before
-}
diff --git a/tests/pending/pos/t3631.scala b/tests/pending/pos/t3631.scala
index e72374130..207e28cd7 100644
--- a/tests/pending/pos/t3631.scala
+++ b/tests/pending/pos/t3631.scala
@@ -1,3 +1,4 @@
+// fails Ycheck
case class X22(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int) { }
case class X23(x1: Int, x2: Int, x3: Int, x4: Int, x5: Int, x6: Int, x7: Int, x8: Int, x9: Int, x10: Int, x11: Int, x12: Int, x13: Int, x14: Int, x15: Int, x16: Int, x17: Int, x18: Int, x19: Int, x20: Int, x21: Int, x22: Int, x23: Int) { }
diff --git a/tests/pending/pos/t3960.flags b/tests/pending/pos/t3960.flags
deleted file mode 100644
index 4449dbbdf..000000000
--- a/tests/pending/pos/t3960.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ycheck:typer
\ No newline at end of file
diff --git a/tests/pending/pos/t4188.scala b/tests/pending/pos/t4188.scala
index 40e7d4924..104473fc5 100644
--- a/tests/pending/pos/t4188.scala
+++ b/tests/pending/pos/t4188.scala
@@ -1,3 +1,4 @@
+// Fails Ycheck
class A {
object Ding
class B {
diff --git a/tests/pending/pos/t4579.flags b/tests/pending/pos/t4579.flags
deleted file mode 100644
index 1182725e8..000000000
--- a/tests/pending/pos/t4579.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimize
\ No newline at end of file
diff --git a/tests/pending/pos/t4911.flags b/tests/pending/pos/t4911.flags
deleted file mode 100644
index 779916d58..000000000
--- a/tests/pending/pos/t4911.flags
+++ /dev/null
@@ -1 +0,0 @@
--unchecked -Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t5029.flags b/tests/pending/pos/t5029.flags
deleted file mode 100644
index e8fb65d50..000000000
--- a/tests/pending/pos/t5029.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t5899.flags b/tests/pending/pos/t5899.flags
deleted file mode 100644
index e8fb65d50..000000000
--- a/tests/pending/pos/t5899.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t5932.flags b/tests/pending/pos/t5932.flags
deleted file mode 100644
index e8fb65d50..000000000
--- a/tests/pending/pos/t5932.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t6123-explaintypes-implicits.flags b/tests/pending/pos/t6123-explaintypes-implicits.flags
deleted file mode 100644
index b36707c7c..000000000
--- a/tests/pending/pos/t6123-explaintypes-implicits.flags
+++ /dev/null
@@ -1 +0,0 @@
--explaintypes
diff --git a/tests/pending/pos/t6994.flags b/tests/pending/pos/t6994.flags
deleted file mode 100644
index e8fb65d50..000000000
--- a/tests/pending/pos/t6994.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t7011.flags b/tests/pending/pos/t7011.flags
deleted file mode 100644
index a4c161553..000000000
--- a/tests/pending/pos/t7011.flags
+++ /dev/null
@@ -1 +0,0 @@
--Ydebug -Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t7285a.flags b/tests/pending/pos/t7285a.flags
deleted file mode 100644
index e8fb65d50..000000000
--- a/tests/pending/pos/t7285a.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xfatal-warnings
\ No newline at end of file
diff --git a/tests/pending/pos/t7296.scala b/tests/pending/pos/t7296.scala
index 0c078d365..fcba17c08 100644
--- a/tests/pending/pos/t7296.scala
+++ b/tests/pending/pos/t7296.scala
@@ -1,3 +1,4 @@
+// Fails Ycheck
object Test {
type A = Int
// Emits the implementation restriction but then proceeds to crash
diff --git a/tests/pending/pos/t762.scala b/tests/pending/pos/t762.scala
deleted file mode 100644
index 76860272e..000000000
--- a/tests/pending/pos/t762.scala
+++ /dev/null
@@ -1,2 +0,0 @@
-trait Foo { type T }
-trait Bar extends Foo { val x : Foo { type T <: Bar.this.T } = this : this.type }
diff --git a/tests/pending/pos/t8187.check b/tests/pending/pos/t8187.check
deleted file mode 100644
index e69de29bb..000000000
--- a/tests/pending/pos/t8187.check
+++ /dev/null
diff --git a/tests/pending/pos/t8364.check b/tests/pending/pos/t8364.check
deleted file mode 100644
index e69de29bb..000000000
--- a/tests/pending/pos/t8364.check
+++ /dev/null
diff --git a/tests/pending/pos/t8369a.check b/tests/pending/pos/t8369a.check
deleted file mode 100644
index e69de29bb..000000000
--- a/tests/pending/pos/t8369a.check
+++ /dev/null
diff --git a/tests/pending/pos/trait-force-info.flags b/tests/pending/pos/trait-force-info.flags
deleted file mode 100644
index eb4d19bcb..000000000
--- a/tests/pending/pos/trait-force-info.flags
+++ /dev/null
@@ -1 +0,0 @@
--optimise
\ No newline at end of file
diff --git a/tests/pending/pos/virtpatmat_alts_subst.flags b/tests/pending/pos/virtpatmat_alts_subst.flags
deleted file mode 100644
index 3f5a3100e..000000000
--- a/tests/pending/pos/virtpatmat_alts_subst.flags
+++ /dev/null
@@ -1 +0,0 @@
- -Xexperimental
diff --git a/tests/pending/pos/virtpatmat_exist1.flags b/tests/pending/pos/virtpatmat_exist1.flags
deleted file mode 100644
index 3f5a3100e..000000000
--- a/tests/pending/pos/virtpatmat_exist1.flags
+++ /dev/null
@@ -1 +0,0 @@
- -Xexperimental
diff --git a/tests/pending/pos/virtpatmat_exist3.flags b/tests/pending/pos/virtpatmat_exist3.flags
deleted file mode 100644
index 3f5a3100e..000000000
--- a/tests/pending/pos/virtpatmat_exist3.flags
+++ /dev/null
@@ -1 +0,0 @@
- -Xexperimental
diff --git a/tests/pending/run/t2337.check b/tests/pending/run/t2337.check
deleted file mode 100644
index 18f1f66fc..000000000
--- a/tests/pending/run/t2337.check
+++ /dev/null
@@ -1,4 +0,0 @@
-(Both Int,-1,-1)
-(Both Float,1,1)
-(Float then Int,0,0)
-(Int then Float,0,0)
diff --git a/tests/pending/run/t3150.scala b/tests/pending/run/t3150.scala
index 034703b5f..dc95af373 100644
--- a/tests/pending/run/t3150.scala
+++ b/tests/pending/run/t3150.scala
@@ -1,10 +1,26 @@
-object Test {
- case object Bob { override def equals(other: Any) = true }
- def f(x: Any) = x match { case Bob => Bob }
-
- def main(args: Array[String]): Unit = {
- assert(f(Bob) eq Bob)
- assert(f(0) eq Bob)
- assert(f(Nil) eq Bob)
- }
-}
+ object Test {
+ case object Bob { override def equals(other: Any) = true }
+
+ class Bob2 {
+ override def equals(other: Any) = true
+ }
+ val Bob2 = new Bob2
+
+ def f0(x: Any) = x match { case Bob2 => Bob2 } // class cast exception at runtime, dotc only
+ def f1(x: Any) = x match { case Bob => Bob } // class cast exception at runtime, dotc only
+ def f2(x: Any): Bob.type = x match { case x @ Bob => x } // class cast exception at runtime, dotc and scalac.
+
+ def main(args: Array[String]): Unit = {
+ assert(f0(Bob2) eq Bob2)
+ assert(f0(0) eq Bob2) // only dotty fails here
+ assert(f0(Nil) eq Bob2)
+
+ assert(f1(Bob) eq Bob)
+ assert(f1(0) eq Bob) // only dotty fails here
+ assert(f1(Nil) eq Bob)
+
+ assert(f2(Bob) eq Bob)
+ assert(f2(0) eq Bob) // both dotty and scalac fail here
+ assert(f2(Nil) eq Bob)
+ }
+ }
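
The comments in the new t3150 turn on how a stable-identifier pattern is compiled: the scrutinee is first compared with equals, and where the result must have the singleton type, a cast follows. A rough reading of the f2 case (a hypothetical desugaring sketch, not code from the patch; the exact compilation schemes in scalac and dotc differ, which is what the "dotc only" comments are about):

object StableIdPatternSketch {
  case object Bob { override def equals(other: Any) = true }

  // as in the test: the binder x gets the singleton type Bob.type
  def f2(x: Any): Bob.type = x match { case x @ Bob => x }

  // roughly what the match has to do: an equality test, then a cast to Bob.type;
  // since Bob.equals answers true for 0 or Nil, the cast is what fails at runtime
  def f2Sketch(x: Any): Bob.type =
    if (Bob == x) x.asInstanceOf[Bob.type]
    else throw new MatchError(x)
}
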
diff --git a/tests/pending/run/unapply.check b/tests/pending/run/unapply.check
deleted file mode 100644
index 847e3b381..000000000
--- a/tests/pending/run/unapply.check
+++ /dev/null
@@ -1,3 +0,0 @@
-unapply.scala:57: warning: comparing values of types Null and Null using `==' will always yield true
- assert(doMatch2(b) == null)
- ^
diff --git a/tests/pending/run/t3050.scala b/tests/pos-scala2/t3050.scala
index 160f8b664..160f8b664 100644
--- a/tests/pending/run/t3050.scala
+++ b/tests/pos-scala2/t3050.scala
diff --git a/tests/pending/pos/t7688.scala b/tests/pos-scala2/t7688.scala
index 5a846b97e..5a846b97e 100644
--- a/tests/pending/pos/t7688.scala
+++ b/tests/pos-scala2/t7688.scala
diff --git a/tests/pending/pos/t8146a.scala b/tests/pos-special/t8146a.scala
index e4eb8d3fd..e4eb8d3fd 100644
--- a/tests/pending/pos/t8146a.scala
+++ b/tests/pos-special/t8146a.scala
diff --git a/tests/pending/pos/apply-equiv.scala b/tests/pos/apply-equiv.scala
index f53b8b5ab..f53b8b5ab 100644
--- a/tests/pending/pos/apply-equiv.scala
+++ b/tests/pos/apply-equiv.scala
diff --git a/tests/pos/dependent-implicits.scala b/tests/pos/dependent-implicits.scala
new file mode 100644
index 000000000..17a323112
--- /dev/null
+++ b/tests/pos/dependent-implicits.scala
@@ -0,0 +1,7 @@
+object Test {
+ trait T { type X; val x: X }
+ implicit def f(x: T): x.X = x.x
+ val t = new T { type X = String; val x = "" }
+ val x: String = t
+ val uy: String = f(t)
+}
diff --git a/tests/pending/pos/extractor-types.scala b/tests/pos/extractor-types.scala
index 200279be6..200279be6 100644
--- a/tests/pending/pos/extractor-types.scala
+++ b/tests/pos/extractor-types.scala
diff --git a/tests/pos/for-filter.scala b/tests/pos/for-filter.scala
new file mode 100644
index 000000000..3baac4f0c
--- /dev/null
+++ b/tests/pos/for-filter.scala
@@ -0,0 +1,12 @@
+object Test {
+
+ case class C[T](xs: List[T]) {
+ def filter(p: T => Boolean) = new C(xs.filter(p))
+ def map[U](f: T => U) = new C(xs.map(f))
+ }
+
+ def main(args: Array[String]): Unit =
+ println(for (x <- C(List(1, 2, 3)) if x % 2 == 0) yield x)
+ // println(C(List(1, 2, 3)).withFilter(_ % 2 == 0)) // error
+
+}
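
A note on what the pos test above exercises: the `if` guard in the for expression desugars into a filtering call, and since C defines no withFilter the call presumably falls back to the filter method defined above (the commented-out withFilter line being an error points the same way). Written out by hand under that assumption (the class is copied from the test, the object name is made up):

object ForFilterDesugared {
  case class C[T](xs: List[T]) {
    def filter(p: T => Boolean) = new C(xs.filter(p))
    def map[U](f: T => U) = new C(xs.map(f))
  }

  def main(args: Array[String]): Unit =
    // roughly the expansion of `for (x <- C(List(1, 2, 3)) if x % 2 == 0) yield x`
    println(C(List(1, 2, 3)).filter(x => x % 2 == 0).map(x => x))
}
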
diff --git a/tests/pending/pos/gadts2.scala b/tests/pos/gadts2.scala
index b67bafb32..bdf1c8e8a 100644
--- a/tests/pending/pos/gadts2.scala
+++ b/tests/pos/gadts2.scala
@@ -8,7 +8,7 @@ object Test {
case class Cell[a](var x: a) extends Term[a]
final case class NumTerm(val n: Number) extends Term[Number]
- def f[a](t: Term[a], c: Cell[a]): Unit = {
+ def f[A](t: Term[A], c: Cell[A]): Unit = {
t match {
case NumTerm(n) => c.x = MyDouble(1.0)
}
diff --git a/tests/pending/pos/hk-infer.scala b/tests/pos/hk-infer.scala
index c23a9d151..c23a9d151 100644
--- a/tests/pending/pos/hk-infer.scala
+++ b/tests/pos/hk-infer.scala
diff --git a/tests/pending/pos/i743.scala b/tests/pos/i743.scala
index 8fe522cee..8fe522cee 100644
--- a/tests/pending/pos/i743.scala
+++ b/tests/pos/i743.scala
diff --git a/tests/pending/pos/infersingle.scala b/tests/pos/infersingle.scala
index 60f4ff07e..60f4ff07e 100644
--- a/tests/pending/pos/infersingle.scala
+++ b/tests/pos/infersingle.scala
diff --git a/tests/pos/isApplicableSafe.scala b/tests/pos/isApplicableSafe.scala
new file mode 100644
index 000000000..c54df1f22
--- /dev/null
+++ b/tests/pos/isApplicableSafe.scala
@@ -0,0 +1,54 @@
+import reflect.ClassTag
+
+// The same problems arise in real arrays.
+class A {
+
+ class Array[T]
+ object Array {
+ def apply[T: ClassTag](xs: T*): Array[T] = ???
+ def apply(x: Int, xs: Int*): Array[Int] = ???
+ }
+
+ // Any of Array[List[Symbol]], List[Array[Symbol]], or List[List[Symbol]] compile.
+ var xs: Array[Array[Symbol]] = _
+ var ys: Array[Map[Symbol, Set[Symbol]]] = _
+
+ //xs = Array(Array())
+ // gives:
+ //
+ // isApplicableSafe.scala:15: error: type mismatch:
+ // found : A.this.Array[Nothing]
+ // required: A.this.Array[Symbol]
+ // xs = Array(Array())
+ //
+ // Here's the sequence of events that leads to this problem:
+ //
+ // 1. the outer Array.apply is overloaded, so we need to typecheck the inner one
+ // without an expected prototype
+ //
+ // 2. The inner Array.apply needs a ClassTag, so we need to instantiate
+ // its type variable, and the best instantiation is Nothing.
+ //
+ // To prevent this, we'd need to do several things:
+ //
+ // 1. Pass argument types lazily into the isApplicable call in resolveOverloaded,
+ // so that we can call constrainResult before any arguments are evaluated.
+ //
+ // 2. This is still not enough because the result type is initially an IgnoredProto.
+ // (because an implicit might have to be inserted around the call, so we cannot
+ // automatically assume that the call result is a subtype of the expected type).
+ // Hence, we need to somehow create a closure in constrainResult that does the
+ // comparison with the real expected result type "on demand".
+ //
+ // 3. When instantiating a type variable we need to recognize that some instantiations
+ // are suspicious (e.g. scalac avoids instantiating to Nothing). In these
+ // circumstances we should try to execute the delayed constrainResult closures
+ // in order to get a better instance type.
+ //
+ // Quite a lot of work. It's looking really complicated to fix this.
+
+
+ ys = Array(Map(), Map())
+
+ val zs = Array(Map())
+}
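
The long comment above hinges on one mechanism: a type variable that is typechecked without an expected type gets instantiated to Nothing. A minimal standalone sketch of just that step, using plain lists instead of the test's local Array class (the object and method names here are illustrative only):

object NothingInstantiation {
  def build[T](xs: T*): List[T] = xs.toList

  val a = build()               // no expected type: T is instantiated to Nothing, so a: List[Nothing]
  val b: List[Symbol] = build() // an expected type pins T to Symbol
}

In the test, the inner Array.apply sits under an overloaded outer apply, so it is typechecked in the first situation, and the ClassTag it needs is then resolved at Nothing.
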
diff --git a/tests/pending/pos/lazyvals.scala b/tests/pos/lazyvals.scala
index 93a82cd0c..93a82cd0c 100644
--- a/tests/pending/pos/lazyvals.scala
+++ b/tests/pos/lazyvals.scala
diff --git a/tests/pending/pos/matthias4.scala b/tests/pos/matthias4.scala
index 18599ae71..18599ae71 100644
--- a/tests/pending/pos/matthias4.scala
+++ b/tests/pos/matthias4.scala
diff --git a/tests/pending/pos/mixins.scala b/tests/pos/mixins.scala
index 846d6a41b..846d6a41b 100644
--- a/tests/pending/pos/mixins.scala
+++ b/tests/pos/mixins.scala
diff --git a/tests/pending/pos/return_thistype.scala b/tests/pos/return_thistype.scala
index c0736c0ad..c0736c0ad 100644
--- a/tests/pending/pos/return_thistype.scala
+++ b/tests/pos/return_thistype.scala
diff --git a/tests/pos/t1500a.scala b/tests/pos/t1500a.scala
new file mode 100644
index 000000000..adf46329a
--- /dev/null
+++ b/tests/pos/t1500a.scala
@@ -0,0 +1,28 @@
+trait Step0
+trait Step1
+trait Step2
+trait Step3
+trait Step4
+trait Step5
+trait Step6
+
+object Steps {
+ implicit val Step0: Step0 = new Step0 {}
+ implicit def Step1(implicit p: Step0): Step1 = new Step1 {}
+ implicit def Step2(implicit p: Step1): Step2 = new Step2 {}
+ implicit def Step3(implicit p: Step2): Step3 = new Step3 {}
+ implicit def Step4(implicit p: Step3): Step4 = new Step4 {}
+ implicit def Step5(implicit p: Step4): Step5 = new Step5 {}
+ implicit def Step6(implicit p: Step5): Step6 = new Step6 {}
+}
+
+object StepsTest {
+ import Steps._
+
+ implicitly[Step0]
+ implicitly[Step1]
+ implicitly[Step2]
+ implicitly[Step3]
+ implicitly[Step4]
+ implicitly[Step6]
+}
diff --git a/tests/pos/t1513a.scala b/tests/pos/t1513a.scala
new file mode 100644
index 000000000..3c4c02376
--- /dev/null
+++ b/tests/pos/t1513a.scala
@@ -0,0 +1,36 @@
+object Test {
+ // Heterogeneous lists and natural numbers as defined in shapeless.
+
+ sealed trait HList
+ sealed trait ::[H, T <: HList] extends HList
+ sealed trait HNil extends HList
+
+ sealed trait Nat
+ sealed trait Succ[P <: Nat] extends Nat
+ sealed trait Zero extends Nat
+
+ // Accessor type class to compute the N'th element of an HList L.
+
+ trait Accessor[L <: HList, N <: Nat] { type Out }
+ object Accessor {
+ type Aux[L <: HList, N <: Nat, O] = Accessor[L, N] { type Out = O }
+
+ // (H :: T).At[Zero] = H
+ implicit def caseZero[H, T <: HList]: Aux[H :: T, Zero, H] = ???
+
+ // T.At[N] = O => (H :: T).At[Succ[N]] = O
+ implicit def caseN[H, T <: HList, N <: Nat, O]
+ (implicit a: Aux[T, N, O]): Aux[H :: T, Succ[N], O] = ???
+ }
+
+ case class Proxy[T]()
+
+ def at1[NN <: Nat, OO] (implicit e: Accessor.Aux[String :: HNil, NN, OO]): OO = ???
+ def at2[NN <: Nat, OO](p: Proxy[NN])(implicit e: Accessor.Aux[String :: HNil, NN, OO]): OO = ???
+
+ // N is fixed by a value
+ at2(Proxy[Zero]): String
+
+ // N is fixed as a type parameter (by name)
+ at1[NN = Zero]: String
+}
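
The last two calls above use named type arguments: at2 fixes NN through the Proxy value, while at1[NN = Zero] names the type parameter directly and leaves OO to be inferred. A smaller sketch of the same usage (pick, A and B are made-up names; the only assumption is the partially-named application the test itself performs):

object NamedTypeArgSketch {
  def pick[A, B](a: A, b: B): B = b

  val x = pick[B = Int]("s", 1)     // B given by name, A inferred as String
  val y = pick[String, Int]("s", 1) // the equivalent positional form
}
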
diff --git a/tests/pos/t1513b.scala b/tests/pos/t1513b.scala
new file mode 100644
index 000000000..546649383
--- /dev/null
+++ b/tests/pos/t1513b.scala
@@ -0,0 +1,25 @@
+object Test {
+ def f[
+ T1 <: String,
+ T2 <: Int,
+ T3 <: Boolean
+ ](a1: T1, a2: T2, a3: T3) = ()
+
+ f ("", 1, true)
+ f[T1 = String] ("", 1, true)
+ f[T2 = Int] ("", 1, true)
+ f[T3 = Boolean] ("", 1, true)
+ f[T1 = String, T2 = Int] ("", 1, true)
+ f[T1 = String, T3 = Boolean] ("", 1, true)
+ f[T2 = Int, T1 = String] ("", 1, true)
+ f[T2 = Int, T3 = Boolean] ("", 1, true)
+ f[T3 = Boolean, T2 = Int] ("", 1, true)
+ f[T3 = Boolean, T1 = String] ("", 1, true)
+ f[T1 = String, T2 = Int, T3 = Boolean]("", 1, true)
+ f[T1 = String, T3 = Boolean, T2 = Int] ("", 1, true)
+ f[T2 = Int, T1 = String, T3 = Boolean]("", 1, true)
+ f[T2 = Int, T3 = Boolean, T1 = String] ("", 1, true)
+ f[T3 = Boolean, T1 = String, T2 = Int] ("", 1, true)
+ f[T3 = Boolean, T2 = Int, T1 = String] ("", 1, true)
+ f[String, Int, Boolean] ("", 1, true)
+}
diff --git a/tests/pos/t1756.scala b/tests/pos/t1756.scala
new file mode 100644
index 000000000..767eb54a7
--- /dev/null
+++ b/tests/pos/t1756.scala
@@ -0,0 +1,33 @@
+trait Ring[T <: Ring[T]] {
+ def +(that: T): T
+ def *(that: T): T
+}
+
+class A extends Ring[A] {
+ def +(that: A) = new A
+ def *(that: A) = new A
+}
+
+class Poly[C <: Ring[C]](val c: C) extends Ring[Poly[C]] {
+ def +(that: Poly[C]) = new Poly(this.c + that.c)
+ def *(that: Poly[C]) = new Poly(this.c*that.c)
+}
+
+object Test extends App {
+
+ implicit def coef2poly[CI <: Ring[CI]](c: CI): Poly[CI] = new Poly(c)
+
+ val a = new A
+ val x = new Poly(new A)
+
+ println(x + a) // works
+ println(a + x) // works
+
+ val y = new Poly(new Poly(new A))
+
+ println(x + y*x) // works
+ println(x*y + x) // works
+ println(y*x + x) // works
+
+ println(x + x*y) // failed before, first with type error, after that was fixed with "orphan poly parameter CI".
+}
diff --git a/tests/pending/pos/t3494.scala b/tests/pos/t3494.scala
index dd54b0f82..dd54b0f82 100644
--- a/tests/pending/pos/t3494.scala
+++ b/tests/pos/t3494.scala
diff --git a/tests/pending/pos/t3800.scala b/tests/pos/t3800.scala
index 096502174..096502174 100644
--- a/tests/pending/pos/t3800.scala
+++ b/tests/pos/t3800.scala
diff --git a/tests/pending/pos/t3862.scala b/tests/pos/t3862.scala
index 0d978caa4..0d978caa4 100644
--- a/tests/pending/pos/t3862.scala
+++ b/tests/pos/t3862.scala
diff --git a/tests/pending/pos/t3880.scala b/tests/pos/t3880.scala
index f778eb71a..f778eb71a 100644
--- a/tests/pending/pos/t3880.scala
+++ b/tests/pos/t3880.scala
diff --git a/tests/pending/pos/t3999/a_1.scala b/tests/pos/t3999/a_1.scala
index be9cc371e..7ef67bba4 100644
--- a/tests/pending/pos/t3999/a_1.scala
+++ b/tests/pos/t3999/a_1.scala
@@ -4,6 +4,6 @@ class Outside
package object bar {
class Val(b: Boolean)
- implicit def boolean2Val(b: Boolean): foo.bar.package.Val = new Val(b)
+ implicit def boolean2Val(b: Boolean): foo.bar.`package`.Val = new Val(b)
implicit def boolean2Outside(b: Boolean): foo.Outside = new Outside
}
diff --git a/tests/pending/pos/t3999/b_2.scala b/tests/pos/t3999/b_2.scala
index 775b839d9..775b839d9 100644
--- a/tests/pending/pos/t3999/b_2.scala
+++ b/tests/pos/t3999/b_2.scala
diff --git a/tests/pending/pos/t4269.scala b/tests/pos/t4269.scala
index fe0c20103..fe0c20103 100644
--- a/tests/pending/pos/t4269.scala
+++ b/tests/pos/t4269.scala
diff --git a/tests/pos/t5070.scala b/tests/pos/t5070.scala
new file mode 100644
index 000000000..410afba14
--- /dev/null
+++ b/tests/pos/t5070.scala
@@ -0,0 +1,15 @@
+trait A {
+ type T
+}
+
+object O {
+ implicit def b(implicit x: A): x.T = error("")
+}
+
+class Test {
+ import O._
+ implicit val a: A = new A {}
+ implicitly[a.T] // works
+
+ implicitly[a.T](b(a)) // works
+}
diff --git a/tests/pending/pos/t5330.scala b/tests/pos/t5330.scala
index 24aab7733..24aab7733 100644
--- a/tests/pending/pos/t5330.scala
+++ b/tests/pos/t5330.scala
diff --git a/tests/pending/pos/t5604/ReplConfig.scala b/tests/pos/t5604/ReplConfig.scala
index 8c589eba6..8c589eba6 100644
--- a/tests/pending/pos/t5604/ReplConfig.scala
+++ b/tests/pos/t5604/ReplConfig.scala
diff --git a/tests/pending/pos/t5604/ReplReporter.scala b/tests/pos/t5604/ReplReporter.scala
index 9423efd8a..9423efd8a 100644
--- a/tests/pending/pos/t5604/ReplReporter.scala
+++ b/tests/pos/t5604/ReplReporter.scala
diff --git a/tests/pending/pos/t5726.scala b/tests/pos/t5726.scala
index 1ef14ac79..1ef14ac79 100644
--- a/tests/pending/pos/t5726.scala
+++ b/tests/pos/t5726.scala
diff --git a/tests/pending/pos/t5769.scala b/tests/pos/t5769.scala
index d7ec23a56..d7ec23a56 100644
--- a/tests/pending/pos/t5769.scala
+++ b/tests/pos/t5769.scala
diff --git a/tests/pending/pos/t578.scala b/tests/pos/t578.scala
index 6f95dd8ce..6f95dd8ce 100644
--- a/tests/pending/pos/t578.scala
+++ b/tests/pos/t578.scala
diff --git a/tests/pending/pos/t5899.scala b/tests/pos/t5899.scala
index 852b4e3e7..852b4e3e7 100644
--- a/tests/pending/pos/t5899.scala
+++ b/tests/pos/t5899.scala
diff --git a/tests/pending/pos/t6084.scala b/tests/pos/t6084.scala
index 1aa1fed39..1aa1fed39 100644
--- a/tests/pending/pos/t6084.scala
+++ b/tests/pos/t6084.scala
diff --git a/tests/pending/pos/t6722.scala b/tests/pos/t6722.scala
index 576746c91..576746c91 100644
--- a/tests/pending/pos/t6722.scala
+++ b/tests/pos/t6722.scala
diff --git a/tests/pending/pos/t6815_import.scala b/tests/pos/t6815_import.scala
index 56f4358d5..56f4358d5 100644
--- a/tests/pending/pos/t6815_import.scala
+++ b/tests/pos/t6815_import.scala
diff --git a/tests/pending/pos/t6948.scala b/tests/pos/t6948.scala
index 12a1d7eaf..12a1d7eaf 100644
--- a/tests/pending/pos/t6948.scala
+++ b/tests/pos/t6948.scala
diff --git a/tests/pending/pos/t7294.scala b/tests/pos/t7294.scala
index ccac2b140..ccac2b140 100644
--- a/tests/pending/pos/t7294.scala
+++ b/tests/pos/t7294.scala
diff --git a/tests/pending/pos/t7426.scala b/tests/pos/t7426.scala
index 8e42ad181..8e42ad181 100644
--- a/tests/pending/pos/t7426.scala
+++ b/tests/pos/t7426.scala
diff --git a/tests/pending/pos/t7517.scala b/tests/pos/t7517.scala
index d0462c48d..d0462c48d 100644
--- a/tests/pending/pos/t7517.scala
+++ b/tests/pos/t7517.scala
diff --git a/tests/pos/t762.scala b/tests/pos/t762.scala
new file mode 100644
index 000000000..c5bf39b0c
--- /dev/null
+++ b/tests/pos/t762.scala
@@ -0,0 +1,4 @@
+trait Foo { type T }
+trait Bar1 extends Foo { val x : Foo { type T <: Bar1.this.T } = this }
+trait Bar2 extends Foo { val x : Foo { type T = Bar2.this.T } = this }
+trait Bar3 extends Foo { val x : Foo { type T >: Bar3.this.T } = this }
diff --git a/tests/pending/pos/t7668.scala b/tests/pos/t7668.scala
index 222a13d03..6657ffab6 100644
--- a/tests/pending/pos/t7668.scala
+++ b/tests/pos/t7668.scala
@@ -8,5 +8,5 @@ trait Extractor {
}
class Sub extends Extractor {
- def extract(s: Space) = s.x
+ def extract(ss: Space) = ss.x
}
diff --git a/tests/pending/pos/t7902.scala b/tests/pos/t7902.scala
index 7793d3723..7793d3723 100644
--- a/tests/pending/pos/t7902.scala
+++ b/tests/pos/t7902.scala
diff --git a/tests/pending/pos/t8046c.scala b/tests/pos/t8046c.scala
index f05b4c15b..f05b4c15b 100644
--- a/tests/pending/pos/t8046c.scala
+++ b/tests/pos/t8046c.scala
diff --git a/tests/pending/pos/t807.scala b/tests/pos/t807.scala
index 0eeb92ea2..0eeb92ea2 100644
--- a/tests/pending/pos/t807.scala
+++ b/tests/pos/t807.scala
diff --git a/tests/pending/pos/t8300-patmat-a.scala b/tests/pos/t8300-patmat-a.scala
index ab3a3c960..ab3a3c960 100644
--- a/tests/pending/pos/t8300-patmat-a.scala
+++ b/tests/pos/t8300-patmat-a.scala
diff --git a/tests/pending/pos/t8300-patmat-b.scala b/tests/pos/t8300-patmat-b.scala
index 0acad4406..0acad4406 100644
--- a/tests/pending/pos/t8300-patmat-b.scala
+++ b/tests/pos/t8300-patmat-b.scala
diff --git a/tests/pending/pos/t8301b.scala b/tests/pos/t8301b.scala
index 4dd39139d..4dd39139d 100644
--- a/tests/pending/pos/t8301b.scala
+++ b/tests/pos/t8301b.scala
diff --git a/tests/pending/pos/t8364.scala b/tests/pos/t8364.scala
index 7a7ea1ff1..7a7ea1ff1 100644
--- a/tests/pending/pos/t8364.scala
+++ b/tests/pos/t8364.scala
diff --git a/tests/pending/pos/trait-force-info.scala b/tests/pos/trait-force-info.scala
index c2b33869c..c2b33869c 100644
--- a/tests/pending/pos/trait-force-info.scala
+++ b/tests/pos/trait-force-info.scala
diff --git a/tests/pos/tryWithoutHandler.scala b/tests/pos/tryWithoutHandler.scala
new file mode 100644
index 000000000..ffe334984
--- /dev/null
+++ b/tests/pos/tryWithoutHandler.scala
@@ -0,0 +1,7 @@
+object Test {
+ def main(args: Array[String]): Unit = {
+ try {
+ println("hello")
+ }
+ }
+}
diff --git a/tests/pending/run/applydynamic_sip.check b/tests/run/applydynamic_sip.check
index 6d04dc452..6d04dc452 100644
--- a/tests/pending/run/applydynamic_sip.check
+++ b/tests/run/applydynamic_sip.check
diff --git a/tests/pending/run/applydynamic_sip.flags b/tests/run/applydynamic_sip.flags
index ba6d37305..ba6d37305 100644
--- a/tests/pending/run/applydynamic_sip.flags
+++ b/tests/run/applydynamic_sip.flags
diff --git a/tests/pending/run/applydynamic_sip.scala b/tests/run/applydynamic_sip.scala
index a163ab960..7f81a644a 100644
--- a/tests/pending/run/applydynamic_sip.scala
+++ b/tests/run/applydynamic_sip.scala
@@ -1,3 +1,4 @@
+import scala.language.dynamics
object Test extends dotty.runtime.LegacyApp {
object stubUpdate {
def update(as: Any*) = println(".update"+as.toList.mkString("(",", ", ")"))
diff --git a/tests/pending/run/dynamic-anyval.check b/tests/run/dynamic-anyval.check
index dee7bef8e..dee7bef8e 100644
--- a/tests/pending/run/dynamic-anyval.check
+++ b/tests/run/dynamic-anyval.check
diff --git a/tests/pending/run/dynamic-anyval.scala b/tests/run/dynamic-anyval.scala
index 605503d37..605503d37 100644
--- a/tests/pending/run/dynamic-anyval.scala
+++ b/tests/run/dynamic-anyval.scala
diff --git a/tests/run/dynamicDynamicTests.scala b/tests/run/dynamicDynamicTests.scala
index 3f8da8298..05b878f1c 100644
--- a/tests/run/dynamicDynamicTests.scala
+++ b/tests/run/dynamicDynamicTests.scala
@@ -23,7 +23,16 @@ class Baz extends scala.Dynamic {
def updateDynamic(name: String)(value: String): String = "updateDynamic(" + name + ")(" + value + ")"
}
+class Qux extends scala.Dynamic {
+ def selectDynamic[T](name: String): String = "selectDynamic(" + name + ")"
+ def applyDynamic[T](name: String)(args: String*): String = "applyDynamic(" + name + ")" + args.mkString("(", ", ", ")")
+ def applyDynamicNamed[T](name: String)(args: (String, Any)*): String = "applyDynamicNamed(" + name + ")" + args.mkString("(", ", ", ")")
+ def updateDynamic[T](name: String)(value: T): String = "updateDynamic(" + name + ")(" + value + ")"
+}
+
object Test {
+ val qux = new Qux
+
implicit class StringUpdater(str: String) {
def update(name: String, v: String) = s"$str.update(" + name + ", " + v + ")"
}
@@ -42,6 +51,7 @@ object Test {
runFooTests2()
runBarTests()
runBazTests()
+ runQuxTests()
assert(!failed)
}
@@ -161,4 +171,35 @@ object Test {
assertEquals("selectDynamic(bazSelectUpdate).update(7, value)", baz.bazSelectUpdate(7) = "value")
assertEquals("selectDynamic(bazSelectUpdate).update(7, 10)", baz.bazSelectUpdate(7) = 10)
}
+
+ /** Test correct lifting of type parameters */
+ def runQuxTests() = {
+ implicit def intToString(n: Int): String = n.toString
+
+ val qux = new Qux
+
+ assertEquals("selectDynamic(quxSelect)", qux.quxSelect)
+ assertEquals("selectDynamic(quxSelect)", qux.quxSelect[Int])
+
+ assertEquals("applyDynamic(quxApply)()", qux.quxApply())
+ assertEquals("applyDynamic(quxApply)()", qux.quxApply[Int]())
+ assertEquals("applyDynamic(quxApply)(1)", qux.quxApply(1))
+ assertEquals("applyDynamic(quxApply)(1)", qux.quxApply[Int](1))
+ assertEquals("applyDynamic(quxApply)(1, 2, 3)", qux.quxApply(1, 2, 3))
+ assertEquals("applyDynamic(quxApply)(1, 2, 3)", qux.quxApply[Int](1, 2, 3))
+ assertEquals("applyDynamic(quxApply)(1, 2, a)", qux.quxApply(1, 2, "a"))
+ assertEquals("applyDynamic(quxApply)(1, 2, a)", qux.quxApply[Int](1, 2, "a"))
+
+ assertEquals("applyDynamicNamed(quxApplyNamed)((a,1))", qux.quxApplyNamed(a = 1))
+ assertEquals("applyDynamicNamed(quxApplyNamed)((a,1))", qux.quxApplyNamed[Int](a = 1))
+ assertEquals("applyDynamicNamed(quxApplyNamed)((a,1), (b,2))", qux.quxApplyNamed(a = 1, b = "2"))
+ assertEquals("applyDynamicNamed(quxApplyNamed)((a,1), (b,2))", qux.quxApplyNamed[Int](a = 1, b = "2"))
+ assertEquals("applyDynamicNamed(quxApplyNamed)((a,1), (,abc))", qux.quxApplyNamed(a = 1, "abc"))
+ assertEquals("applyDynamicNamed(quxApplyNamed)((a,1), (,abc))", qux.quxApplyNamed[Int](a = 1, "abc"))
+
+ assertEquals("updateDynamic(quxUpdate)(abc)", qux.quxUpdate = "abc")
+
+ assertEquals("selectDynamic(quxSelectUpdate).update(key, value)", qux.quxSelectUpdate("key") = "value")
+ assertEquals("selectDynamic(quxSelectUpdate).update(key, value)", qux.quxSelectUpdate[Int]("key") = "value")
+ }
}
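
A hedged reading of what the new Qux cases pin down: an explicit type argument on a dynamic call is carried over onto the underlying selectDynamic/applyDynamic call. A stripped-down stand-in (class D and the member names foo and bar are made up, not the test's Qux; the rewrites in the comments are the standard scala.Dynamic ones):

import scala.language.dynamics

object DynamicLiftingSketch {
  class D extends scala.Dynamic {
    def selectDynamic[T](name: String): String = "selectDynamic(" + name + ")"
    def applyDynamic[T](name: String)(args: Any*): String =
      "applyDynamic(" + name + ")" + args.mkString("(", ", ", ")")
  }

  val d = new D
  val s1 = d.foo[Int]       // rewritten to d.selectDynamic[Int]("foo")
  val s2 = d.bar[Int](1, 2) // rewritten to d.applyDynamic[Int]("bar")(1, 2)
}
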
diff --git a/tests/run/i1490.check b/tests/run/i1490.check
new file mode 100644
index 000000000..9e8a46acf
--- /dev/null
+++ b/tests/run/i1490.check
@@ -0,0 +1,3 @@
+true
+true
+false
diff --git a/tests/run/i1490.scala b/tests/run/i1490.scala
new file mode 100644
index 000000000..554bc3940
--- /dev/null
+++ b/tests/run/i1490.scala
@@ -0,0 +1,13 @@
+class Base {
+ type T = Int | Boolean
+ def test(x: Object) = x.isInstanceOf[T]
+}
+
+object Test {
+ def main(args: Array[String]) = {
+ val b = new Base
+ println(b.test(Int.box(3)))
+ println(b.test(Boolean.box(false)))
+ println(b.test(Double.box(3.4)))
+ }
+}
\ No newline at end of file
diff --git a/tests/run/t1335.scala b/tests/run/t1335.scala
new file mode 100644
index 000000000..047f7b566
--- /dev/null
+++ b/tests/run/t1335.scala
@@ -0,0 +1,11 @@
+case class MyTuple(a: Int, b: Int)
+
+object Test {
+ def main(args: Array[String]): Unit =
+ try {
+ val mt: MyTuple = null
+ val MyTuple(a, b) = mt
+ } catch {
+ case e: MatchError => ()
+ }
+}
diff --git a/tests/run/t1500b.scala b/tests/run/t1500b.scala
new file mode 100644
index 000000000..8b52731a5
--- /dev/null
+++ b/tests/run/t1500b.scala
@@ -0,0 +1,21 @@
+sealed trait Nat
+sealed trait Succ[Prev <: Nat] extends Nat
+sealed trait Zero extends Nat
+
+case class ToInt[N <: Nat](value: Int)
+
+object ToInt {
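+  // Implicit search computes ToInt[N] by induction: caseSucc peels one Succ per step
+  // and caseZero terminates the recursion at 0.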
+ implicit val caseZero: ToInt[Zero] = ToInt(0)
+ implicit def caseSucc[Prev <: Nat](implicit e: ToInt[Prev]): ToInt[Succ[Prev]] = ToInt(e.value + 1)
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(implicitly[ToInt[Zero]].value == 0)
+ assert(implicitly[ToInt[Succ[Zero]]].value == 1)
+ assert(implicitly[ToInt[Succ[Succ[Zero]]]].value == 2)
+ assert(implicitly[ToInt[Succ[Succ[Succ[Zero]]]]].value == 3)
+ assert(implicitly[ToInt[Succ[Succ[Succ[Succ[Zero]]]]]].value == 4)
+ assert(implicitly[ToInt[Succ[Succ[Succ[Succ[Succ[Zero]]]]]]].value == 5)
+ }
+}
diff --git a/tests/run/t1500c.scala b/tests/run/t1500c.scala
new file mode 100644
index 000000000..5c33b7a2f
--- /dev/null
+++ b/tests/run/t1500c.scala
@@ -0,0 +1,19 @@
+sealed trait HList
+sealed trait HNil extends HList
+sealed trait ::[H, T <: HList] extends HList
+
+case class Size[L <: HList](value: Int)
+
+object Size {
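+  // Size[L] is derived by implicit induction over the HList: caseHCons adds 1 per ::
+  // and caseHNil terminates the recursion at 0.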
+ implicit val caseHNil: Size[HNil] = Size(0)
+ implicit def caseHCons[H, T <: HList](implicit e: Size[T]): Size[H :: T] = Size(e.value + 1)
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ assert(implicitly[Size[HNil]].value == 0)
+ assert(implicitly[Size[Int :: HNil]].value == 1)
+ assert(implicitly[Size[Int :: Int :: HNil]].value == 2)
+ assert(implicitly[Size[Int :: Int :: Int :: HNil]].value == 3)
+ }
+}
diff --git a/tests/pending/run/t298.check b/tests/run/t298.check
index 1cd1d2266..1cd1d2266 100644
--- a/tests/pending/run/t298.check
+++ b/tests/run/t298.check
diff --git a/tests/pending/run/t298.scala b/tests/run/t298.scala
index 5728bb6c9..5728bb6c9 100644
--- a/tests/pending/run/t298.scala
+++ b/tests/run/t298.scala
diff --git a/tests/pending/run/t3026.check b/tests/run/t3026.check
index 8c29b615f..8c29b615f 100644
--- a/tests/pending/run/t3026.check
+++ b/tests/run/t3026.check
diff --git a/tests/pending/run/t3026.scala b/tests/run/t3026.scala
index 22dde9cc0..22dde9cc0 100755
--- a/tests/pending/run/t3026.scala
+++ b/tests/run/t3026.scala
diff --git a/tests/pending/run/t3353.check b/tests/run/t3353.check
index 8b4ae1fe6..8b4ae1fe6 100644
--- a/tests/pending/run/t3353.check
+++ b/tests/run/t3353.check
diff --git a/tests/pending/run/t3353.scala b/tests/run/t3353.scala
index 472723b3c..472723b3c 100644
--- a/tests/pending/run/t3353.scala
+++ b/tests/run/t3353.scala
diff --git a/tests/pending/run/t4536.check b/tests/run/t4536.check
index 0c5a72ada..0c5a72ada 100644
--- a/tests/pending/run/t4536.check
+++ b/tests/run/t4536.check
diff --git a/tests/pending/run/t4536.flags b/tests/run/t4536.flags
index 1141f9750..1141f9750 100644
--- a/tests/pending/run/t4536.flags
+++ b/tests/run/t4536.flags
diff --git a/tests/pending/run/t4536.scala b/tests/run/t4536.scala
index 6661eae6a..89a93a5e0 100644
--- a/tests/pending/run/t4536.scala
+++ b/tests/run/t4536.scala
@@ -1,8 +1,4 @@
-
-
-
-
-
+import scala.language.dynamics
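+// The feature import replaces the -language:dynamics flag from t4536.flags; .flags files are currently ignored.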
object dynamicObject extends Dynamic {
def applyDynamic(m: String)() = println("obj: " + m);
@@ -38,7 +34,7 @@ object dynamicMixin extends dynamicAbstractClass with dynamicTrait {
object Test {
- def main(args: Array[String]) {
+ def main(args: Array[String]) = {
val cls = new dynamicClass
dynamicMixin
}
diff --git a/tests/pending/run/t5040.check b/tests/run/t5040.check
index 3f7b5908a..3f7b5908a 100644
--- a/tests/pending/run/t5040.check
+++ b/tests/run/t5040.check
diff --git a/tests/pending/run/t5040.flags b/tests/run/t5040.flags
index 1141f9750..1141f9750 100644
--- a/tests/pending/run/t5040.flags
+++ b/tests/run/t5040.flags
diff --git a/tests/pending/run/t5040.scala b/tests/run/t5040.scala
index 6813c1b27..58d054412 100644
--- a/tests/pending/run/t5040.scala
+++ b/tests/run/t5040.scala
@@ -1,3 +1,4 @@
+import scala.language.dynamics // originally enabled via the -language:dynamics flag in t5040.flags; .flags files are currently ignored
abstract class Prova2 extends Dynamic {
def applyDynamic(m: String)(): Unit
private def privateMethod() = println("private method")
diff --git a/tests/pending/run/t5733.check b/tests/run/t5733.check
index e697046a9..e697046a9 100644
--- a/tests/pending/run/t5733.check
+++ b/tests/run/t5733.check
diff --git a/tests/pending/run/t5733.scala b/tests/run/t5733.scala
index a9e58d77e..a9e58d77e 100644
--- a/tests/pending/run/t5733.scala
+++ b/tests/run/t5733.scala
diff --git a/tests/pending/run/t6353.check b/tests/run/t6353.check
index 5676bed24..5676bed24 100644
--- a/tests/pending/run/t6353.check
+++ b/tests/run/t6353.check
diff --git a/tests/pending/run/t6353.scala b/tests/run/t6353.scala
index 7077eaeda..7077eaeda 100644
--- a/tests/pending/run/t6353.scala
+++ b/tests/run/t6353.scala
diff --git a/tests/pending/run/t6355.check b/tests/run/t6355.check
index ce74ab38a..ce74ab38a 100644
--- a/tests/pending/run/t6355.check
+++ b/tests/run/t6355.check
diff --git a/tests/pending/run/t6355.scala b/tests/run/t6355.scala
index f1921391a..f1921391a 100644
--- a/tests/pending/run/t6355.scala
+++ b/tests/run/t6355.scala
diff --git a/tests/pending/run/t6663.check b/tests/run/t6663.check
index d81cc0710..d81cc0710 100644
--- a/tests/pending/run/t6663.check
+++ b/tests/run/t6663.check
diff --git a/tests/pending/run/t6663.flags b/tests/run/t6663.flags
index ea7fc37e1..ea7fc37e1 100644
--- a/tests/pending/run/t6663.flags
+++ b/tests/run/t6663.flags
diff --git a/tests/pending/run/t6663.scala b/tests/run/t6663.scala
index bfe464ad6..bfe464ad6 100644
--- a/tests/pending/run/t6663.scala
+++ b/tests/run/t6663.scala
diff --git a/tests/pending/run/unapply.scala b/tests/run/unapply.scala
index 43f02b9f3..7b10030ba 100644
--- a/tests/pending/run/unapply.scala
+++ b/tests/run/unapply.scala
@@ -87,8 +87,8 @@ object Mas {
object LisSeqArr {
def run(): Unit = {
- assert((1,2) == ((List(1,2,3): Any) match { case List(x,y,_*) => (x,y)}))
- assert((1,2) == ((List(1,2,3): Any) match { case Seq(x,y,_*) => (x,y)}))
+ assert((1,2) == ((List(1,2,3): Any) match { case List(x,y,_: _*) => (x,y)}))
+ assert((1,2) == ((List(1,2,3): Any) match { case Seq(x,y,_: _*) => (x,y)}))
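+    // The vararg wildcard is written `_: _*` here; the bare `_*` form used in the original
+    // pending test was replaced so the test compiles once moved to tests/run.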
}
}
diff --git a/tests/untried/neg/applydynamic_sip.check b/tests/untried/neg/applydynamic_sip.check
deleted file mode 100644
index 2cb2e7f09..000000000
--- a/tests/untried/neg/applydynamic_sip.check
+++ /dev/null
@@ -1,73 +0,0 @@
-applydynamic_sip.scala:7: error: applyDynamic does not support passing a vararg parameter
- qual.sel(a, a2: _*)
- ^
-applydynamic_sip.scala:8: error: applyDynamicNamed does not support passing a vararg parameter
- qual.sel(arg = a, a2: _*)
- ^
-applydynamic_sip.scala:8: error: not found: value arg
- qual.sel(arg = a, a2: _*)
- ^
-applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter
- qual.sel(arg, arg2 = "a2", a2: _*)
- ^
-applydynamic_sip.scala:9: error: not found: value arg
- qual.sel(arg, arg2 = "a2", a2: _*)
- ^
-applydynamic_sip.scala:9: error: not found: value arg2
- qual.sel(arg, arg2 = "a2", a2: _*)
- ^
-applydynamic_sip.scala:18: error: type mismatch;
- found : String("sel")
- required: Int
-error after rewriting to Test.this.bad1.selectDynamic("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad1.sel
- ^
-applydynamic_sip.scala:19: error: type mismatch;
- found : String("sel")
- required: Int
-error after rewriting to Test.this.bad1.applyDynamic("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad1.sel(1)
- ^
-applydynamic_sip.scala:20: error: type mismatch;
- found : String("sel")
- required: Int
-error after rewriting to Test.this.bad1.applyDynamicNamed("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad1.sel(a = 1)
- ^
-applydynamic_sip.scala:20: error: reassignment to val
- bad1.sel(a = 1)
- ^
-applydynamic_sip.scala:21: error: type mismatch;
- found : String("sel")
- required: Int
-error after rewriting to Test.this.bad1.updateDynamic("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad1.sel = 1
- ^
-applydynamic_sip.scala:29: error: Int does not take parameters
-error after rewriting to Test.this.bad2.selectDynamic("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad2.sel
- ^
-applydynamic_sip.scala:30: error: Int does not take parameters
-error after rewriting to Test.this.bad2.applyDynamic("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad2.sel(1)
- ^
-applydynamic_sip.scala:31: error: Int does not take parameters
-error after rewriting to Test.this.bad2.applyDynamicNamed("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad2.sel(a = 1)
- ^
-applydynamic_sip.scala:31: error: reassignment to val
- bad2.sel(a = 1)
- ^
-applydynamic_sip.scala:32: error: Int does not take parameters
-error after rewriting to Test.this.bad2.updateDynamic("sel")
-possible cause: maybe a wrong Dynamic method signature?
- bad2.sel = 1
- ^
-16 errors found
diff --git a/tests/untried/neg/applydynamic_sip.scala b/tests/untried/neg/applydynamic_sip.scala
deleted file mode 100644
index ee4432ebe..000000000
--- a/tests/untried/neg/applydynamic_sip.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-object Test extends App {
- val qual: Dynamic = ???
- val expr = "expr"
- val a = "a"
- val a2 = "a2"
-
- qual.sel(a, a2: _*)
- qual.sel(arg = a, a2: _*)
- qual.sel(arg, arg2 = "a2", a2: _*)
-
- val bad1 = new Dynamic {
- def selectDynamic(n: Int) = n
- def applyDynamic(n: Int) = n
- def applyDynamicNamed(n: Int) = n
- def updateDynamic(n: Int) = n
-
- }
- bad1.sel
- bad1.sel(1)
- bad1.sel(a = 1)
- bad1.sel = 1
-
- val bad2 = new Dynamic {
- def selectDynamic = 1
- def applyDynamic = 1
- def applyDynamicNamed = 1
- def updateDynamic = 1
- }
- bad2.sel
- bad2.sel(1)
- bad2.sel(a = 1)
- bad2.sel = 1
-}