aboutsummaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/dotty/DottyPredef.scala32
-rw-r--r--src/dotty/annotation/internal/Body.scala8
-rw-r--r--src/dotty/annotation/internal/Child.scala12
-rw-r--r--src/dotty/annotation/internal/InlineParam.scala6
-rw-r--r--src/dotty/annotation/internal/SourceFile.scala10
-rw-r--r--src/dotty/language.scala16
-rw-r--r--src/dotty/runtime/Arrays.scala35
-rw-r--r--src/dotty/runtime/LazyVals.scala27
-rw-r--r--src/dotty/tools/backend/jvm/CollectEntryPoints.scala2
-rw-r--r--src/dotty/tools/backend/jvm/CollectSuperCalls.scala42
-rw-r--r--src/dotty/tools/backend/jvm/DottyBackendInterface.scala60
-rw-r--r--src/dotty/tools/backend/jvm/GenBCode.scala12
-rw-r--r--src/dotty/tools/backend/sjs/JSCodeGen.scala44
-rw-r--r--src/dotty/tools/backend/sjs/JSPrimitives.scala12
-rw-r--r--src/dotty/tools/dotc/Bench.scala4
-rw-r--r--src/dotty/tools/dotc/Compiler.scala115
-rw-r--r--src/dotty/tools/dotc/Driver.scala4
-rw-r--r--src/dotty/tools/dotc/FromTasty.scala7
-rw-r--r--src/dotty/tools/dotc/Main.scala3
-rw-r--r--src/dotty/tools/dotc/Resident.scala5
-rw-r--r--src/dotty/tools/dotc/Run.scala63
-rw-r--r--src/dotty/tools/dotc/ast/Desugar.scala177
-rw-r--r--src/dotty/tools/dotc/ast/NavigateAST.scala9
-rw-r--r--src/dotty/tools/dotc/ast/Positioned.scala84
-rw-r--r--src/dotty/tools/dotc/ast/TreeInfo.scala102
-rw-r--r--src/dotty/tools/dotc/ast/TreeTypeMap.scala16
-rw-r--r--src/dotty/tools/dotc/ast/Trees.scala203
-rw-r--r--src/dotty/tools/dotc/ast/tpd.scala110
-rw-r--r--src/dotty/tools/dotc/ast/untpd.scala148
-rw-r--r--src/dotty/tools/dotc/config/CompilerCommand.scala12
-rw-r--r--src/dotty/tools/dotc/config/Config.scala22
-rw-r--r--src/dotty/tools/dotc/config/PathResolver.scala7
-rw-r--r--src/dotty/tools/dotc/config/Printers.scala4
-rw-r--r--src/dotty/tools/dotc/config/ScalaSettings.scala88
-rw-r--r--src/dotty/tools/dotc/config/Settings.scala16
-rw-r--r--src/dotty/tools/dotc/core/Annotations.scala36
-rw-r--r--src/dotty/tools/dotc/core/CheckRealizable.scala1
-rw-r--r--src/dotty/tools/dotc/core/Comments.scala458
-rw-r--r--src/dotty/tools/dotc/core/Constants.scala15
-rw-r--r--src/dotty/tools/dotc/core/Constraint.scala16
-rw-r--r--src/dotty/tools/dotc/core/ConstraintHandling.scala125
-rw-r--r--src/dotty/tools/dotc/core/ConstraintRunInfo.scala3
-rw-r--r--src/dotty/tools/dotc/core/Contexts.scala47
-rw-r--r--src/dotty/tools/dotc/core/Decorators.scala93
-rw-r--r--src/dotty/tools/dotc/core/Definitions.scala136
-rw-r--r--src/dotty/tools/dotc/core/Denotations.scala373
-rw-r--r--src/dotty/tools/dotc/core/Flags.scala25
-rw-r--r--src/dotty/tools/dotc/core/Mode.scala (renamed from src/dotty/tools/dotc/typer/Mode.scala)16
-rw-r--r--src/dotty/tools/dotc/core/NameOps.scala34
-rw-r--r--src/dotty/tools/dotc/core/Names.scala5
-rw-r--r--src/dotty/tools/dotc/core/OrderingConstraint.scala82
-rw-r--r--src/dotty/tools/dotc/core/Phases.scala8
-rw-r--r--src/dotty/tools/dotc/core/Signature.scala37
-rw-r--r--src/dotty/tools/dotc/core/StdNames.scala25
-rw-r--r--src/dotty/tools/dotc/core/Substituters.scala29
-rw-r--r--src/dotty/tools/dotc/core/SymDenotations.scala168
-rw-r--r--src/dotty/tools/dotc/core/SymbolLoaders.scala6
-rw-r--r--src/dotty/tools/dotc/core/Symbols.scala48
-rw-r--r--src/dotty/tools/dotc/core/TypeApplications.scala668
-rw-r--r--src/dotty/tools/dotc/core/TypeComparer.scala709
-rw-r--r--src/dotty/tools/dotc/core/TypeErasure.scala64
-rw-r--r--src/dotty/tools/dotc/core/TypeOps.scala345
-rw-r--r--src/dotty/tools/dotc/core/TypeParamInfo.scala40
-rw-r--r--src/dotty/tools/dotc/core/TyperState.scala33
-rw-r--r--src/dotty/tools/dotc/core/Types.scala1137
-rw-r--r--src/dotty/tools/dotc/core/Uniques.scala4
-rw-r--r--src/dotty/tools/dotc/core/classfile/ClassfileParser.scala62
-rw-r--r--src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala36
-rw-r--r--src/dotty/tools/dotc/core/tasty/PositionPickler.scala92
-rw-r--r--src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala40
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyFormat.scala60
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyPickler.scala1
-rw-r--r--src/dotty/tools/dotc/core/tasty/TastyPrinter.scala4
-rw-r--r--src/dotty/tools/dotc/core/tasty/TreePickler.scala52
-rw-r--r--src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala439
-rw-r--r--src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala111
-rw-r--r--src/dotty/tools/dotc/parsing/JavaParsers.scala176
-rw-r--r--src/dotty/tools/dotc/parsing/JavaScanners.scala1
-rw-r--r--src/dotty/tools/dotc/parsing/Parsers.scala402
-rw-r--r--src/dotty/tools/dotc/parsing/Scanners.scala89
-rw-r--r--src/dotty/tools/dotc/parsing/Tokens.scala6
-rw-r--r--src/dotty/tools/dotc/printing/Disambiguation.scala86
-rw-r--r--src/dotty/tools/dotc/printing/Formatting.scala258
-rw-r--r--src/dotty/tools/dotc/printing/Highlighting.scala77
-rw-r--r--src/dotty/tools/dotc/printing/PlainPrinter.scala86
-rw-r--r--src/dotty/tools/dotc/printing/Printer.scala3
-rw-r--r--src/dotty/tools/dotc/printing/RefinedPrinter.scala151
-rw-r--r--src/dotty/tools/dotc/printing/Showable.scala6
-rw-r--r--src/dotty/tools/dotc/printing/SyntaxHighlighting.scala304
-rw-r--r--src/dotty/tools/dotc/repl/AmmoniteReader.scala82
-rw-r--r--src/dotty/tools/dotc/repl/CompilingInterpreter.scala305
-rw-r--r--src/dotty/tools/dotc/repl/InteractiveReader.scala23
-rw-r--r--src/dotty/tools/dotc/repl/Interpreter.scala17
-rw-r--r--src/dotty/tools/dotc/repl/InterpreterLoop.scala119
-rw-r--r--src/dotty/tools/dotc/repl/Main.scala3
-rw-r--r--src/dotty/tools/dotc/repl/REPL.scala75
-rw-r--r--src/dotty/tools/dotc/repl/SimpleReader.scala1
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/Ansi.scala256
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/Filter.scala61
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/FilterTools.scala80
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/LICENSE25
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/Protocol.scala30
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala81
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/Terminal.scala320
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/Utils.scala169
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala163
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala170
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala334
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala165
-rw-r--r--src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala157
-rw-r--r--src/dotty/tools/dotc/reporting/ConsoleReporter.scala150
-rw-r--r--src/dotty/tools/dotc/reporting/Diagnostic.scala47
-rw-r--r--src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala7
-rw-r--r--src/dotty/tools/dotc/reporting/Reporter.scala158
-rw-r--r--src/dotty/tools/dotc/reporting/StoreReporter.scala17
-rw-r--r--src/dotty/tools/dotc/reporting/ThrowingReporter.scala8
-rw-r--r--src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala23
-rw-r--r--src/dotty/tools/dotc/reporting/diagnostic/Message.scala106
-rw-r--r--src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala74
-rw-r--r--src/dotty/tools/dotc/reporting/diagnostic/messages.scala277
-rw-r--r--src/dotty/tools/dotc/rewrite/Rewrites.scala2
-rw-r--r--src/dotty/tools/dotc/sbt/ExtractAPI.scala518
-rw-r--r--src/dotty/tools/dotc/sbt/ExtractDependencies.scala268
-rw-r--r--src/dotty/tools/dotc/sbt/ShowAPI.scala156
-rw-r--r--src/dotty/tools/dotc/sbt/ThunkHolder.scala61
-rw-r--r--src/dotty/tools/dotc/transform/ArrayConstructors.scala59
-rw-r--r--src/dotty/tools/dotc/transform/CheckReentrant.scala4
-rw-r--r--src/dotty/tools/dotc/transform/CheckStatic.scala22
-rw-r--r--src/dotty/tools/dotc/transform/CollectEntryPoints.scala2
-rw-r--r--src/dotty/tools/dotc/transform/Constructors.scala4
-rw-r--r--src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled2
-rw-r--r--src/dotty/tools/dotc/transform/DropInlined.scala15
-rw-r--r--src/dotty/tools/dotc/transform/ElimByName.scala3
-rw-r--r--src/dotty/tools/dotc/transform/ElimErasedValueType.scala3
-rw-r--r--src/dotty/tools/dotc/transform/ElimRepeated.scala2
-rw-r--r--src/dotty/tools/dotc/transform/ElimStaticThis.scala8
-rw-r--r--src/dotty/tools/dotc/transform/Erasure.scala130
-rw-r--r--src/dotty/tools/dotc/transform/ExpandPrivate.scala29
-rw-r--r--src/dotty/tools/dotc/transform/ExpandSAMs.scala3
-rw-r--r--src/dotty/tools/dotc/transform/ExplicitOuter.scala12
-rw-r--r--src/dotty/tools/dotc/transform/ExplicitSelf.scala14
-rw-r--r--src/dotty/tools/dotc/transform/ExtensionMethods.scala34
-rw-r--r--src/dotty/tools/dotc/transform/FirstTransform.scala23
-rw-r--r--src/dotty/tools/dotc/transform/FullParameterization.scala42
-rw-r--r--src/dotty/tools/dotc/transform/GetClass.scala3
-rw-r--r--src/dotty/tools/dotc/transform/Getters.scala1
-rw-r--r--src/dotty/tools/dotc/transform/InterceptedMethods.scala1
-rw-r--r--src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala168
-rw-r--r--src/dotty/tools/dotc/transform/LambdaLift.scala33
-rw-r--r--src/dotty/tools/dotc/transform/LazyVals.scala36
-rw-r--r--src/dotty/tools/dotc/transform/Memoize.scala30
-rw-r--r--src/dotty/tools/dotc/transform/Mixin.scala33
-rw-r--r--src/dotty/tools/dotc/transform/MixinOps.scala18
-rw-r--r--src/dotty/tools/dotc/transform/MoveStatics.scala77
-rw-r--r--src/dotty/tools/dotc/transform/PatternMatcher.scala411
-rw-r--r--src/dotty/tools/dotc/transform/Pickler.scala22
-rw-r--r--src/dotty/tools/dotc/transform/PostTyper.scala54
-rw-r--r--src/dotty/tools/dotc/transform/RestoreScopes.scala1
-rw-r--r--src/dotty/tools/dotc/transform/SelectStatic.scala56
-rw-r--r--src/dotty/tools/dotc/transform/Splitter.scala53
-rw-r--r--src/dotty/tools/dotc/transform/SuperAccessors.scala8
-rw-r--r--src/dotty/tools/dotc/transform/SyntheticMethods.scala8
-rw-r--r--src/dotty/tools/dotc/transform/TailRec.scala107
-rw-r--r--src/dotty/tools/dotc/transform/TreeChecker.scala149
-rw-r--r--src/dotty/tools/dotc/transform/TreeTransform.scala106
-rw-r--r--src/dotty/tools/dotc/transform/TryCatchPatterns.scala99
-rw-r--r--src/dotty/tools/dotc/transform/TypeTestsCasts.scala31
-rw-r--r--src/dotty/tools/dotc/transform/VCInlineMethods.scala2
-rw-r--r--src/dotty/tools/dotc/transform/patmat/Space.scala619
-rw-r--r--src/dotty/tools/dotc/typer/Applications.scala404
-rw-r--r--src/dotty/tools/dotc/typer/Checking.scala177
-rw-r--r--src/dotty/tools/dotc/typer/Docstrings.scala56
-rw-r--r--src/dotty/tools/dotc/typer/Dynamic.scala104
-rw-r--r--src/dotty/tools/dotc/typer/ErrorReporting.scala99
-rw-r--r--src/dotty/tools/dotc/typer/EtaExpansion.scala8
-rw-r--r--src/dotty/tools/dotc/typer/FrontEnd.scala9
-rw-r--r--src/dotty/tools/dotc/typer/Implicits.scala189
-rw-r--r--src/dotty/tools/dotc/typer/ImportInfo.scala7
-rw-r--r--src/dotty/tools/dotc/typer/Inferencing.scala62
-rw-r--r--src/dotty/tools/dotc/typer/Inliner.scala521
-rw-r--r--src/dotty/tools/dotc/typer/Namer.scala267
-rw-r--r--src/dotty/tools/dotc/typer/ProtoTypes.scala85
-rw-r--r--src/dotty/tools/dotc/typer/ReTyper.scala24
-rw-r--r--src/dotty/tools/dotc/typer/RefChecks.scala21
-rw-r--r--src/dotty/tools/dotc/typer/TypeAssigner.scala154
-rw-r--r--src/dotty/tools/dotc/typer/Typer.scala762
-rw-r--r--src/dotty/tools/dotc/typer/VarianceChecker.scala6
-rw-r--r--src/dotty/tools/dotc/typer/Variances.scala19
-rw-r--r--src/dotty/tools/dotc/util/Attachment.scala4
-rw-r--r--src/dotty/tools/dotc/util/CommentParsing.scala239
-rw-r--r--src/dotty/tools/dotc/util/DiffUtil.scala174
-rw-r--r--src/dotty/tools/dotc/util/Property.scala10
-rw-r--r--src/dotty/tools/dotc/util/SourceFile.scala10
-rw-r--r--src/dotty/tools/dotc/util/SourcePosition.scala23
-rw-r--r--src/dotty/tools/dotc/util/Stats.scala17
-rw-r--r--src/scala/Eq.scala14
-rw-r--r--src/scala/Function23.scala21
-rw-r--r--src/scala/Function24.scala21
-rw-r--r--src/scala/Function25.scala21
-rw-r--r--src/scala/Function26.scala20
-rw-r--r--src/scala/Function27.scala20
-rw-r--r--src/scala/Function28.scala20
-rw-r--r--src/scala/Function29.scala20
-rw-r--r--src/scala/Function30.scala20
-rw-r--r--src/scala/compat/java8/JFunction.java87
-rw-r--r--src/scala/compat/java8/JFunction1.java76
-rw-r--r--src/scala/compat/java8/JFunction10.java2
-rw-r--r--src/scala/compat/java8/JFunction11.java2
-rw-r--r--src/scala/compat/java8/JFunction12.java2
-rw-r--r--src/scala/compat/java8/JFunction13.java2
-rw-r--r--src/scala/compat/java8/JFunction14.java2
-rw-r--r--src/scala/compat/java8/JFunction15.java2
-rw-r--r--src/scala/compat/java8/JFunction16.java2
-rw-r--r--src/scala/compat/java8/JFunction17.java2
-rw-r--r--src/scala/compat/java8/JFunction18.java2
-rw-r--r--src/scala/compat/java8/JFunction19.java2
-rw-r--r--src/scala/compat/java8/JFunction2.java164
-rw-r--r--src/scala/compat/java8/JFunction20.java2
-rw-r--r--src/scala/compat/java8/JFunction21.java2
-rw-r--r--src/scala/compat/java8/JFunction22.java2
-rw-r--r--src/scala/compat/java8/JFunction3.java2
-rw-r--r--src/scala/compat/java8/JFunction4.java2
-rw-r--r--src/scala/compat/java8/JFunction5.java2
-rw-r--r--src/scala/compat/java8/JFunction6.java2
-rw-r--r--src/scala/compat/java8/JFunction7.java2
-rw-r--r--src/scala/compat/java8/JFunction8.java2
-rw-r--r--src/scala/compat/java8/JFunction9.java2
-rw-r--r--src/strawman/collections/CollectionStrawMan4.scala115
-rw-r--r--src/strawman/collections/CollectionStrawMan5.scala522
-rw-r--r--src/strawman/collections/CollectionStrawMan6.scala1045
230 files changed, 16810 insertions, 4757 deletions
diff --git a/src/dotty/DottyPredef.scala b/src/dotty/DottyPredef.scala
index 9170da476..cd90c4882 100644
--- a/src/dotty/DottyPredef.scala
+++ b/src/dotty/DottyPredef.scala
@@ -3,6 +3,7 @@ package dotty
import scala.reflect.runtime.universe.TypeTag
import scala.reflect.ClassTag
import scala.Predef.???
+import scala.collection.Seq
/** unimplemented implicit for TypeTag */
object DottyPredef {
@@ -10,4 +11,35 @@ object DottyPredef {
implicit def arrayTag[T](implicit ctag: ClassTag[T]): ClassTag[Array[T]] =
ctag.wrap
+
+ /** A fall-back implicit to compare values of any types.
+ * The compiler will restrict implicit instances of `eqAny`. An instance
+ * `eqAny[T, U]` is _valid_ if `T <: U` or `U <: T` or both `T` and `U` are
+ * Eq-free. A type `S` is Eq-free if there is no implicit instance of `Eq[S, S]`.
+ * An implicit search will fail instead of returning an invalid `eqAny` instance.
+ */
+ implicit def eqAny[L, R]: Eq[L, R] = Eq
+
+ implicit def eqNumber : Eq[Number, Number] = Eq
+ implicit def eqString : Eq[String, String] = Eq
+
+ // true asymmetry, modeling the (somewhat problematic) nature of equals on Proxies
+ implicit def eqProxy : Eq[Proxy, Any] = Eq
+
+ implicit def eqSeq[T, U](implicit eq: Eq[T, U]): Eq[Seq[T], Seq[U]] = Eq
+
+ implicit def eqByteNum : Eq[Byte, Number] = Eq
+ implicit def eqNumByte : Eq[Number, Byte] = Eq
+ implicit def eqCharNum : Eq[Char, Number] = Eq
+ implicit def eqNumChar : Eq[Number, Char] = Eq
+ implicit def eqShortNum : Eq[Short, Number] = Eq
+ implicit def eqNumShort : Eq[Number, Short] = Eq
+ implicit def eqIntNum : Eq[Int, Number] = Eq
+ implicit def eqNumInt : Eq[Number, Int] = Eq
+ implicit def eqLongNum : Eq[Long, Number] = Eq
+ implicit def eqNumLong : Eq[Number, Long] = Eq
+ implicit def eqFloatNum : Eq[Float, Number] = Eq
+ implicit def eqNumFloat : Eq[Number, Float] = Eq
+ implicit def eqDoubleNum: Eq[Double, Number] = Eq
+ implicit def eqNumDouble: Eq[Number, Double] = Eq
}
diff --git a/src/dotty/annotation/internal/Body.scala b/src/dotty/annotation/internal/Body.scala
new file mode 100644
index 000000000..7e26b02f2
--- /dev/null
+++ b/src/dotty/annotation/internal/Body.scala
@@ -0,0 +1,8 @@
+package dotty.annotation.internal
+
+import scala.annotation.Annotation
+
+/** The class associated with a `BodyAnnotation`, which indicates
+ * an inline method's right hand side
+ */
+final class Body() extends Annotation
diff --git a/src/dotty/annotation/internal/Child.scala b/src/dotty/annotation/internal/Child.scala
index 23ff2a97c..9295de73e 100644
--- a/src/dotty/annotation/internal/Child.scala
+++ b/src/dotty/annotation/internal/Child.scala
@@ -2,5 +2,15 @@ package dotty.annotation.internal
import scala.annotation.Annotation
-/** An annotation to indicate a child class or object of the annotated class. */
+/** An annotation to indicate a child class or object of the annotated class.
+ * E.g. if we have
+ *
+ * sealed class A
+ * case class B() extends A
+ * case class C() extends A
+ *
+ * Then the class symbol `A` would carry the annotations
+ * `@Child[Bref] @Child[Cref]` where `Bref`, `Cref` are TypeRefs
+ * referring to the class symbols of `B` and `C`
+ */
class Child[T] extends Annotation
diff --git a/src/dotty/annotation/internal/InlineParam.scala b/src/dotty/annotation/internal/InlineParam.scala
new file mode 100644
index 000000000..a144f9edb
--- /dev/null
+++ b/src/dotty/annotation/internal/InlineParam.scala
@@ -0,0 +1,6 @@
+package dotty.annotation.internal
+
+import scala.annotation.Annotation
+
+/** An annotation produced by Namer to indicate an inline parameter */
+final class InlineParam() extends Annotation
diff --git a/src/dotty/annotation/internal/SourceFile.scala b/src/dotty/annotation/internal/SourceFile.scala
new file mode 100644
index 000000000..c49fc2c8d
--- /dev/null
+++ b/src/dotty/annotation/internal/SourceFile.scala
@@ -0,0 +1,10 @@
+package dotty.annotation.internal
+
+import scala.annotation.Annotation
+
+/** An annotation to record the source file of a class.
+ *  @param path The path of the source file containing the class.
+ */
+class SourceFile(path: String) extends Annotation {
+
+}
diff --git a/src/dotty/language.scala b/src/dotty/language.scala
deleted file mode 100644
index 96250a9f2..000000000
--- a/src/dotty/language.scala
+++ /dev/null
@@ -1,16 +0,0 @@
-package dotty
-
-object language {
-
- class Feature
-
- /** Allow higher-kinded type syntax (not yet checked) */
- val higherKinds = new Feature
-
- /** Keep union types */
- val keepUnions = new Feature
-
- /** No auto tupling */
- val noAutoTupling = new Feature
-
-}
diff --git a/src/dotty/runtime/Arrays.scala b/src/dotty/runtime/Arrays.scala
index 4469dced7..9ec5512ad 100644
--- a/src/dotty/runtime/Arrays.scala
+++ b/src/dotty/runtime/Arrays.scala
@@ -2,6 +2,8 @@ package dotty.runtime
import scala.reflect.ClassTag
+import java.lang.{reflect => jlr}
+
/** All but the first two operations should be short-circuited and implemented specially by
* the backend.
*/
@@ -22,35 +24,8 @@ object Arrays {
arr
}
- /** Create an array of type T. T must be of form Array[E], with
- * E being a reference type.
+ /** Create an array of a reference type T.
*/
- def newRefArray[T](length: Int): T = ???
-
- /** Create a Byte[] array */
- def newByteArray(length: Int): Array[Byte] = ???
-
- /** Create a Short[] array */
- def newShortArray(length: Int): Array[Short] = ???
-
- /** Create a Char[] array */
- def newCharArray(length: Int): Array[Char] = ???
-
- /** Create an Int[] array */
- def newIntArray(length: Int): Array[Int] = ???
-
- /** Create a Long[] array */
- def newLongArray(length: Int): Array[Long] = ???
-
- /** Create a Float[] array */
- def newFloatArray(length: Int): Array[Float] = ???
-
- /** Create a Double[] array */
- def newDoubleArray(length: Int): Array[Double] = ???
-
- /** Create a Boolean[] array */
- def newBooleanArray(length: Int): Array[Boolean] = ???
-
- /** Create a scala.runtime.BoxedUnit[] array */
- def newUnitArray(length: Int): Array[Unit] = ???
+ def newArray[Arr](componentType: Class[_], returnType: Class[Arr], dimensions: Array[Int]): Arr =
+ jlr.Array.newInstance(componentType, dimensions: _*).asInstanceOf[Arr]
}
diff --git a/src/dotty/runtime/LazyVals.scala b/src/dotty/runtime/LazyVals.scala
index f09e96f57..4dea0d34d 100644
--- a/src/dotty/runtime/LazyVals.scala
+++ b/src/dotty/runtime/LazyVals.scala
@@ -10,14 +10,24 @@ object LazyVals {
final val BITS_PER_LAZY_VAL = 2L
final val LAZY_VAL_MASK = 3L
+ final val debug = false
- @inline def STATE(cur: Long, ord: Int) = (cur >> (ord * BITS_PER_LAZY_VAL)) & LAZY_VAL_MASK
+ @inline def STATE(cur: Long, ord: Int) = {
+ val r = (cur >> (ord * BITS_PER_LAZY_VAL)) & LAZY_VAL_MASK
+ if (debug)
+ println(s"STATE($cur, $ord) = $r")
+ r
+ }
@inline def CAS(t: Object, offset: Long, e: Long, v: Int, ord: Int) = {
+ if (debug)
+ println(s"CAS($t, $offset, $e, $v, $ord)")
val mask = ~(LAZY_VAL_MASK << ord * BITS_PER_LAZY_VAL)
val n = (e & mask) | (v << (ord * BITS_PER_LAZY_VAL))
compareAndSet(t, offset, e, n)
}
@inline def setFlag(t: Object, offset: Long, v: Int, ord: Int) = {
+ if (debug)
+ println(s"setFlag($t, $offset, $v, $ord)")
var retry = true
while (retry) {
val cur = get(t, offset)
@@ -35,6 +45,8 @@ object LazyVals {
}
}
@inline def wait4Notification(t: Object, offset: Long, cur: Long, ord: Int) = {
+ if (debug)
+ println(s"wait4Notification($t, $offset, $cur, $ord)")
var retry = true
while (retry) {
val cur = get(t, offset)
@@ -51,7 +63,11 @@ object LazyVals {
}
@inline def compareAndSet(t: Object, off: Long, e: Long, v: Long) = unsafe.compareAndSwapLong(t, off, e, v)
- @inline def get(t: Object, off: Long) = unsafe.getLongVolatile(t, off)
+ @inline def get(t: Object, off: Long) = {
+ if (debug)
+ println(s"get($t, $off)")
+ unsafe.getLongVolatile(t, off)
+ }
val processors: Int = java.lang.Runtime.getRuntime.availableProcessors()
val base: Int = 8 * processors * processors
@@ -68,7 +84,12 @@ object LazyVals {
monitors(id)
}
- @inline def getOffset(clz: Class[_], name: String) = unsafe.objectFieldOffset(clz.getDeclaredField(name))
+ @inline def getOffset(clz: Class[_], name: String) = {
+ val r = unsafe.objectFieldOffset(clz.getDeclaredField(name))
+ if (debug)
+ println(s"getOffset($clz, $name) = $r")
+ r
+ }
object Names {
final val state = "STATE"
diff --git a/src/dotty/tools/backend/jvm/CollectEntryPoints.scala b/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
index 3ed232bc7..2ee1b6011 100644
--- a/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
+++ b/src/dotty/tools/backend/jvm/CollectEntryPoints.scala
@@ -107,7 +107,7 @@ object CollectEntryPoints{
else (possibles exists(x=> isJavaMainMethod(x.symbol))) || {
possibles exists { m =>
toDenot(m.symbol).info match {
- case t:PolyType =>
+ case t: PolyType =>
fail("main methods cannot be generic.")
case t@MethodType(paramNames, paramTypes) =>
if (t.resultType :: paramTypes exists (_.typeSymbol.isAbstractType))
diff --git a/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
new file mode 100644
index 000000000..8285bfe4b
--- /dev/null
+++ b/src/dotty/tools/backend/jvm/CollectSuperCalls.scala
@@ -0,0 +1,42 @@
+package dotty.tools.backend.jvm
+
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Flags.Trait
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Collect all super calls to trait members.
+ *
+ * For each super reference to trait member, register a call from the current class to the
+ * owner of the referenced member.
+ *
+ * This information is used to know if it is safe to remove a redundant mixin class.
+ * A redundant mixin class is one that is implemented by another mixin class. As the
+ * methods in a redundant mixin class could be implemented with a default abstract method,
+ * the redundant mixin class could be required as a parent by the JVM.
+ */
+class CollectSuperCalls extends MiniPhaseTransform {
+ import tpd._
+
+ def phaseName: String = "collectSuperCalls"
+
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ tree.qualifier match {
+ case sup: Super =>
+ if (tree.symbol.owner.is(Trait))
+ registerSuperCall(ctx.owner.enclosingClass.asClass, tree.symbol.owner.asClass)
+ case _ =>
+ }
+ tree
+ }
+
+ private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(implicit ctx: Context) = {
+ ctx.genBCodePhase match {
+ case genBCodePhase: GenBCode =>
+ genBCodePhase.registerSuperCall(sym, calls)
+ case _ =>
+ }
+ }
+}
diff --git a/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
index ef8e4997f..2d60d851c 100644
--- a/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
+++ b/src/dotty/tools/backend/jvm/DottyBackendInterface.scala
@@ -12,7 +12,7 @@ import scala.collection.generic.Clearable
import scala.collection.mutable
import scala.reflect.ClassTag
import scala.reflect.internal.util.WeakHashSet
-import scala.reflect.io.{Directory, PlainDirectory, AbstractFile}
+import scala.reflect.io.{AbstractFile, Directory, PlainDirectory}
import scala.tools.asm.{AnnotationVisitor, ClassVisitor, FieldVisitor, MethodVisitor}
import scala.tools.nsc.backend.jvm.{BCodeHelpers, BackendInterface}
import dotty.tools.dotc.core._
@@ -24,15 +24,21 @@ import Symbols._
import Denotations._
import Phases._
import java.lang.AssertionError
-import dotty.tools.dotc.util.{Positions, DotClass}
+
+import dotty.tools.dotc.util.{DotClass, Positions}
import Decorators._
import tpd._
+
import scala.tools.asm
import NameOps._
import StdNames.nme
import NameOps._
+import dotty.tools.dotc.core
+import dotty.tools.dotc.core.Names.TypeName
+
+import scala.annotation.tailrec
-class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context) extends BackendInterface{
+class DottyBackendInterface(outputDirectory: AbstractFile, val superCallsMap: Map[Symbol, Set[ClassSymbol]])(implicit ctx: Context) extends BackendInterface{
type Symbol = Symbols.Symbol
type Type = Types.Type
type Tree = tpd.Tree
@@ -64,7 +70,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
type Bind = tpd.Bind
type New = tpd.New
type Super = tpd.Super
- type Modifiers = tpd.Modifiers
+ type Modifiers = Null
type Annotation = Annotations.Annotation
type ArrayValue = tpd.JavaSeqLiteral
type ApplyDynamic = Null
@@ -153,15 +159,8 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
}.toMap
def unboxMethods: Map[Symbol, Symbol] = defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap
- private val mkArrayNames: Set[Name] = Set("Byte", "Float", "Char", "Double", "Boolean", "Unit", "Long", "Int", "Short", "Ref").map{ x=>
- ("new" + x + "Array").toTermName
- }
-
- val dottyArraysModuleClass = toDenot(defn.DottyArraysModule).moduleClass.asClass
-
-
override def isSyntheticArrayConstructor(s: Symbol) = {
- (toDenot(s).maybeOwner eq dottyArraysModuleClass) && mkArrayNames.contains(s.name)
+ s eq defn.newArrayMethod
}
def isBox(sym: Symbol): Boolean = Erasure.Boxing.isBox(sym)
@@ -640,7 +639,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
toDenot(sym)(shiftedContext).isStatic(shiftedContext)
}
- def isStaticConstructor: Boolean = isStaticMember && isClassConstructor
+ def isStaticConstructor: Boolean = (isStaticMember && isClassConstructor) || (sym.name eq core.Names.STATIC_CONSTRUCTOR)
// navigation
@@ -649,12 +648,14 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
originalOwner
}
def originalOwner: Symbol = {
+ // used to populate the EnclosingMethod attribute.
+ // it is very tricky in presence of classes (and anonymous classes) defined inside super calls.
try {
if (sym.exists) {
val original = toDenot(sym).initial
val validity = original.validFor
val shiftedContext = ctx.withPhase(validity.phaseId)
- val r = toDenot(sym)(shiftedContext).maybeOwner.enclosingClass(shiftedContext)
+ val r = toDenot(sym)(shiftedContext).maybeOwner.lexicallyEnclosingClass(shiftedContext)
r
} else NoSymbol
} catch {
@@ -723,7 +724,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
toDenot(sym).info.decls.filter(p => p.isTerm && !p.is(Flags.Method)).toList
}
def methodSymbols: List[Symbol] =
- for (f <- toDenot(sym).info.decls.toList if !f.isMethod && f.isTerm && !f.isModule) yield f
+ for (f <- toDenot(sym).info.decls.toList if f.isMethod && f.isTerm && !f.isModule) yield f
def serialVUID: Option[Long] = None
@@ -742,9 +743,18 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
/**
* All interfaces implemented by a class, except for those inherited through the superclass.
- *
+ * Redundant interfaces are removed unless there is a super call to them.
*/
- def superInterfaces: List[Symbol] = decorateSymbol(sym).directlyInheritedTraits
+ def superInterfaces: List[Symbol] = {
+ val directlyInheritedTraits = decorateSymbol(sym).directlyInheritedTraits
+ val directlyInheritedTraitsSet = directlyInheritedTraits.toSet
+ val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.symbol.asClass.baseClasses.drop(1)).toSet
+ val superCalls = superCallsMap.getOrElse(sym, Set.empty)
+ val additional = (superCalls -- directlyInheritedTraitsSet).filter(_.is(Flags.Trait))
+// if (additional.nonEmpty)
+// println(s"$fullName: adding supertraits $additional")
+ directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional
+ }
/**
* True for module classes of package level objects. The backend will generate a mirror class for
@@ -868,11 +878,11 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
"If possible, please file a bug on issues.scala-lang.org.")
tp match {
- case ThisType(ArrayClass) => ObjectReference.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test
- case ThisType(sym) => storage.getClassBTypeAndRegisterInnerClass(sym.asInstanceOf[ct.int.Symbol])
- // case t: SingletonType => primitiveOrClassToBType(t.classSymbol)
- case t: SingletonType => t.underlying.toTypeKind(ct)(storage)
- case t: RefinedType => t.parent.toTypeKind(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b))
+ case tp: ThisType if tp.cls == ArrayClass => ObjectReference.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test
+ case tp: ThisType => storage.getClassBTypeAndRegisterInnerClass(tp.cls.asInstanceOf[ct.int.Symbol])
+ // case t: SingletonType => primitiveOrClassToBType(t.classSymbol)
+ case t: SingletonType => t.underlying.toTypeKind(ct)(storage)
+ case t: RefinedType => t.parent.toTypeKind(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b))
}
}
}
@@ -934,7 +944,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
}
object ValDef extends ValDefDeconstructor {
- def _1: Modifiers = field.mods
+ def _1: Modifiers = null
def _2: Name = field.name
def _3: Tree = field.tpt
def _4: Tree = field.rhs
@@ -1045,7 +1055,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
}
object DefDef extends DefDefDeconstructor {
- def _1: Modifiers = field.mods
+ def _1: Modifiers = null
def _2: Name = field.name
def _3: List[TypeDef] = field.tparams
def _4: List[List[ValDef]] = field.vparamss
@@ -1071,7 +1081,7 @@ class DottyBackendInterface(outputDirectory: AbstractFile)(implicit ctx: Context
}
object ClassDef extends ClassDefDeconstructor {
- def _1: Modifiers = field.mods
+ def _1: Modifiers = null
def _2: Name = field.name
def _4: Template = field.rhs.asInstanceOf[Template]
def _3: List[TypeDef] = Nil
diff --git a/src/dotty/tools/backend/jvm/GenBCode.scala b/src/dotty/tools/backend/jvm/GenBCode.scala
index 8cec93977..902f73ae2 100644
--- a/src/dotty/tools/backend/jvm/GenBCode.scala
+++ b/src/dotty/tools/backend/jvm/GenBCode.scala
@@ -4,6 +4,7 @@ import dotty.tools.dotc.CompilationUnit
import dotty.tools.dotc.ast.Trees.{ValDef, PackageDef}
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Phases.Phase
+import dotty.tools.dotc.core.Names.TypeName
import scala.collection.mutable
import scala.tools.asm.{CustomAttr, ClassVisitor, MethodVisitor, FieldVisitor}
@@ -41,11 +42,18 @@ class GenBCode extends Phase {
private val entryPoints = new mutable.HashSet[Symbol]()
def registerEntryPoint(sym: Symbol) = entryPoints += sym
+ private val superCallsMap = new mutable.HashMap[Symbol, Set[ClassSymbol]]()
+ def registerSuperCall(sym: Symbol, calls: ClassSymbol) = {
+ val old = superCallsMap.getOrElse(sym, Set.empty)
+ superCallsMap.put(sym, old + calls)
+ }
+
def outputDir(implicit ctx: Context): AbstractFile =
new PlainDirectory(new Directory(new JFile(ctx.settings.d.value)))
def run(implicit ctx: Context): Unit = {
- new GenBCodePipeline(entryPoints.toList, new DottyBackendInterface(outputDir)(ctx))(ctx).run(ctx.compilationUnit.tpdTree)
+ new GenBCodePipeline(entryPoints.toList,
+ new DottyBackendInterface(outputDir, superCallsMap.toMap)(ctx))(ctx).run(ctx.compilationUnit.tpdTree)
entryPoints.clear()
}
}
@@ -389,6 +397,8 @@ class GenBCodePipeline(val entryPoints: List[Symbol], val int: DottyBackendInter
val className = jclassName.replace('/', '.')
if (ctx.compilerCallback != null)
ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(outFile), className)
+ if (ctx.sbtCallback != null)
+ ctx.sbtCallback.generatedClass(sourceFile.jfile.orElse(null), outFile.file, className)
}
catch {
case e: FileConflictException =>
diff --git a/src/dotty/tools/backend/sjs/JSCodeGen.scala b/src/dotty/tools/backend/sjs/JSCodeGen.scala
index ec75a1c4d..401e01784 100644
--- a/src/dotty/tools/backend/sjs/JSCodeGen.scala
+++ b/src/dotty/tools/backend/sjs/JSCodeGen.scala
@@ -718,9 +718,9 @@ class JSCodeGen()(implicit ctx: Context) {
if (sym.is(Module)) {
assert(!sym.is(Package), "Cannot use package as value: " + tree)
genLoadModule(sym)
- } else /*if (sym.isStaticMember) {
- genStaticMember(sym)
- } else if (paramAccessorLocals contains sym) {
+ } else if (sym.is(JavaStatic)) {
+ genLoadStaticField(sym)
+ } else /*if (paramAccessorLocals contains sym) {
paramAccessorLocals(sym).ref
} else if (isScalaJSDefinedJSClass(sym.owner)) {
val genQual = genExpr(qualifier)
@@ -1036,8 +1036,6 @@ class JSCodeGen()(implicit ctx: Context) {
genStringConcat(tree, receiver, args)
else if (code == HASH)
genScalaHash(tree, receiver)
- else if (isArrayNew(code))
- genArrayNew(tree, code)
else if (isArrayOp(code))
genArrayOp(tree, code)
else if (code == SYNCHRONIZED)
@@ -1409,24 +1407,6 @@ class JSCodeGen()(implicit ctx: Context) {
List(genExpr(receiver)))
}
- /** Gen JS code for a new array operation. */
- private def genArrayNew(tree: Tree, code: Int): js.Tree = {
- import scala.tools.nsc.backend.ScalaPrimitives._
-
- implicit val pos: Position = tree.pos
-
- val Apply(fun, args) = tree
- val genLength = genExpr(args.head)
-
- toIRType(tree.tpe) match {
- case arrayType: jstpe.ArrayType =>
- js.NewArray(arrayType, List(genLength))
-
- case irTpe =>
- throw new FatalError(s"ArrayNew $tree must have an array type but was $irTpe")
- }
- }
-
/** Gen JS code for an array operation (get, set or length) */
private def genArrayOp(tree: Tree, code: Int): js.Tree = {
import scala.tools.nsc.backend.ScalaPrimitives._
@@ -2328,6 +2308,24 @@ class JSCodeGen()(implicit ctx: Context) {
}
}
+ /** Gen JS code for loading a Java static field.
+ */
+ private def genLoadStaticField(sym: Symbol)(implicit pos: Position): js.Tree = {
+ /* Actually, there is no static member in Scala.js. If we come here, that
+ * is because we found the symbol in a Java-emitted .class in the
+ * classpath. But the corresponding implementation in Scala.js will
+ * actually be a val in the companion module.
+ */
+
+ if (sym == defn.BoxedUnit_UNIT) {
+ js.Undefined()
+ } else {
+ val instance = genLoadModule(sym.owner)
+ val method = encodeStaticMemberSym(sym)
+ js.Apply(instance, method, Nil)(toIRType(sym.info))
+ }
+ }
+
/** Gen JS code for loading a module.
*
* Can be given either the module symbol, or its module class symbol.
diff --git a/src/dotty/tools/backend/sjs/JSPrimitives.scala b/src/dotty/tools/backend/sjs/JSPrimitives.scala
index 52b5dc4b9..6c3c5715c 100644
--- a/src/dotty/tools/backend/sjs/JSPrimitives.scala
+++ b/src/dotty/tools/backend/sjs/JSPrimitives.scala
@@ -80,18 +80,6 @@ class JSPrimitives(ctx: Context) extends DottyPrimitives(ctx) {
val jsdefn = JSDefinitions.jsdefn
- // For some reason, the JVM primitive set does not register those
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newBooleanArray")), NEW_ZARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newByteArray")), NEW_BARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newShortArray")), NEW_SARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newCharArray")), NEW_CARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newIntArray")), NEW_IARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newLongArray")), NEW_LARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newFloatArray")), NEW_FARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newDoubleArray")), NEW_DARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newRefArray")), NEW_OARRAY)
- addPrimitive(defn.DottyArraysModule.requiredMethod(Names.termName("newUnitArray")), NEW_OARRAY)
-
addPrimitive(defn.Any_getClass, GETCLASS)
for (i <- 0 to 22)
diff --git a/src/dotty/tools/dotc/Bench.scala b/src/dotty/tools/dotc/Bench.scala
index 2fc38d78c..56b6dabbe 100644
--- a/src/dotty/tools/dotc/Bench.scala
+++ b/src/dotty/tools/dotc/Bench.scala
@@ -8,6 +8,10 @@ package dotc
import core.Contexts.Context
import reporting.Reporter
+/** A main class for running compiler benchmarks. Can instantiate a given
+ * number of compilers and run each (sequentially) a given number of times
+ * on the same sources.
+ */
object Bench extends Driver {
@sharable private var numRuns = 1
diff --git a/src/dotty/tools/dotc/Compiler.scala b/src/dotty/tools/dotc/Compiler.scala
index fe16243bb..178cba7c4 100644
--- a/src/dotty/tools/dotc/Compiler.scala
+++ b/src/dotty/tools/dotc/Compiler.scala
@@ -7,7 +7,7 @@ import Periods._
import Symbols._
import Types._
import Scopes._
-import typer.{FrontEnd, Typer, Mode, ImportInfo, RefChecks}
+import typer.{FrontEnd, Typer, ImportInfo, RefChecks}
import reporting.{Reporter, ConsoleReporter}
import Phases.Phase
import transform._
@@ -15,14 +15,17 @@ import transform.TreeTransforms.{TreeTransform, TreeTransformer}
import core.DenotTransformers.DenotTransformer
import core.Denotations.SingleDenotation
-import dotty.tools.backend.jvm.{LabelDefs, GenBCode}
+import dotty.tools.backend.jvm.{LabelDefs, GenBCode, CollectSuperCalls}
import dotty.tools.backend.sjs.GenSJSIR
+/** The central class of the dotc compiler. The job of a compiler is to create
+ * runs, which process given `phases` in a given `rootContext`.
+ */
class Compiler {
/** Meta-ordering constraint:
*
- * DenotTransformers that change the signature of their denotation's info must go
+ * DenotTransformers that change the signature of their denotation's info must go
* after erasure. The reason is that denotations are permanently referred to by
* TermRefs which contain a signature. If the signature of a symbol would change,
* all refs to it would become outdated - they could not be dereferenced in the
@@ -38,54 +41,64 @@ class Compiler {
*/
def phases: List[List[Phase]] =
List(
- List(new FrontEnd),
- List(new PostTyper),
- List(new Pickler),
- List(new FirstTransform,
- new CheckReentrant),
- List(new RefChecks,
- new CheckStatic,
- new ElimRepeated,
- new NormalizeFlags,
- new ExtensionMethods,
- new ExpandSAMs,
- new TailRec,
- new LiftTry,
- new ClassOf),
- List(new PatternMatcher,
- new ExplicitOuter,
- new ExplicitSelf,
- new CrossCastAnd,
- new Splitter),
- List(new VCInlineMethods,
- new SeqLiterals,
- new InterceptedMethods,
- new Getters,
- new ElimByName,
- new AugmentScala2Traits,
- new ResolveSuper),
- List(new Erasure),
- List(new ElimErasedValueType,
- new VCElideAllocations,
- new Mixin,
- new LazyVals,
- new Memoize,
- new LinkScala2ImplClasses,
- new NonLocalReturns,
- new CapturedVars, // capturedVars has a transformUnit: no phases should introduce local mutable vars here
- new Constructors, // constructors changes decls in transformTemplate, no InfoTransformers should be added after it
- new FunctionalInterfaces,
- new GetClass), // getClass transformation should be applied to specialized methods
- List(new LambdaLift, // in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here
- new ElimStaticThis,
- new Flatten,
- // new DropEmptyCompanions,
- new RestoreScopes),
- List(new ExpandPrivate,
- new CollectEntryPoints,
- new LabelDefs),
- List(new GenSJSIR),
- List(new GenBCode)
+ List(new FrontEnd), // Compiler frontend: scanner, parser, namer, typer
+ List(new sbt.ExtractDependencies), // Sends information on classes' dependencies to sbt via callbacks
+ List(new PostTyper), // Additional checks and cleanups after type checking
+ List(new sbt.ExtractAPI), // Sends a representation of the API of classes to sbt via callbacks
+ List(new Pickler), // Generate TASTY info
+ List(new FirstTransform, // Some transformations to put trees into a canonical form
+ new CheckReentrant), // Internal use only: Check that compiled program has no data races involving global vars
+ List(new RefChecks, // Various checks mostly related to abstract members and overriding
+ new CheckStatic, // Check restrictions that apply to @static members
+ new ElimRepeated, // Rewrite vararg parameters and arguments
+ new NormalizeFlags, // Rewrite some definition flags
+ new ExtensionMethods, // Expand methods of value classes with extension methods
+ new ExpandSAMs, // Expand single abstract method closures to anonymous classes
+ new TailRec, // Rewrite tail recursion to loops
+ new LiftTry, // Put try expressions that might execute on non-empty stacks into their own methods
+ new ClassOf), // Expand `Predef.classOf` calls.
+ List(new TryCatchPatterns, // Compile cases in try/catch
+ new PatternMatcher, // Compile pattern matches
+ new ExplicitOuter, // Add accessors to outer classes from nested ones.
+ new ExplicitSelf, // Make references to non-trivial self types explicit as casts
+ new CrossCastAnd, // Normalize selections involving intersection types.
+ new Splitter), // Expand selections involving union types into conditionals
+ List(new VCInlineMethods, // Inlines calls to value class methods
+ new IsInstanceOfEvaluator, // Issues warnings when unreachable statements are present in match/if expressions
+ new SeqLiterals, // Express vararg arguments as arrays
+ new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods
+ new Getters, // Replace non-private vals and vars with getter defs (fields are added later)
+ new ElimByName, // Expand by-name parameters and arguments
+ new AugmentScala2Traits, // Expand traits defined in Scala 2.11 to simulate old-style rewritings
+ new ResolveSuper, // Implement super accessors and add forwarders to trait methods
+ new ArrayConstructors), // Intercept creation of (non-generic) arrays and intrinsify.
+ List(new Erasure), // Rewrite types to JVM model, erasing all type parameters, abstract types and refinements.
+ List(new ElimErasedValueType, // Expand erased value types to their underlying implementation types
+ new VCElideAllocations, // Peep-hole optimization to eliminate unnecessary value class allocations
+ new Mixin, // Expand trait fields and trait initializers
+ new LazyVals, // Expand lazy vals
+ new Memoize, // Add private fields to getters and setters
+ new LinkScala2ImplClasses, // Forward calls to the implementation classes of traits defined by Scala 2.11
+ new NonLocalReturns, // Expand non-local returns
+ new CapturedVars, // Represent vars captured by closures as heap objects
+ new Constructors, // Collect initialization code in primary constructors
+ // Note: constructors changes decls in transformTemplate, no InfoTransformers should be added after it
+ new FunctionalInterfaces, // Rewrites closures to implement @specialized types of Functions.
+ new GetClass), // Rewrites getClass calls on primitive types.
+ List(new LambdaLift, // Lifts out nested functions to class scope, storing free variables in environments
+ // Note: in this mini-phase block scopes are incorrect. No phases that rely on scopes should be here
+ new ElimStaticThis, // Replace `this` references to static objects by global identifiers
+ new Flatten, // Lift all inner classes to package scope
+ new RestoreScopes), // Repair scopes rendered invalid by moving definitions in prior phases of the group
+ List(new ExpandPrivate, // Widen private definitions accessed from nested classes
+ new SelectStatic, // get rid of selects that would be compiled into GetStatic
+ new CollectEntryPoints, // Find classes with main methods
+ new CollectSuperCalls, // Find classes that are called with super
+ new DropInlined, // Drop Inlined nodes, since backend has no use for them
+ new MoveStatics, // Move static methods to companion classes
+ new LabelDefs), // Converts calls to labels to jumps
+ List(new GenSJSIR), // Generate .js code
+ List(new GenBCode) // Generate JVM bytecode
)
var runId = 1
diff --git a/src/dotty/tools/dotc/Driver.scala b/src/dotty/tools/dotc/Driver.scala
index 887274fa8..f54a23ad2 100644
--- a/src/dotty/tools/dotc/Driver.scala
+++ b/src/dotty/tools/dotc/Driver.scala
@@ -15,8 +15,6 @@ import scala.util.control.NonFatal
*/
abstract class Driver extends DotClass {
- val prompt = "\ndotc> "
-
protected def newCompiler(implicit ctx: Context): Compiler
protected def emptyReporter: Reporter = new StoreReporter(null)
@@ -33,7 +31,7 @@ abstract class Driver extends DotClass {
ctx.error(ex.getMessage) // signals that we should fail compilation.
ctx.reporter
}
- else emptyReporter
+ else ctx.reporter
protected def initCtx = (new ContextBase).initialCtx
diff --git a/src/dotty/tools/dotc/FromTasty.scala b/src/dotty/tools/dotc/FromTasty.scala
index 8f29c882c..b060a2054 100644
--- a/src/dotty/tools/dotc/FromTasty.scala
+++ b/src/dotty/tools/dotc/FromTasty.scala
@@ -64,6 +64,9 @@ object FromTasty extends Driver {
}
class ReadTastyTreesFromClasses extends FrontEnd {
+
+ override def isTyper = false
+
override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] =
units.map(readTASTY)
@@ -83,8 +86,8 @@ object FromTasty extends Driver {
case info: ClassfileLoader =>
info.load(clsd) match {
case Some(unpickler: DottyUnpickler) =>
- val (List(unpickled), source) = unpickler.body(readPositions = true)
- val unit1 = new CompilationUnit(source)
+ val List(unpickled) = unpickler.body(ctx.addMode(Mode.ReadPositions))
+ val unit1 = new CompilationUnit(new SourceFile(clsd.symbol.sourceFile, Seq()))
unit1.tpdTree = unpickled
unit1.unpicklers += (clsd.classSymbol -> unpickler.unpickler)
force.traverse(unit1.tpdTree)
diff --git a/src/dotty/tools/dotc/Main.scala b/src/dotty/tools/dotc/Main.scala
index 6c473d8bb..a6844fbbc 100644
--- a/src/dotty/tools/dotc/Main.scala
+++ b/src/dotty/tools/dotc/Main.scala
@@ -3,8 +3,7 @@ package dotc
import core.Contexts.Context
-/* To do:
- */
+/** Main class of the `dotc` batch compiler. */
object Main extends Driver {
override def newCompiler(implicit ctx: Context): Compiler = new Compiler
}
diff --git a/src/dotty/tools/dotc/Resident.scala b/src/dotty/tools/dotc/Resident.scala
index 18bb2ff4f..56f6684d0 100644
--- a/src/dotty/tools/dotc/Resident.scala
+++ b/src/dotty/tools/dotc/Resident.scala
@@ -6,7 +6,9 @@ import reporting.Reporter
import java.io.EOFException
import scala.annotation.tailrec
-/** A compiler which stays resident between runs.
+/** A compiler which stays resident between runs. This is more of a PoC than
+ * something that's expected to be used often.
+ *
* Usage:
*
* > scala dotty.tools.dotc.Resident <options> <initial files>
@@ -31,6 +33,7 @@ class Resident extends Driver {
private val quit = ":q"
private val reset = ":reset"
+ private val prompt = "dotc> "
private def getLine() = {
Console.print(prompt)
diff --git a/src/dotty/tools/dotc/Run.scala b/src/dotty/tools/dotc/Run.scala
index ee808323a..f5ba56a7e 100644
--- a/src/dotty/tools/dotc/Run.scala
+++ b/src/dotty/tools/dotc/Run.scala
@@ -2,17 +2,25 @@ package dotty.tools
package dotc
import core._
-import Contexts._, Periods._, Symbols._, Phases._, Decorators._
+import Contexts._
+import Periods._
+import Symbols._
+import Phases._
+import Decorators._
import dotty.tools.dotc.transform.TreeTransforms.TreeTransformer
import io.PlainFile
-import util.{SourceFile, NoSource, Stats, SimpleMap}
+import scala.io.Codec
+import util._
import reporting.Reporter
import transform.TreeChecker
import rewrite.Rewrites
import java.io.{BufferedWriter, OutputStreamWriter}
+
+import scala.annotation.tailrec
import scala.reflect.io.VirtualFile
import scala.util.control.NonFatal
+/** A compiler run. Exports various methods to compile source files */
class Run(comp: Compiler)(implicit ctx: Context) {
assert(comp.phases.last.last.id <= Periods.MaxPossiblePhaseId)
@@ -22,8 +30,13 @@ class Run(comp: Compiler)(implicit ctx: Context) {
def getSource(fileName: String): SourceFile = {
val f = new PlainFile(fileName)
- if (f.exists) new SourceFile(f)
- else {
+ if (f.isDirectory) {
+ ctx.error(s"expected file, received directory '$fileName'")
+ NoSource
+ } else if (f.exists) {
+ val encoding = ctx.settings.encoding.value
+ new SourceFile(f, Codec(encoding))
+ } else {
ctx.error(s"not found: $fileName")
NoSource
}
@@ -34,7 +47,7 @@ class Run(comp: Compiler)(implicit ctx: Context) {
compileSources(sources)
} catch {
case NonFatal(ex) =>
- ctx.println(i"exception occurred while compiling $units%, %")
+ ctx.echo(i"exception occurred while compiling $units%, %")
throw ex
}
@@ -55,26 +68,50 @@ class Run(comp: Compiler)(implicit ctx: Context) {
val phases = ctx.squashPhases(ctx.phasePlan,
ctx.settings.Yskip.value, ctx.settings.YstopBefore.value, ctx.settings.YstopAfter.value, ctx.settings.Ycheck.value)
ctx.usePhases(phases)
+ var lastPrintedTree: PrintedTree = NoPrintedTree
for (phase <- ctx.allPhases)
if (!ctx.reporter.hasErrors) {
val start = System.currentTimeMillis
units = phase.runOn(units)
- def foreachUnit(op: Context => Unit)(implicit ctx: Context): Unit =
- for (unit <- units) op(ctx.fresh.setPhase(phase.next).setCompilationUnit(unit))
- if (ctx.settings.Xprint.value.containsPhase(phase))
- foreachUnit(printTree)
+ if (ctx.settings.Xprint.value.containsPhase(phase)) {
+ for (unit <- units) {
+ lastPrintedTree =
+ printTree(lastPrintedTree)(ctx.fresh.setPhase(phase.next).setCompilationUnit(unit))
+ }
+ }
ctx.informTime(s"$phase ", start)
}
if (!ctx.reporter.hasErrors) Rewrites.writeBack()
}
- private def printTree(ctx: Context) = {
+ private sealed trait PrintedTree
+ private final case class SomePrintedTree(phase: String, tree: String) extends PrintedTree
+ private object NoPrintedTree extends PrintedTree
+
+ private def printTree(last: PrintedTree)(implicit ctx: Context): PrintedTree = {
val unit = ctx.compilationUnit
val prevPhase = ctx.phase.prev // can be a mini-phase
val squashedPhase = ctx.squashed(prevPhase)
+ val treeString = unit.tpdTree.show
+
+ ctx.echo(s"result of $unit after $squashedPhase:")
- ctx.println(s"result of $unit after ${squashedPhase}:")
- ctx.println(unit.tpdTree.show(ctx))
+ last match {
+ case SomePrintedTree(phase, lastTreeSting) if lastTreeSting != treeString =>
+ val msg =
+ if (!ctx.settings.XprintDiff.value && !ctx.settings.XprintDiffDel.value) treeString
+ else DiffUtil.mkColoredCodeDiff(treeString, lastTreeSting, ctx.settings.XprintDiffDel.value)
+ ctx.echo(msg)
+ SomePrintedTree(squashedPhase.toString, treeString)
+
+ case SomePrintedTree(phase, lastTreeSting) =>
+ ctx.echo(" Unchanged since " + phase)
+ last
+
+ case NoPrintedTree =>
+ ctx.echo(treeString)
+ SomePrintedTree(squashedPhase.toString, treeString)
+ }
}
def compile(sourceCode: String): Unit = {
@@ -82,7 +119,7 @@ class Run(comp: Compiler)(implicit ctx: Context) {
val writer = new BufferedWriter(new OutputStreamWriter(virtualFile.output, "UTF-8")) // buffering is still advised by javadoc
writer.write(sourceCode)
writer.close()
- compileSources(List(new SourceFile(virtualFile)))
+ compileSources(List(new SourceFile(virtualFile, Codec.UTF8)))
}
/** The context created for this run */
diff --git a/src/dotty/tools/dotc/ast/Desugar.scala b/src/dotty/tools/dotc/ast/Desugar.scala
index 2ab33a120..af34164dc 100644
--- a/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/src/dotty/tools/dotc/ast/Desugar.scala
@@ -8,21 +8,22 @@ import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
import Decorators._
import language.higherKinds
import collection.mutable.ListBuffer
-import config.Printers._
-import typer.Mode
+import util.Property
+import reporting.diagnostic.messages._
object desugar {
-
- /** Are we using the new unboxed pair scheme? */
- private final val unboxedPairs = false
-
import untpd._
+ /** Tags a .withFilter call generated by desugaring a for expression.
+ * Such calls can alternatively be rewritten to use filter.
+ */
+ val MaybeFilter = new Property.Key[Unit]
+
/** Info of a variable in a pattern: The named tree and its type */
private type VarInfo = (NameTree, Tree)
/** Names of methods that are added unconditionally to case classes */
- def isDesugaredCaseClassMethodName(name: Name)(implicit ctx: Context) =
+ def isDesugaredCaseClassMethodName(name: Name)(implicit ctx: Context): Boolean =
name == nme.isDefined ||
name == nme.copy ||
name == nme.productArity ||
@@ -47,7 +48,11 @@ object desugar {
*/
override def ensureCompletions(implicit ctx: Context) =
if (!(ctx.owner is Package))
- if (ctx.owner is ModuleClass) ctx.owner.linkedClass.ensureCompleted()
+ if (ctx.owner.isClass) {
+ ctx.owner.ensureCompleted()
+ if (ctx.owner is ModuleClass)
+ ctx.owner.linkedClass.ensureCompleted()
+ }
else ensureCompletions(ctx.outer)
/** Return info of original symbol, where all references to siblings of the
@@ -63,11 +68,13 @@ object desugar {
val relocate = new TypeMap {
val originalOwner = sym.owner
def apply(tp: Type) = tp match {
- case tp: NamedType if tp.symbol.owner eq originalOwner =>
+ case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) =>
val defctx = ctx.outersIterator.dropWhile(_.scope eq ctx.scope).next
var local = defctx.denotNamed(tp.name).suchThat(_ is ParamOrAccessor).symbol
if (local.exists) (defctx.owner.thisType select local).dealias
- else throw new Error(s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope}")
+ else throw new java.lang.Error(
+ s"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope}"
+ )
case _ =>
mapOver(tp)
}
@@ -257,13 +264,27 @@ object desugar {
// prefixed by type or val). `tparams` and `vparamss` are the type parameters that
// go in `constr`, the constructor after desugaring.
+ /** Does `tree' look like a reference to AnyVal? Temporary test before we have inline classes */
+ def isAnyVal(tree: Tree): Boolean = tree match {
+ case Ident(tpnme.AnyVal) => true
+ case Select(qual, tpnme.AnyVal) => isScala(qual)
+ case _ => false
+ }
+ def isScala(tree: Tree): Boolean = tree match {
+ case Ident(nme.scala_) => true
+ case Select(Ident(nme.ROOTPKG), nme.scala_) => true
+ case _ => false
+ }
+
val isCaseClass = mods.is(Case) && !mods.is(Module)
+ val isValueClass = parents.nonEmpty && isAnyVal(parents.head)
+ // This is not watertight, but `extends AnyVal` will be replaced by `inline` later.
val constrTparams = constr1.tparams map toDefParam
val constrVparamss =
if (constr1.vparamss.isEmpty) { // ensure parameter list is non-empty
if (isCaseClass)
- ctx.error("case class needs to have at least one parameter list", cdef.pos)
+ ctx.error(CaseClassMissingParamList(cdef), cdef.namePos)
ListOfNil
}
else constr1.vparamss.nestedMap(toDefParam)
@@ -399,7 +420,9 @@ object desugar {
companionDefs(parent, applyMeths ::: unapplyMeth :: defaultGetters)
}
else if (defaultGetters.nonEmpty)
- companionDefs(anyRef, defaultGetters)
+ companionDefs(anyRef, defaultGetters)
+ else if (isValueClass)
+ companionDefs(anyRef, Nil)
else Nil
@@ -481,7 +504,7 @@ object desugar {
val clsTmpl = cpy.Template(tmpl)(self = clsSelf, body = tmpl.body)
val cls = TypeDef(clsName, clsTmpl)
.withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags)
- Thicket(modul, classDef(cls))
+ Thicket(modul, classDef(cls).withPos(mdef.pos))
}
}
@@ -492,7 +515,7 @@ object desugar {
def patDef(pdef: PatDef)(implicit ctx: Context): Tree = {
val PatDef(mods, pats, tpt, rhs) = pdef
val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
- flatTree(pats1 map (makePatDef(mods, _, rhs)))
+ flatTree(pats1 map (makePatDef(pdef, mods, _, rhs)))
}
/** If `pat` is a variable pattern,
@@ -510,9 +533,9 @@ object desugar {
* If the original pattern variable carries a type annotation, so does the corresponding
* ValDef or DefDef.
*/
- def makePatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit ctx: Context): Tree = pat match {
+ def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(implicit ctx: Context): Tree = pat match {
case VarPattern(named, tpt) =>
- derivedValDef(named, tpt, rhs, mods)
+ derivedValDef(original, named, tpt, rhs, mods)
case _ =>
val rhsUnchecked = makeAnnotated(defn.UncheckedAnnot, rhs)
val vars = getVariables(pat)
@@ -529,7 +552,7 @@ object desugar {
case Nil =>
matchExpr
case (named, tpt) :: Nil =>
- derivedValDef(named, tpt, matchExpr, mods)
+ derivedValDef(original, named, tpt, matchExpr, mods)
case _ =>
val tmpName = ctx.freshName().toTermName
val patMods = mods & (AccessFlags | Lazy) | Synthetic
@@ -540,8 +563,8 @@ object desugar {
val restDefs =
for (((named, tpt), n) <- vars.zipWithIndex)
yield
- if (mods is Lazy) derivedDefDef(named, tpt, selector(n), mods &~ Lazy)
- else derivedValDef(named, tpt, selector(n), mods)
+ if (mods is Lazy) derivedDefDef(original, named, tpt, selector(n), mods &~ Lazy)
+ else derivedValDef(original, named, tpt, selector(n), mods)
flatTree(firstDef :: restDefs)
}
}
@@ -587,26 +610,36 @@ object desugar {
* ==>
* def $anonfun(params) = body
* Closure($anonfun)
+ *
+ * If `inlineable` is true, tag $anonfun with an @inline annotation.
*/
- def makeClosure(params: List[ValDef], body: Tree, tpt: Tree = TypeTree()) =
+ def makeClosure(params: List[ValDef], body: Tree, tpt: Tree = TypeTree(), inlineable: Boolean)(implicit ctx: Context) = {
+ var mods = synthetic
+ if (inlineable)
+ mods = mods.withAddedAnnotation(New(ref(defn.InlineAnnotType), Nil).withPos(body.pos))
Block(
- DefDef(nme.ANON_FUN, Nil, params :: Nil, tpt, body).withMods(synthetic),
+ DefDef(nme.ANON_FUN, Nil, params :: Nil, tpt, body).withMods(mods),
Closure(Nil, Ident(nme.ANON_FUN), EmptyTree))
+ }
/** If `nparams` == 1, expand partial function
*
* { cases }
* ==>
- * x$1 => x$1 match { cases }
+ * x$1 => (x$1 @unchecked) match { cases }
*
* If `nparams` != 1, expand instead to
*
- * (x$1, ..., x$n) => (x$0, ..., x${n-1}) match { cases }
+ * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked) match { cases }
*/
- def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1)(implicit ctx: Context) = {
+ def makeCaseLambda(cases: List[CaseDef], nparams: Int = 1, unchecked: Boolean = true)(implicit ctx: Context) = {
val params = (1 to nparams).toList.map(makeSyntheticParameter(_))
val selector = makeTuple(params.map(p => Ident(p.name)))
- Function(params, Match(selector, cases))
+
+ if (unchecked)
+ Function(params, Match(Annotated(selector, New(ref(defn.UncheckedAnnotType))), cases))
+ else
+ Function(params, Match(selector, cases))
}
/** Map n-ary function `(p1, ..., pn) => body` where n != 1 to unary function as follows:
@@ -633,13 +666,20 @@ object desugar {
* tree @cls
*/
def makeAnnotated(cls: Symbol, tree: Tree)(implicit ctx: Context) =
- Annotated(untpd.New(untpd.TypeTree(cls.typeRef), Nil), tree)
+ Annotated(tree, untpd.New(untpd.TypeTree(cls.typeRef), Nil))
- private def derivedValDef(named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers) =
- ValDef(named.name.asTermName, tpt, rhs).withMods(mods).withPos(named.pos)
+ private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit ctx: Context) = {
+ val vdef = ValDef(named.name.asTermName, tpt, rhs)
+ .withMods(mods)
+ .withPos(original.pos.withPoint(named.pos.start))
+ val mayNeedSetter = valDef(vdef)
+ mayNeedSetter
+ }
- private def derivedDefDef(named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers) =
- DefDef(named.name.asTermName, Nil, Nil, tpt, rhs).withMods(mods).withPos(named.pos)
+ private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers) =
+ DefDef(named.name.asTermName, Nil, Nil, tpt, rhs)
+ .withMods(mods)
+ .withPos(original.pos.withPoint(named.pos.start))
/** Main desugaring method */
def apply(tree: Tree)(implicit ctx: Context): Tree = {
@@ -667,7 +707,7 @@ object desugar {
Apply(Select(left, op), args)
} else {
val x = ctx.freshName().toTermName
- Block(
+ new InfixOpBlock(
ValDef(x, TypeTree(), left).withMods(synthetic),
Apply(Select(right, op), Ident(x)))
}
@@ -729,9 +769,9 @@ object desugar {
*/
def makeLambda(pat: Tree, body: Tree): Tree = pat match {
case VarPattern(named, tpt) =>
- Function(derivedValDef(named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body)
+ Function(derivedValDef(pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body)
case _ =>
- makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil)
+ makeCaseLambda(CaseDef(pat, EmptyTree, body) :: Nil, unchecked = false)
}
/** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap
@@ -747,6 +787,12 @@ object desugar {
(Bind(name, pat), Ident(name))
}
+ /** Add MaybeFilter attachment */
+ def orFilter(tree: Tree): tree.type = {
+ tree.putAttachment(MaybeFilter, ())
+ tree
+ }
+
/** Make a pattern filter:
* rhs.withFilter { case pat => true case _ => false }
*
@@ -777,7 +823,7 @@ object desugar {
val cases = List(
CaseDef(pat, EmptyTree, Literal(Constant(true))),
CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))))
- Apply(Select(rhs, nme.withFilter), Match(EmptyTree, cases))
+ Apply(orFilter(Select(rhs, nme.withFilter)), makeCaseLambda(cases))
}
/** Is pattern `pat` irrefutable when matched against `rhs`?
@@ -826,13 +872,13 @@ object desugar {
val rhss = valeqs map { case GenAlias(_, rhs) => rhs }
val (defpat0, id0) = makeIdPat(pat)
val (defpats, ids) = (pats map makeIdPat).unzip
- val pdefs = (defpats, rhss).zipped map (makePatDef(Modifiers(), _, _))
+ val pdefs = (valeqs, defpats, rhss).zipped.map(makePatDef(_, Modifiers(), _, _))
val rhs1 = makeFor(nme.map, nme.flatMap, GenFrom(defpat0, rhs) :: Nil, Block(pdefs, makeTuple(id0 :: ids)))
val allpats = pat :: pats
val vfrom1 = new IrrefutableGenFrom(makeTuple(allpats), rhs1)
makeFor(mapName, flatMapName, vfrom1 :: rest1, body)
case (gen: GenFrom) :: test :: rest =>
- val filtered = Apply(rhsSelect(gen, nme.withFilter), makeLambda(gen.pat, test))
+ val filtered = Apply(orFilter(rhsSelect(gen, nme.withFilter)), makeLambda(gen.pat, test))
val genFrom =
if (isIrrefutableGenFrom(gen)) new IrrefutableGenFrom(gen.pat, filtered)
else GenFrom(gen.pat, filtered)
@@ -848,7 +894,15 @@ object desugar {
Apply(
ref(defn.SymbolClass.companionModule.termRef),
Literal(Constant(str)) :: Nil)
- case InterpolatedString(id, strs, elems) =>
+ case InterpolatedString(id, segments) =>
+ val strs = segments map {
+ case ts: Thicket => ts.trees.head
+ case t => t
+ }
+ val elems = segments flatMap {
+ case ts: Thicket => ts.trees.tail
+ case t => Nil
+ }
Apply(Select(Apply(Ident(nme.StringContext), strs), id), elems)
case InfixOp(l, op, r) =>
if (ctx.mode is Mode.Type)
@@ -863,8 +917,8 @@ object desugar {
if ((ctx.mode is Mode.Type) && op == nme.raw.STAR) {
val seqType = if (ctx.compilationUnit.isJava) defn.ArrayType else defn.SeqType
Annotated(
- New(ref(defn.RepeatedAnnotType), Nil :: Nil),
- AppliedTypeTree(ref(seqType), t))
+ AppliedTypeTree(ref(seqType), t),
+ New(ref(defn.RepeatedAnnotType), Nil :: Nil))
} else {
assert(ctx.mode.isExpr || ctx.reporter.hasErrors, ctx.mode)
Select(t, op)
@@ -874,25 +928,15 @@ object desugar {
case Parens(t) =>
t
case Tuple(ts) =>
- if (unboxedPairs) {
- def PairTypeTree(l: Tree, r: Tree) =
- AppliedTypeTree(ref(defn.PairType), l :: r :: Nil)
- if (ctx.mode is Mode.Type) ts.reduceRight(PairTypeTree)
- else if (ts.isEmpty) unitLiteral
- else ts.reduceRight(Pair(_, _))
- }
- else {
- val arity = ts.length
- def tupleTypeRef = defn.TupleType(arity)
- if (arity > Definitions.MaxTupleArity) {
- ctx.error(s"tuple too long (max allowed: ${Definitions.MaxTupleArity})", tree.pos)
- unitLiteral
- }
- else if (arity == 1) ts.head
- else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
- else if (arity == 0) unitLiteral
- else Apply(ref(tupleTypeRef.classSymbol.companionModule.valRef), ts)
- }
+ val arity = ts.length
+ def tupleTypeRef = defn.TupleType(arity)
+ if (arity > Definitions.MaxTupleArity) {
+ ctx.error(s"tuple too long (max allowed: ${Definitions.MaxTupleArity})", tree.pos)
+ unitLiteral
+ } else if (arity == 1) ts.head
+ else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts)
+ else if (arity == 0) unitLiteral
+ else Apply(ref(tupleTypeRef.classSymbol.companionModule.valRef), ts)
case WhileDo(cond, body) =>
// { <label> def while$(): Unit = if (cond) { body; while$() } ; while$() }
val call = Apply(Ident(nme.WHILE_PREFIX), Nil)
@@ -909,7 +953,7 @@ object desugar {
makeFor(nme.map, nme.flatMap, enums, body) orElse tree
case PatDef(mods, pats, tpt, rhs) =>
val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt))
- flatTree(pats1 map (makePatDef(mods, _, rhs)))
+ flatTree(pats1 map (makePatDef(tree, mods, _, rhs)))
case ParsedTry(body, handler, finalizer) =>
handler match {
case Match(EmptyTree, cases) => Try(body, cases, finalizer)
@@ -936,7 +980,7 @@ object desugar {
* Example: Given
*
* class C
- * type T1 extends C { type T <: A }
+ * type T1 = C { type T <: A }
*
* the refined type
*
@@ -985,8 +1029,8 @@ object desugar {
def add(named: NameTree, t: Tree): Unit =
if (!seenName(named.name)) buf += ((named, t))
def collect(tree: Tree): Unit = tree match {
- case Bind(nme.WILDCARD, _) =>
- collect(tree)
+ case Bind(nme.WILDCARD, tree1) =>
+ collect(tree1)
case tree @ Bind(_, Typed(tree1, tpt)) if !mayBeTypePat(tpt) =>
add(tree, tpt)
collect(tree1)
@@ -999,9 +1043,6 @@ object desugar {
add(id, TypeTree())
case Apply(_, args) =>
args foreach collect
- case Pair(left, right) =>
- collect(left)
- collect(right)
case Typed(expr, _) =>
collect(expr)
case NamedArg(_, arg) =>
@@ -1011,10 +1052,10 @@ object desugar {
case Alternative(trees) =>
for (tree <- trees; (vble, _) <- getVariables(tree))
ctx.error("illegal variable in pattern alternative", vble.pos)
- case Annotated(annot, arg) =>
+ case Annotated(arg, _) =>
collect(arg)
- case InterpolatedString(_, _, elems) =>
- elems foreach collect
+ case InterpolatedString(_, segments) =>
+ segments foreach collect
case InfixOp(left, _, right) =>
collect(left)
collect(right)
diff --git a/src/dotty/tools/dotc/ast/NavigateAST.scala b/src/dotty/tools/dotc/ast/NavigateAST.scala
index 782866bad..33aa87d8e 100644
--- a/src/dotty/tools/dotc/ast/NavigateAST.scala
+++ b/src/dotty/tools/dotc/ast/NavigateAST.scala
@@ -19,10 +19,9 @@ object NavigateAST {
case _ =>
val loosePath = untypedPath(tree, exactMatch = false)
throw new
- Error(i"""no untyped tree for $tree, pos = ${tree.pos}, envelope = ${tree.envelope}
+ Error(i"""no untyped tree for $tree, pos = ${tree.pos}
|best matching path =\n$loosePath%\n====\n%
- |path positions = ${loosePath.map(_.pos)}
- |path envelopes = ${loosePath.map(_.envelope)}""".stripMargin)
+ |path positions = ${loosePath.map(_.pos)}""")
}
/** The reverse path of untyped trees starting with a tree that closest matches
@@ -40,7 +39,7 @@ object NavigateAST {
def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(implicit ctx: Context): List[Positioned] =
tree match {
case tree: MemberDef[_] =>
- untypedPath(tree.envelope) match {
+ untypedPath(tree.pos) match {
case path @ (last: DefTree[_]) :: _ => path
case path if !exactMatch => path
case _ => Nil
@@ -76,7 +75,7 @@ object NavigateAST {
path
}
def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] =
- if (p.envelope contains pos) childPath(p.productIterator, p :: path)
+ if (p.pos contains pos) childPath(p.productIterator, p :: path)
else path
singlePath(from, Nil)
}
diff --git a/src/dotty/tools/dotc/ast/Positioned.scala b/src/dotty/tools/dotc/ast/Positioned.scala
index e7f5de591..8d364d439 100644
--- a/src/dotty/tools/dotc/ast/Positioned.scala
+++ b/src/dotty/tools/dotc/ast/Positioned.scala
@@ -3,6 +3,10 @@ package ast
import util.Positions._
import util.DotClass
+import core.Contexts.Context
+import core.Decorators._
+import core.Flags.JavaDefined
+import core.StdNames.nme
/** A base class for things that have positions (currently: modifiers and trees)
*/
@@ -16,7 +20,7 @@ abstract class Positioned extends DotClass with Product {
*/
def pos: Position = curPos
- /** Destructively update `curPos` to given position. Also, set any missing
+ /** Destructively update `curPos` to given position. Also, set any missing
* positions in children.
*/
protected def setPos(pos: Position): Unit = {
@@ -24,11 +28,6 @@ abstract class Positioned extends DotClass with Product {
if (pos.exists) setChildPositions(pos.toSynthetic)
}
- /** The envelope containing the item in its entirety. Envelope is different from
- * `pos` for definitions (instances of MemberDef).
- */
- def envelope: Position = pos.toSynthetic
-
/** A positioned item like this one with the position set to `pos`.
* if the positioned item is source-derived, a clone is returned.
* If the positioned item is synthetic, the position is updated
@@ -106,8 +105,7 @@ abstract class Positioned extends DotClass with Product {
}
}
- /** The initial, synthetic position. This is usually the union of all positioned children's
- * envelopes.
+ /** The initial, synthetic position. This is usually the union of all positioned children's positions.
*/
protected def initialPos: Position = {
var n = productArity
@@ -115,7 +113,7 @@ abstract class Positioned extends DotClass with Product {
while (n > 0) {
n -= 1
productElement(n) match {
- case p: Positioned => pos = pos union p.envelope
+ case p: Positioned => pos = pos union p.pos
case xs: List[_] => pos = unionPos(pos, xs)
case _ =>
}
@@ -124,7 +122,7 @@ abstract class Positioned extends DotClass with Product {
}
private def unionPos(pos: Position, xs: List[_]): Position = xs match {
- case (p: Positioned) :: xs1 => unionPos(pos union p.envelope, xs1)
+ case (p: Positioned) :: xs1 => unionPos(pos union p.pos, xs1)
case _ => pos
}
@@ -138,7 +136,7 @@ abstract class Positioned extends DotClass with Product {
false
}
(this eq that) ||
- (this.envelope contains that.pos) && {
+ (this.pos contains that.pos) && {
var n = productArity
var found = false
while (n > 0 && !found) {
@@ -148,4 +146,68 @@ abstract class Positioned extends DotClass with Product {
found
}
}
+
+ /** Check that all positioned items in this tree satisfy the following conditions:
+ * - Parent positions contain child positions
+ * - If item is a non-empty tree, it has a position
+ */
+ def checkPos(nonOverlapping: Boolean)(implicit ctx: Context): Unit = try {
+ import untpd._
+ var lastPositioned: Positioned = null
+ var lastPos = NoPosition
+ def check(p: Any): Unit = p match {
+ case p: Positioned =>
+ assert(pos contains p.pos,
+ s"""position error, parent position does not contain child positon
+ |parent = $this,
+ |parent position = $pos,
+ |child = $p,
+ |child position = ${p.pos}""".stripMargin)
+ p match {
+ case tree: Tree if !tree.isEmpty =>
+ assert(tree.pos.exists,
+ s"position error: position not set for $tree # ${tree.uniqueId}")
+ case _ =>
+ }
+ if (nonOverlapping) {
+ this match {
+ case _: WildcardFunction
+ if lastPositioned.isInstanceOf[ValDef] && !p.isInstanceOf[ValDef] =>
+ // ignore transition from last wildcard parameter to body
+ case _ =>
+ assert(!lastPos.exists || !p.pos.exists || lastPos.end <= p.pos.start,
+ s"""position error, child positions overlap or in wrong order
+ |parent = $this
+ |1st child = $lastPositioned
+ |1st child position = $lastPos
+ |2nd child = $p
+ |2nd child position = ${p.pos}""".stripMargin)
+ }
+ lastPositioned = p
+ lastPos = p.pos
+ }
+ p.checkPos(nonOverlapping)
+ case xs: List[_] =>
+ xs.foreach(check)
+ case _ =>
+ }
+ this match {
+ case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) =>
+ // Special treatment for constructors coming from Java:
+ // Leave out tparams, they are copied with wrong positions from parent class
+ check(tree.mods)
+ check(tree.vparamss)
+ case _ =>
+ val end = productArity
+ var n = 0
+ while (n < end) {
+ check(productElement(n))
+ n += 1
+ }
+ }
+ } catch {
+ case ex: AssertionError =>
+ println(i"error while checking $this")
+ throw ex
+ }
}
diff --git a/src/dotty/tools/dotc/ast/TreeInfo.scala b/src/dotty/tools/dotc/ast/TreeInfo.scala
index c1efd0b0b..7911840c6 100644
--- a/src/dotty/tools/dotc/ast/TreeInfo.scala
+++ b/src/dotty/tools/dotc/ast/TreeInfo.scala
@@ -88,12 +88,6 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] =>
case mp => mp
}
- /** If tree is a closure, it's body, otherwise tree itself */
- def closureBody(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
- case Block((meth @ DefDef(nme.ANON_FUN, _, _, _, _)) :: Nil, Closure(_, _, _)) => meth.rhs
- case _ => tree
- }
-
/** If this is an application, its function part, stripping all
* Apply nodes (but leaving TypeApply nodes in). Otherwise the tree itself.
*/
@@ -182,8 +176,8 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] =>
case OrTypeTree(tpt1, tpt2) => mayBeTypePat(tpt1) || mayBeTypePat(tpt2)
case RefinedTypeTree(tpt, refinements) => mayBeTypePat(tpt) || refinements.exists(_.isInstanceOf[Bind])
case AppliedTypeTree(tpt, args) => mayBeTypePat(tpt) || args.exists(_.isInstanceOf[Bind])
- case SelectFromTypeTree(tpt, _) => mayBeTypePat(tpt)
- case Annotated(_, tpt) => mayBeTypePat(tpt)
+ case Select(tpt, _) => mayBeTypePat(tpt)
+ case Annotated(tpt, _) => mayBeTypePat(tpt)
case _ => false
}
@@ -249,17 +243,6 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] =>
/** Is this case guarded? */
def isGuardedCase(cdef: CaseDef) = cdef.guard ne EmptyTree
- /** True iff definition is a val or def with no right-hand-side, or it
- * is an abstract typoe declaration
- */
- def lacksDefinition(mdef: MemberDef)(implicit ctx: Context) = mdef match {
- case mdef: ValOrDefDef =>
- mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor)
- case mdef: TypeDef =>
- mdef.rhs.isEmpty || mdef.rhs.isInstanceOf[TypeBoundsTree]
- case _ => false
- }
-
/** The underlying pattern ignoring any bindings */
def unbind(x: Tree): Tree = unsplice(x) match {
case Bind(_, y) => unbind(y)
@@ -278,7 +261,30 @@ trait TreeInfo[T >: Untyped <: Type] { self: Trees.Instance[T] =>
}
trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] =>
- // todo: fill with methods from TreeInfo that only apply to untpd.Tree's
+ import TreeInfo._
+ import untpd._
+
+ /** True iff definition is a val or def with no right-hand-side, or it
+ * is an abstract type declaration
+ */
+ def lacksDefinition(mdef: MemberDef)(implicit ctx: Context) = mdef match {
+ case mdef: ValOrDefDef =>
+ mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor)
+ case mdef: TypeDef =>
+ mdef.rhs.isEmpty || mdef.rhs.isInstanceOf[TypeBoundsTree]
+ case _ => false
+ }
+
+ def isFunctionWithUnknownParamType(tree: Tree) = tree match {
+ case Function(args, _) =>
+ args.exists {
+ case ValDef(_, tpt, _) => tpt.isEmpty
+ case _ => false
+ }
+ case _ => false
+ }
+
+ // todo: fill with other methods from TreeInfo that only apply to untpd.Tree's
}
trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
@@ -296,9 +302,11 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
| DefDef(_, _, _, _, _) =>
Pure
case vdef @ ValDef(_, _, _) =>
- if (vdef.mods is Mutable) Impure else exprPurity(vdef.rhs)
+ if (vdef.symbol.flags is Mutable) Impure else exprPurity(vdef.rhs)
case _ =>
Impure
+ // TODO: It seems like this should be exprPurity(tree)
+ // But if we do that the repl/vars test breaks. Need to figure out why that's the case.
}
/** The purity level of this expression.
@@ -315,13 +323,13 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
case EmptyTree
| This(_)
| Super(_, _)
- | Literal(_) =>
+ | Literal(_)
+ | Closure(_, _, _) =>
Pure
case Ident(_) =>
refPurity(tree)
case Select(qual, _) =>
- refPurity(tree).min(
- if (tree.symbol.is(Inline)) Pure else exprPurity(qual))
+ refPurity(tree).min(exprPurity(qual))
case TypeApply(fn, _) =>
exprPurity(fn)
/*
@@ -423,6 +431,36 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
}
}
+ /** Decompose a call fn[targs](vargs_1)...(vargs_n)
+ * into its constituents (where targs, vargss may be empty)
+ */
+ def decomposeCall(tree: Tree): (Tree, List[Tree], List[List[Tree]]) = tree match {
+ case Apply(fn, args) =>
+ val (meth, targs, argss) = decomposeCall(fn)
+ (meth, targs, argss :+ args)
+ case TypeApply(fn, targs) =>
+ val (meth, Nil, Nil) = decomposeCall(fn)
+ (meth, targs, Nil)
+ case _ =>
+ (tree, Nil, Nil)
+ }
+
+ /** An extractor for closures, either contained in a block or standalone.
+ */
+ object closure {
+ def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match {
+ case Block(_, Closure(env, meth, tpt)) => Some(env, meth, tpt)
+ case Closure(env, meth, tpt) => Some(env, meth, tpt)
+ case _ => None
+ }
+ }
+
+ /** If tree is a closure, its body, otherwise tree itself */
+ def closureBody(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
+ case Block((meth @ DefDef(nme.ANON_FUN, _, _, _, _)) :: Nil, Closure(_, _, _)) => meth.rhs
+ case _ => tree
+ }
+
/** The variables defined by a pattern, in reverse order of their appearance. */
def patVars(tree: Tree)(implicit ctx: Context): List[Symbol] = {
val acc = new TreeAccumulator[List[Symbol]] {
@@ -469,7 +507,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] =>
require(sym.pos.exists)
object accum extends TreeAccumulator[List[Tree]] {
def apply(x: List[Tree], tree: Tree)(implicit ctx: Context): List[Tree] = {
- if (tree.envelope.contains(sym.pos))
+ if (tree.pos.contains(sym.pos))
if (definedSym(tree) == sym) tree :: x
else {
val x1 = foldOver(x, tree)
@@ -619,20 +657,6 @@ object TreeInfo {
}
}
- def isApplyDynamicName(name: Name) = (name == nme.updateDynamic) || (name == nme.selectDynamic) || (name == nme.applyDynamic) || (name == nme.applyDynamicNamed)
-
- class DynamicApplicationExtractor(nameTest: Name => Boolean) {
- def unapply(tree: Tree) = tree match {
- case Apply(TypeApply(Select(qual, oper), _), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
- case Apply(Select(qual, oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((qual, name))
- case Apply(Ident(oper), List(Literal(Constant(name)))) if nameTest(oper) => Some((EmptyTree(), name))
- case _ => None
- }
- }
- object DynamicUpdate extends DynamicApplicationExtractor(_ == nme.updateDynamic)
- object DynamicApplication extends DynamicApplicationExtractor(isApplyDynamicName)
- object DynamicApplicationNamed extends DynamicApplicationExtractor(_ == nme.applyDynamicNamed)
-
object MacroImplReference {
private def refPart(tree: Tree): Tree = tree match {
case TypeApply(fun, _) => refPart(fun)
diff --git a/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/src/dotty/tools/dotc/ast/TreeTypeMap.scala
index a35fe2e8f..cf529dfda 100644
--- a/src/dotty/tools/dotc/ast/TreeTypeMap.scala
+++ b/src/dotty/tools/dotc/ast/TreeTypeMap.scala
@@ -92,11 +92,20 @@ final class TreeTypeMap(
case ddef @ DefDef(name, tparams, vparamss, tpt, _) =>
val (tmap1, tparams1) = transformDefs(ddef.tparams)
val (tmap2, vparamss1) = tmap1.transformVParamss(vparamss)
- cpy.DefDef(ddef)(name, tparams1, vparamss1, tmap2.transform(tpt), tmap2.transform(ddef.rhs))
+ val res = cpy.DefDef(ddef)(name, tparams1, vparamss1, tmap2.transform(tpt), tmap2.transform(ddef.rhs))
+ res.symbol.transformAnnotations {
+ case ann: BodyAnnotation => ann.derivedAnnotation(res.rhs)
+ case ann => ann
+ }
+ res
case blk @ Block(stats, expr) =>
val (tmap1, stats1) = transformDefs(stats)
val expr1 = tmap1.transform(expr)
cpy.Block(blk)(stats1, expr1)
+ case inlined @ Inlined(call, bindings, expanded) =>
+ val (tmap1, bindings1) = transformDefs(bindings)
+ val expanded1 = tmap1.transform(expanded)
+ cpy.Inlined(inlined)(call, bindings1, expanded1)
case cdef @ CaseDef(pat, guard, rhs) =>
val tmap = withMappedSyms(patVars(pat))
val pat1 = tmap.transform(pat)
@@ -127,10 +136,7 @@ final class TreeTypeMap(
def apply[ThisTree <: tpd.Tree](tree: ThisTree): ThisTree = transform(tree).asInstanceOf[ThisTree]
- def apply(annot: Annotation): Annotation = {
- val tree1 = apply(annot.tree)
- if (tree1 eq annot.tree) annot else ConcreteAnnotation(tree1)
- }
+ def apply(annot: Annotation): Annotation = annot.derivedAnnotation(apply(annot.tree))
/** The current tree map composed with a substitution [from -> to] */
def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap =
diff --git a/src/dotty/tools/dotc/ast/Trees.scala b/src/dotty/tools/dotc/ast/Trees.scala
index d0197b443..2c02e7d1e 100644
--- a/src/dotty/tools/dotc/ast/Trees.scala
+++ b/src/dotty/tools/dotc/ast/Trees.scala
@@ -3,8 +3,8 @@ package dotc
package ast
import core._
-import Types._, Names._, Flags._, util.Positions._, Contexts._, Constants._, SymDenotations._, Symbols._
-import Denotations._, StdNames._
+import Types._, Names._, Flags._, util.Positions._, Contexts._, Constants._
+import SymDenotations._, Symbols._, Denotations._, StdNames._, Comments._
import annotation.tailrec
import language.higherKinds
import collection.IndexedSeqOptimized
@@ -12,7 +12,7 @@ import collection.immutable.IndexedSeq
import collection.mutable.ListBuffer
import parsing.Tokens.Token
import printing.Printer
-import util.{Stats, Attachment, DotClass}
+import util.{Stats, Attachment, Property, DotClass}
import annotation.unchecked.uncheckedVariance
import language.implicitConversions
@@ -29,51 +29,10 @@ object Trees {
/** The total number of created tree nodes, maintained if Stats.enabled */
@sharable var ntrees = 0
- /** Modifiers and annotations for definitions
- * @param flags The set flags
- * @param privateWithin If a private or protected has is followed by a
- * qualifier [q], the name q, "" as a typename otherwise.
- * @param annotations The annotations preceding the modifiers
- */
- case class Modifiers[-T >: Untyped] (
- flags: FlagSet = EmptyFlags,
- privateWithin: TypeName = tpnme.EMPTY,
- annotations: List[Tree[T]] = Nil) extends Positioned with Cloneable {
-
- def is(fs: FlagSet): Boolean = flags is fs
- def is(fc: FlagConjunction): Boolean = flags is fc
-
- def | (fs: FlagSet): Modifiers[T] = withFlags(flags | fs)
- def & (fs: FlagSet): Modifiers[T] = withFlags(flags & fs)
- def &~(fs: FlagSet): Modifiers[T] = withFlags(flags &~ fs)
-
- def toTypeFlags: Modifiers[T] = withFlags(flags.toTypeFlags)
- def toTermFlags: Modifiers[T] = withFlags(flags.toTermFlags)
-
- def withFlags(flags: FlagSet) =
- if (this.flags == flags) this
- else copy(flags = flags)
-
- def withAddedAnnotation[U >: Untyped <: T](annot: Tree[U]): Modifiers[U] =
- if (annotations.exists(_ eq annot)) this
- else withAnnotations(annotations :+ annot)
-
- def withAnnotations[U >: Untyped <: T](annots: List[Tree[U]]): Modifiers[U] =
- if (annots eq annotations) this
- else copy(annotations = annots)
-
- def withPrivateWithin(pw: TypeName) =
- if (pw.isEmpty) this
- else copy(privateWithin = pw)
-
- def hasFlags = flags != EmptyFlags
- def hasAnnotations = annotations.nonEmpty
- def hasPrivateWithin = privateWithin != tpnme.EMPTY
-
- def tokenPos: Seq[(Token, Position)] = ???
- }
+ /** Property key for trees with documentation strings attached */
+ val DocComment = new Property.Key[Comment]
- @sharable private var nextId = 0 // for debugging
+ @sharable private var nextId = 0 // for debugging
type LazyTree = AnyRef /* really: Tree | Lazy[Tree] */
type LazyTreeList = AnyRef /* really: List[Tree] | Lazy[List[Tree]] */
@@ -89,7 +48,7 @@ object Trees {
* the existing tree transparently, assigning its `tpe` field,
* provided it was `null` before.
* - It is impossible to embed untyped trees in typed ones.
- * - Typed trees can be embedded untyped ones provided they are rooted
+ * - Typed trees can be embedded in untyped ones provided they are rooted
* in a TypedSplice node.
* - Type checking an untyped tree should remove all embedded `TypedSplice`
* nodes.
@@ -316,22 +275,39 @@ object Trees {
abstract class MemberDef[-T >: Untyped] extends NameTree[T] with DefTree[T] {
type ThisTree[-T >: Untyped] <: MemberDef[T]
- private[this] var myMods: Modifiers[T] = null
+ private[this] var myMods: untpd.Modifiers = null
- private[ast] def rawMods: Modifiers[T] =
- if (myMods == null) genericEmptyModifiers else myMods
+ private[dotc] def rawMods: untpd.Modifiers =
+ if (myMods == null) untpd.EmptyModifiers else myMods
- def withMods(mods: Modifiers[Untyped]): ThisTree[Untyped] = {
+ def rawComment: Option[Comment] = getAttachment(DocComment)
+
+ def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = {
val tree = if (myMods == null || (myMods == mods)) this else clone.asInstanceOf[MemberDef[Untyped]]
tree.setMods(mods)
tree.asInstanceOf[ThisTree[Untyped]]
}
- def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(Modifiers(flags))
+ def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags))
+
+ def setComment(comment: Option[Comment]): ThisTree[Untyped] = {
+ comment.map(putAttachment(DocComment, _))
+ asInstanceOf[ThisTree[Untyped]]
+ }
- protected def setMods(mods: Modifiers[T @uncheckedVariance]) = myMods = mods
+ protected def setMods(mods: untpd.Modifiers) = myMods = mods
- override def envelope: Position = rawMods.pos.union(pos).union(initialPos)
+ /** The position of the name defined by this definition.
+ * This is a point position if the definition is synthetic, or a range position
+ * if the definition comes from source.
+ * It might also be that the definition does not have a position (for instance when synthesized by
+ * a calling chain from `viewExists`), in that case the return position is NoPosition.
+ */
+ def namePos =
+ if (pos.exists)
+ if (rawMods.is(Synthetic)) Position(pos.point, pos.point)
+ else Position(pos.point, pos.point + name.length, pos.point)
+ else pos
}
/** A ValDef or DefDef tree */
@@ -355,7 +331,7 @@ object Trees {
override def toString = s"BackquotedIdent($name)"
}
- /** qualifier.name */
+ /** qualifier.name, or qualifier#name, if qualifier is a type */
case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)
extends RefTree[T] {
type ThisTree[-T >: Untyped] = Select[T]
@@ -419,15 +395,6 @@ object Trees {
type ThisTree[-T >: Untyped] = New[T]
}
- /** (left, right) */
- case class Pair[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
- extends TermTree[T] {
- type ThisTree[-T >: Untyped] = Pair[T]
- override def isTerm = left.isTerm && right.isTerm
- override def isType = left.isType && right.isType
- override def isPattern = !isTerm && (left.isPattern || left.isTerm) && (right.isPattern || right.isTerm)
- }
-
/** expr : tpt */
case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])
extends ProxyTree[T] with TermTree[T] {
@@ -533,6 +500,25 @@ object Trees {
override def toString = s"JavaSeqLiteral($elems, $elemtpt)"
}
+ /** A tree representing inlined code.
+ *
+ * @param call The original call that was inlined
+ * @param bindings Bindings for proxies to be used in the inlined code
+ * @param expansion The inlined tree, minus bindings.
+ *
+ * The full inlined code is equivalent to
+ *
+ * { bindings; expansion }
+ *
+ * The reason to keep `bindings` separate is because they are typed in a
+ * different context: `bindings` represent the arguments to the inlined
+ * call, whereas `expansion` represents the body of the inlined function.
+ */
+ case class Inlined[-T >: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])
+ extends Tree[T] {
+ type ThisTree[-T >: Untyped] = Inlined[T]
+ }
+
/** A type tree that represents an existing or inferred type */
case class TypeTree[-T >: Untyped] private[ast] (original: Tree[T])
extends DenotingTree[T] with TypTree[T] {
@@ -549,12 +535,6 @@ object Trees {
type ThisTree[-T >: Untyped] = SingletonTypeTree[T]
}
- /** qualifier # name */
- case class SelectFromTypeTree[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)
- extends RefTree[T] {
- type ThisTree[-T >: Untyped] = SelectFromTypeTree[T]
- }
-
/** left & right */
case class AndTypeTree[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
extends TypTree[T] {
@@ -581,6 +561,12 @@ object Trees {
def forwardTo = tpt
}
+ /** [typeparams] -> tpt */
+ case class PolyTypeTree[-T >: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])
+ extends TypTree[T] {
+ type ThisTree[-T >: Untyped] = PolyTypeTree[T]
+ }
+
/** => T */
case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])
extends TypTree[T] {
@@ -599,7 +585,6 @@ object Trees {
type ThisTree[-T >: Untyped] = Bind[T]
override def isType = name.isTypeName
override def isTerm = name.isTermName
- override def envelope: Position = pos union initialPos
}
/** tree_1 | ... | tree_n */
@@ -677,7 +662,7 @@ object Trees {
/** import expr.selectors
* where a selector is either an untyped `Ident`, `name` or
- * an untyped `Pair` `name => rename`
+ * an untyped thicket consisting of `name` and `rename`.
*/
case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[Tree[Untyped]])
extends DenotingTree[T] {
@@ -692,7 +677,7 @@ object Trees {
}
/** arg @annot */
- case class Annotated[-T >: Untyped] private[ast] (annot: Tree[T], arg: Tree[T])
+ case class Annotated[-T >: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])
extends ProxyTree[T] {
type ThisTree[-T >: Untyped] = Annotated[T]
def forwardTo = arg
@@ -720,6 +705,7 @@ object Trees {
val newTrees = trees.map(_.withPos(pos))
new Thicket[T](newTrees).asInstanceOf[this.type]
}
+ override def pos = (NoPosition /: trees) ((pos, t) => pos union t.pos)
override def foreachInThicket(op: Tree[T] => Unit): Unit =
trees foreach (_.foreachInThicket(op))
}
@@ -727,16 +713,14 @@ object Trees {
class EmptyValDef[T >: Untyped] extends ValDef[T](
nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T]) with WithoutTypeOrPos[T] {
override def isEmpty: Boolean = true
- setMods(Modifiers[T](PrivateLocal))
+ setMods(untpd.Modifiers(PrivateLocal))
}
@sharable val theEmptyTree: Thicket[Type] = Thicket(Nil)
@sharable val theEmptyValDef = new EmptyValDef[Type]
- @sharable val theEmptyModifiers = new Modifiers()
def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]]
def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]]
- def genericEmptyModifiers[T >: Untyped]: Modifiers[T] = theEmptyModifiers.asInstanceOf[Modifiers[T]]
def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = {
var buf: ListBuffer[Tree[T]] = null
@@ -795,7 +779,6 @@ object Trees {
abstract class Instance[T >: Untyped <: Type] extends DotClass { inst =>
- type Modifiers = Trees.Modifiers[T]
type Tree = Trees.Tree[T]
type TypTree = Trees.TypTree[T]
type TermTree = Trees.TermTree[T]
@@ -818,7 +801,6 @@ object Trees {
type TypeApply = Trees.TypeApply[T]
type Literal = Trees.Literal[T]
type New = Trees.New[T]
- type Pair = Trees.Pair[T]
type Typed = Trees.Typed[T]
type NamedArg = Trees.NamedArg[T]
type Assign = Trees.Assign[T]
@@ -831,13 +813,14 @@ object Trees {
type Try = Trees.Try[T]
type SeqLiteral = Trees.SeqLiteral[T]
type JavaSeqLiteral = Trees.JavaSeqLiteral[T]
+ type Inlined = Trees.Inlined[T]
type TypeTree = Trees.TypeTree[T]
type SingletonTypeTree = Trees.SingletonTypeTree[T]
- type SelectFromTypeTree = Trees.SelectFromTypeTree[T]
type AndTypeTree = Trees.AndTypeTree[T]
type OrTypeTree = Trees.OrTypeTree[T]
type RefinedTypeTree = Trees.RefinedTypeTree[T]
type AppliedTypeTree = Trees.AppliedTypeTree[T]
+ type PolyTypeTree = Trees.PolyTypeTree[T]
type ByNameTypeTree = Trees.ByNameTypeTree[T]
type TypeBoundsTree = Trees.TypeBoundsTree[T]
type Bind = Trees.Bind[T]
@@ -854,14 +837,9 @@ object Trees {
@sharable val EmptyTree: Thicket = genericEmptyTree
@sharable val EmptyValDef: ValDef = genericEmptyValDef
- @sharable val EmptyModifiers: Modifiers = genericEmptyModifiers
// ----- Auxiliary creation methods ------------------
- def Modifiers(flags: FlagSet = EmptyFlags,
- privateWithin: TypeName = tpnme.EMPTY,
- annotations: List[Tree] = Nil) = new Modifiers(flags, privateWithin, annotations)
-
def Thicket(trees: List[Tree]): Thicket = new Thicket(trees)
def Thicket(): Thicket = EmptyTree
def Thicket(x1: Tree, x2: Tree): Thicket = Thicket(x1 :: x2 :: Nil)
@@ -871,16 +849,11 @@ object Trees {
case ys => Thicket(ys)
}
- // ----- Accessing modifiers ----------------------------------------------------
-
- abstract class ModsDeco { def mods: Modifiers }
- implicit def modsDeco(mdef: MemberDef)(implicit ctx: Context): ModsDeco
-
// ----- Helper classes for copying, transforming, accumulating -----------------
val cpy: TreeCopier
- /** A class for copying trees. The copy methods avid creating a new tree
+ /** A class for copying trees. The copy methods avoid creating a new tree
* If all arguments stay the same.
*
* Note: Some of the copy methods take a context.
@@ -936,10 +909,6 @@ object Trees {
case tree: New if tpt eq tree.tpt => tree
case _ => finalize(tree, untpd.New(tpt))
}
- def Pair(tree: Tree)(left: Tree, right: Tree)(implicit ctx: Context): Pair = tree match {
- case tree: Pair if (left eq tree.left) && (right eq tree.right) => tree
- case _ => finalize(tree, untpd.Pair(left, right))
- }
def Typed(tree: Tree)(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed = tree match {
case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree
case _ => finalize(tree, untpd.Typed(expr, tpt))
@@ -987,6 +956,10 @@ object Trees {
case tree: SeqLiteral if (elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree
case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt))
}
+ def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(implicit ctx: Context): Inlined = tree match {
+ case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree
+ case _ => finalize(tree, untpd.Inlined(call, bindings, expansion))
+ }
def TypeTree(tree: Tree)(original: Tree): TypeTree = tree match {
case tree: TypeTree if original eq tree.original => tree
case _ => finalize(tree, untpd.TypeTree(original))
@@ -995,10 +968,6 @@ object Trees {
case tree: SingletonTypeTree if ref eq tree.ref => tree
case _ => finalize(tree, untpd.SingletonTypeTree(ref))
}
- def SelectFromTypeTree(tree: Tree)(qualifier: Tree, name: Name): SelectFromTypeTree = tree match {
- case tree: SelectFromTypeTree if (qualifier eq tree.qualifier) && (name == tree.name) => tree
- case _ => finalize(tree, untpd.SelectFromTypeTree(qualifier, name))
- }
def AndTypeTree(tree: Tree)(left: Tree, right: Tree): AndTypeTree = tree match {
case tree: AndTypeTree if (left eq tree.left) && (right eq tree.right) => tree
case _ => finalize(tree, untpd.AndTypeTree(left, right))
@@ -1015,6 +984,10 @@ object Trees {
case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree
case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args))
}
+ def PolyTypeTree(tree: Tree)(tparams: List[TypeDef], body: Tree): PolyTypeTree = tree match {
+ case tree: PolyTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree
+ case _ => finalize(tree, untpd.PolyTypeTree(tparams, body))
+ }
def ByNameTypeTree(tree: Tree)(result: Tree): ByNameTypeTree = tree match {
case tree: ByNameTypeTree if result eq tree.result => tree
case _ => finalize(tree, untpd.ByNameTypeTree(result))
@@ -1059,9 +1032,9 @@ object Trees {
case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree
case _ => finalize(tree, untpd.PackageDef(pid, stats))
}
- def Annotated(tree: Tree)(annot: Tree, arg: Tree)(implicit ctx: Context): Annotated = tree match {
- case tree: Annotated if (annot eq tree.annot) && (arg eq tree.arg) => tree
- case _ => finalize(tree, untpd.Annotated(annot, arg))
+ def Annotated(tree: Tree)(arg: Tree, annot: Tree)(implicit ctx: Context): Annotated = tree match {
+ case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree
+ case _ => finalize(tree, untpd.Annotated(arg, annot))
}
def Thicket(tree: Tree)(trees: List[Tree]): Thicket = tree match {
case tree: Thicket if trees eq tree.trees => tree
@@ -1109,8 +1082,6 @@ object Trees {
tree
case New(tpt) =>
cpy.New(tree)(transform(tpt))
- case Pair(left, right) =>
- cpy.Pair(tree)(transform(left), transform(right))
case Typed(expr, tpt) =>
cpy.Typed(tree)(transform(expr), transform(tpt))
case NamedArg(name, arg) =>
@@ -1133,12 +1104,12 @@ object Trees {
cpy.Try(tree)(transform(block), transformSub(cases), transform(finalizer))
case SeqLiteral(elems, elemtpt) =>
cpy.SeqLiteral(tree)(transform(elems), transform(elemtpt))
+ case Inlined(call, bindings, expansion) =>
+ cpy.Inlined(tree)(call, transformSub(bindings), transform(expansion))
case TypeTree(original) =>
tree
case SingletonTypeTree(ref) =>
cpy.SingletonTypeTree(tree)(transform(ref))
- case SelectFromTypeTree(qualifier, name) =>
- cpy.SelectFromTypeTree(tree)(transform(qualifier), name)
case AndTypeTree(left, right) =>
cpy.AndTypeTree(tree)(transform(left), transform(right))
case OrTypeTree(left, right) =>
@@ -1147,6 +1118,8 @@ object Trees {
cpy.RefinedTypeTree(tree)(transform(tpt), transformSub(refinements))
case AppliedTypeTree(tpt, args) =>
cpy.AppliedTypeTree(tree)(transform(tpt), transform(args))
+ case PolyTypeTree(tparams, body) =>
+ cpy.PolyTypeTree(tree)(transformSub(tparams), transform(body))
case ByNameTypeTree(result) =>
cpy.ByNameTypeTree(tree)(transform(result))
case TypeBoundsTree(lo, hi) =>
@@ -1173,8 +1146,8 @@ object Trees {
cpy.Import(tree)(transform(expr), selectors)
case PackageDef(pid, stats) =>
cpy.PackageDef(tree)(transformSub(pid), transformStats(stats))
- case Annotated(annot, arg) =>
- cpy.Annotated(tree)(transform(annot), transform(arg))
+ case Annotated(arg, annot) =>
+ cpy.Annotated(tree)(transform(arg), transform(annot))
case Thicket(trees) =>
val trees1 = transform(trees)
if (trees1 eq trees) tree else Thicket(trees1)
@@ -1213,8 +1186,6 @@ object Trees {
x
case New(tpt) =>
this(x, tpt)
- case Pair(left, right) =>
- this(this(x, left), right)
case Typed(expr, tpt) =>
this(this(x, expr), tpt)
case NamedArg(name, arg) =>
@@ -1237,12 +1208,12 @@ object Trees {
this(this(this(x, block), handler), finalizer)
case SeqLiteral(elems, elemtpt) =>
this(this(x, elems), elemtpt)
+ case Inlined(call, bindings, expansion) =>
+ this(this(x, bindings), expansion)
case TypeTree(original) =>
x
case SingletonTypeTree(ref) =>
this(x, ref)
- case SelectFromTypeTree(qualifier, name) =>
- this(x, qualifier)
case AndTypeTree(left, right) =>
this(this(x, left), right)
case OrTypeTree(left, right) =>
@@ -1251,6 +1222,9 @@ object Trees {
this(this(x, tpt), refinements)
case AppliedTypeTree(tpt, args) =>
this(this(x, tpt), args)
+ case PolyTypeTree(tparams, body) =>
+ implicit val ctx: Context = localCtx
+ this(this(x, tparams), body)
case ByNameTypeTree(result) =>
this(x, result)
case TypeBoundsTree(lo, hi) =>
@@ -1276,8 +1250,8 @@ object Trees {
this(x, expr)
case PackageDef(pid, stats) =>
this(this(x, pid), stats)(localCtx)
- case Annotated(annot, arg) =>
- this(this(x, annot), arg)
+ case Annotated(arg, annot) =>
+ this(this(x, arg), annot)
case Thicket(ts) =>
this(x, ts)
}
@@ -1315,7 +1289,6 @@ object Trees {
case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName)
case tree: untpd.PolyTypeDef => untpd.cpy.PolyTypeDef(tree)(newName.asTypeName, tree.tparams, tree.rhs).withMods(tree.rawMods)
case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName)
- case tree: SelectFromTypeTree => cpy.SelectFromTypeTree(tree)(tree.qualifier, newName)
}
}.asInstanceOf[tree.ThisTree[T]]
}
diff --git a/src/dotty/tools/dotc/ast/tpd.scala b/src/dotty/tools/dotc/ast/tpd.scala
index a6d97478b..d8db3306c 100644
--- a/src/dotty/tools/dotc/ast/tpd.scala
+++ b/src/dotty/tools/dotc/ast/tpd.scala
@@ -9,34 +9,26 @@ import core._
import util.Positions._, Types._, Contexts._, Constants._, Names._, Flags._
import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Symbols._
import Denotations._, Decorators._, DenotTransformers._
-import config.Printers._
-import typer.Mode
import collection.mutable
+import util.{Property, SourceFile, NoSource}
import typer.ErrorReporting._
import scala.annotation.tailrec
+import scala.io.Codec
/** Some creators for typed trees */
object tpd extends Trees.Instance[Type] with TypedTreeInfo {
private def ta(implicit ctx: Context) = ctx.typeAssigner
- def Modifiers(sym: Symbol)(implicit ctx: Context): Modifiers = Modifiers(
- sym.flags & ModifierFlags,
- if (sym.privateWithin.exists) sym.privateWithin.asType.name else tpnme.EMPTY,
- sym.annotations map (_.tree))
-
def Ident(tp: NamedType)(implicit ctx: Context): Ident =
ta.assignType(untpd.Ident(tp.name), tp)
def Select(qualifier: Tree, name: Name)(implicit ctx: Context): Select =
ta.assignType(untpd.Select(qualifier, name), qualifier)
- def SelectFromTypeTree(qualifier: Tree, name: Name)(implicit ctx: Context): SelectFromTypeTree =
- ta.assignType(untpd.SelectFromTypeTree(qualifier, name), qualifier)
-
- def SelectFromTypeTree(qualifier: Tree, tp: NamedType)(implicit ctx: Context): SelectFromTypeTree =
- untpd.SelectFromTypeTree(qualifier, tp.name).withType(tp)
+ def Select(qualifier: Tree, tp: NamedType)(implicit ctx: Context): Select =
+ untpd.Select(qualifier, tp.name).withType(tp)
def This(cls: ClassSymbol)(implicit ctx: Context): This =
untpd.This(cls.name).withType(cls.thisType)
@@ -61,9 +53,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
def New(tp: Type)(implicit ctx: Context): New = New(TypeTree(tp))
- def Pair(left: Tree, right: Tree)(implicit ctx: Context): Pair =
- ta.assignType(untpd.Pair(left, right), left, right)
-
def Typed(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed =
ta.assignType(untpd.Typed(expr, tpt), tpt)
@@ -126,8 +115,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
def SeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): SeqLiteral =
ta.assignType(untpd.SeqLiteral(elems, elemtpt), elems, elemtpt)
- def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): SeqLiteral =
- ta.assignType(new untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt)
+ def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree)(implicit ctx: Context): JavaSeqLiteral =
+ ta.assignType(new untpd.JavaSeqLiteral(elems, elemtpt), elems, elemtpt).asInstanceOf[JavaSeqLiteral]
+
+ def Inlined(call: Tree, bindings: List[MemberDef], expansion: Tree)(implicit ctx: Context): Inlined =
+ ta.assignType(untpd.Inlined(call, bindings, expansion), bindings, expansion)
def TypeTree(original: Tree)(implicit ctx: Context): TypeTree =
TypeTree(original.tpe, original)
@@ -294,8 +286,8 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
def PackageDef(pid: RefTree, stats: List[Tree])(implicit ctx: Context): PackageDef =
ta.assignType(untpd.PackageDef(pid, stats), pid)
- def Annotated(annot: Tree, arg: Tree)(implicit ctx: Context): Annotated =
- ta.assignType(untpd.Annotated(annot, arg), annot, arg)
+ def Annotated(arg: Tree, annot: Tree)(implicit ctx: Context): Annotated =
+ ta.assignType(untpd.Annotated(arg, annot), arg, annot)
def Throw(expr: Tree)(implicit ctx: Context): Tree =
ref(defn.throwMethod).appliedTo(expr)
@@ -338,7 +330,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
Ident(tp)
else tp.prefix match {
case pre: SingletonType => followOuterLinks(singleton(pre)).select(tp)
- case pre => SelectFromTypeTree(TypeTree(pre), tp)
+ case pre => Select(TypeTree(pre), tp)
} // no checks necessary
def ref(sym: Symbol)(implicit ctx: Context): Tree =
@@ -363,18 +355,16 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
* kind for the given element type in `typeArg`. No type arguments or
* `length` arguments are given.
*/
- def newArray(typeArg: Tree, pos: Position)(implicit ctx: Context): Tree = {
- val elemType = typeArg.tpe
- val elemClass = elemType.classSymbol
- def newArr(kind: String) =
- ref(defn.DottyArraysModule).select(s"new${kind}Array".toTermName).withPos(pos)
- if (TypeErasure.isUnboundedGeneric(elemType))
- newArr("Generic").appliedToTypeTrees(typeArg :: Nil)
- else if (elemClass.isPrimitiveValueClass)
- newArr(elemClass.name.toString)
- else
- newArr("Ref").appliedToTypeTrees(
- TypeTree(defn.ArrayOf(elemType)).withPos(typeArg.pos) :: Nil)
+ def newArray(elemTpe: Type, returnTpe: Type, pos: Position, dims: JavaSeqLiteral)(implicit ctx: Context): Tree = {
+ val elemClass = elemTpe.classSymbol
+ def newArr =
+ ref(defn.DottyArraysModule).select(defn.newArrayMethod).withPos(pos)
+
+ if (!ctx.erasedTypes) {
+ assert(!TypeErasure.isUnboundedGeneric(elemTpe)) //needs to be done during typer. See Applications.convertNewGenericArray
+ newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withPos(pos)
+ } else // after erasure
+ newArr.appliedToArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withPos(pos)
}
// ------ Creating typed equivalents of trees that exist only in untyped form -------
@@ -456,10 +446,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
} else foldOver(sym, tree)
}
- implicit class modsDeco(mdef: MemberDef)(implicit ctx: Context) extends ModsDeco {
- def mods = if (mdef.hasType) Modifiers(mdef.symbol) else mdef.rawMods
- }
-
override val cpy = new TypedTreeCopier
class TypedTreeCopier extends TreeCopier {
@@ -497,14 +483,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
override def New(tree: Tree)(tpt: Tree)(implicit ctx: Context): New =
ta.assignType(untpd.cpy.New(tree)(tpt), tpt)
- override def Pair(tree: Tree)(left: Tree, right: Tree)(implicit ctx: Context): Pair = {
- val tree1 = untpd.cpy.Pair(tree)(left, right)
- tree match {
- case tree: Pair if (left.tpe eq tree.left.tpe) && (right.tpe eq tree.right.tpe) => tree1.withTypeUnchecked(tree.tpe)
- case _ => ta.assignType(tree1, left, right)
- }
- }
-
override def Typed(tree: Tree)(expr: Tree, tpt: Tree)(implicit ctx: Context): Typed =
ta.assignType(untpd.cpy.Typed(tree)(expr, tpt), tpt)
@@ -572,11 +550,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
}
}
- override def Annotated(tree: Tree)(annot: Tree, arg: Tree)(implicit ctx: Context): Annotated = {
- val tree1 = untpd.cpy.Annotated(tree)(annot, arg)
+ override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(implicit ctx: Context): Annotated = {
+ val tree1 = untpd.cpy.Annotated(tree)(arg, annot)
tree match {
case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe)
- case _ => ta.assignType(tree1, annot, arg)
+ case _ => ta.assignType(tree1, arg, annot)
}
}
@@ -836,7 +814,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
case tpnme.Float => TYPE(defn.BoxedFloatModule)
case tpnme.Double => TYPE(defn.BoxedDoubleModule)
case tpnme.Unit => TYPE(defn.BoxedUnitModule)
- case _ => Literal(Constant(TypeErasure.erasure(tp)))
+ case _ =>
+ if(ctx.erasedTypes || !tp.derivesFrom(defn.ArrayClass))
+ Literal(Constant(TypeErasure.erasure(tp)))
+ else Literal(Constant(tp))
}
}
@@ -847,9 +828,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
assert(denot.exists, i"no member $receiver . $method, members = ${receiver.tpe.decls}")
val selected =
if (denot.isOverloaded) {
- val allAlts = denot.alternatives.map(_.termRef)
- val alternatives =
- ctx.typer.resolveOverloaded(allAlts, proto, Nil)
+ def typeParamCount(tp: Type) = tp.widen match {
+ case tp: PolyType => tp.paramBounds.length
+ case _ => 0
+ }
+ var allAlts = denot.alternatives
+ .map(_.termRef).filter(tr => typeParamCount(tr) == targs.length)
+ if (targs.isEmpty) allAlts = allAlts.filterNot(_.widen.isInstanceOf[PolyType])
+ val alternatives = ctx.typer.resolveOverloaded(allAlts, proto)
assert(alternatives.size == 1,
i"${if (alternatives.isEmpty) "no" else "multiple"} overloads available for " +
i"$method on ${receiver.tpe.widenDealias} with targs: $targs%, %; args: $args%, % of types ${args.tpes}%, %; expectedType: $expectedType." +
@@ -937,8 +923,26 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo {
}
}
- // ensure that constructors are fully applied?
- // ensure that normal methods are fully applied?
+ /** A key to be used in a context property that tracks enclosing inlined calls */
+ private val InlinedCalls = new Property.Key[List[Tree]]
+
+ /** A context derived form `ctx` that records `call` as innermost enclosing
+ * call for which the inlined version is currently processed.
+ */
+ def inlineContext(call: Tree)(implicit ctx: Context): Context =
+ ctx.fresh.setProperty(InlinedCalls, call :: enclosingInlineds)
+
+ /** All enclosing calls that are currently inlined, from innermost to outermost */
+ def enclosingInlineds(implicit ctx: Context): List[Tree] =
+ ctx.property(InlinedCalls).getOrElse(Nil)
+ /** The source file where the symbol of the `@inline` method referred to by `call`
+ * is defined
+ */
+ def sourceFile(call: Tree)(implicit ctx: Context) = {
+ val file = call.symbol.sourceFile
+ val encoding = ctx.settings.encoding.value
+ if (file != null && file.exists) new SourceFile(file, Codec(encoding)) else NoSource
+ }
}
diff --git a/src/dotty/tools/dotc/ast/untpd.scala b/src/dotty/tools/dotc/ast/untpd.scala
index c7a7036c3..852c3a346 100644
--- a/src/dotty/tools/dotc/ast/untpd.scala
+++ b/src/dotty/tools/dotc/ast/untpd.scala
@@ -6,7 +6,7 @@ import core._
import util.Positions._, Types._, Contexts._, Constants._, Names._, NameOps._, Flags._
import Denotations._, SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._
import Decorators._
-import util.Attachment
+import util.Property
import language.higherKinds
import collection.mutable.ListBuffer
@@ -20,11 +20,18 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
override def isType = op.isTypeName
}
- /** A typed subtree of an untyped tree needs to be wrapped in a TypedSlice */
- case class TypedSplice(tree: tpd.Tree) extends ProxyTree {
+ /** A typed subtree of an untyped tree needs to be wrapped in a TypedSlice
+ * @param owner The current owner at the time the tree was defined
+ */
+ abstract case class TypedSplice(tree: tpd.Tree)(val owner: Symbol) extends ProxyTree {
def forwardTo = tree
}
+ object TypedSplice {
+ def apply(tree: tpd.Tree)(implicit ctx: Context): TypedSplice =
+ new TypedSplice(tree)(ctx.owner) {}
+ }
+
/** mods object name impl */
case class ModuleDef(name: TermName, impl: Template)
extends MemberDef {
@@ -35,11 +42,24 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree) extends TermTree
case class SymbolLit(str: String) extends TermTree
- case class InterpolatedString(id: TermName, strings: List[Literal], elems: List[Tree]) extends TermTree
+
+ /** An interpolated string
+ * @param segments a list of two element tickets consisting of string literal and argument tree,
+ * possibly with a simple string literal as last element of the list
+ */
+ case class InterpolatedString(id: TermName, segments: List[Tree]) extends TermTree
+
case class Function(args: List[Tree], body: Tree) extends Tree {
override def isTerm = body.isTerm
override def isType = body.isType
}
+ /** A function created from a wildcard expression
+ * @param placeHolderParams a list of definitions of synthetic parameters
+ * @param body the function body where wildcards are replaced by
+ * references to synthetic parameters.
+ */
+ class WildcardFunction(placeholderParams: List[ValDef], body: Tree) extends Function(placeholderParams, body)
+
case class InfixOp(left: Tree, op: Name, right: Tree) extends OpTree
case class PostfixOp(od: Tree, op: Name) extends OpTree
case class PrefixOp(op: Name, od: Tree) extends OpTree
@@ -63,6 +83,62 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
class PolyTypeDef(name: TypeName, override val tparams: List[TypeDef], rhs: Tree)
extends TypeDef(name, rhs)
+ /** A block arising from a right-associative infix operation, where, e.g.
+ *
+ * a +: b
+ *
+ * is expanded to
+ *
+ * { val x = a; b.+:(x) }
+ */
+ class InfixOpBlock(leftOperand: Tree, rightOp: Tree) extends Block(leftOperand :: Nil, rightOp)
+
+ // ----- Modifiers -----------------------------------------------------
+
+ /** Modifiers and annotations for definitions
+ * @param flags The set flags
+ * @param privateWithin If a private or protected has is followed by a
+ * qualifier [q], the name q, "" as a typename otherwise.
+ * @param annotations The annotations preceding the modifiers
+ */
+ case class Modifiers (
+ flags: FlagSet = EmptyFlags,
+ privateWithin: TypeName = tpnme.EMPTY,
+ annotations: List[Tree] = Nil) extends Positioned with Cloneable {
+
+ def is(fs: FlagSet): Boolean = flags is fs
+ def is(fc: FlagConjunction): Boolean = flags is fc
+
+ def | (fs: FlagSet): Modifiers = withFlags(flags | fs)
+ def & (fs: FlagSet): Modifiers = withFlags(flags & fs)
+ def &~(fs: FlagSet): Modifiers = withFlags(flags &~ fs)
+
+ def toTypeFlags: Modifiers = withFlags(flags.toTypeFlags)
+ def toTermFlags: Modifiers = withFlags(flags.toTermFlags)
+
+ def withFlags(flags: FlagSet) =
+ if (this.flags == flags) this
+ else copy(flags = flags)
+
+ def withAddedAnnotation(annot: Tree): Modifiers =
+ if (annotations.exists(_ eq annot)) this
+ else withAnnotations(annotations :+ annot)
+
+ def withAnnotations(annots: List[Tree]): Modifiers =
+ if (annots eq annotations) this
+ else copy(annotations = annots)
+
+ def withPrivateWithin(pw: TypeName) =
+ if (pw.isEmpty) this
+ else copy(privateWithin = pw)
+
+ def hasFlags = flags != EmptyFlags
+ def hasAnnotations = annotations.nonEmpty
+ def hasPrivateWithin = privateWithin != tpnme.EMPTY
+ }
+
+ @sharable val EmptyModifiers: Modifiers = new Modifiers()
+
// ----- TypeTrees that refer to other tree's symbols -------------------
/** A type tree that gets its type from some other tree's symbol. Enters the
@@ -92,17 +168,17 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def derivedType(originalSym: Symbol)(implicit ctx: Context): Type
}
- /** Attachment key containing TypeTrees whose type is computed
+ /** Property key containing TypeTrees whose type is computed
* from the symbol in this type. These type trees have marker trees
* TypeRefOfSym or InfoOfSym as their originals.
*/
- val References = new Attachment.Key[List[Tree]]
+ val References = new Property.Key[List[Tree]]
- /** Attachment key for TypeTrees marked with TypeRefOfSym or InfoOfSym
+ /** Property key for TypeTrees marked with TypeRefOfSym or InfoOfSym
* which contains the symbol of the original tree from which this
* TypeTree is derived.
*/
- val OriginalSymbol = new Attachment.Key[Symbol]
+ val OriginalSymbol = new Property.Key[Symbol]
// ------ Creation methods for untyped only -----------------
@@ -116,7 +192,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def TypeApply(fun: Tree, args: List[Tree]): TypeApply = new TypeApply(fun, args)
def Literal(const: Constant): Literal = new Literal(const)
def New(tpt: Tree): New = new New(tpt)
- def Pair(left: Tree, right: Tree): Pair = new Pair(left, right)
def Typed(expr: Tree, tpt: Tree): Typed = new Typed(expr, tpt)
def NamedArg(name: Name, arg: Tree): NamedArg = new NamedArg(name, arg)
def Assign(lhs: Tree, rhs: Tree): Assign = new Assign(lhs, rhs)
@@ -129,14 +204,15 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def Try(expr: Tree, cases: List[CaseDef], finalizer: Tree): Try = new Try(expr, cases, finalizer)
def SeqLiteral(elems: List[Tree], elemtpt: Tree): SeqLiteral = new SeqLiteral(elems, elemtpt)
def JavaSeqLiteral(elems: List[Tree], elemtpt: Tree): JavaSeqLiteral = new JavaSeqLiteral(elems, elemtpt)
+ def Inlined(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree): Inlined = new Inlined(call, bindings, expansion)
def TypeTree(original: Tree): TypeTree = new TypeTree(original)
def TypeTree() = new TypeTree(EmptyTree)
def SingletonTypeTree(ref: Tree): SingletonTypeTree = new SingletonTypeTree(ref)
- def SelectFromTypeTree(qualifier: Tree, name: Name): SelectFromTypeTree = new SelectFromTypeTree(qualifier, name)
def AndTypeTree(left: Tree, right: Tree): AndTypeTree = new AndTypeTree(left, right)
def OrTypeTree(left: Tree, right: Tree): OrTypeTree = new OrTypeTree(left, right)
def RefinedTypeTree(tpt: Tree, refinements: List[Tree]): RefinedTypeTree = new RefinedTypeTree(tpt, refinements)
def AppliedTypeTree(tpt: Tree, args: List[Tree]): AppliedTypeTree = new AppliedTypeTree(tpt, args)
+ def PolyTypeTree(tparams: List[TypeDef], body: Tree): PolyTypeTree = new PolyTypeTree(tparams, body)
def ByNameTypeTree(result: Tree): ByNameTypeTree = new ByNameTypeTree(result)
def TypeBoundsTree(lo: Tree, hi: Tree): TypeBoundsTree = new TypeBoundsTree(lo, hi)
def Bind(name: Name, body: Tree): Bind = new Bind(name, body)
@@ -148,7 +224,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def Template(constr: DefDef, parents: List[Tree], self: ValDef, body: LazyTreeList): Template = new Template(constr, parents, self, body)
def Import(expr: Tree, selectors: List[untpd.Tree]): Import = new Import(expr, selectors)
def PackageDef(pid: RefTree, stats: List[Tree]): PackageDef = new PackageDef(pid, stats)
- def Annotated(annot: Tree, arg: Tree): Annotated = new Annotated(annot, arg)
+ def Annotated(arg: Tree, annot: Tree): Annotated = new Annotated(arg, annot)
// ------ Additional creation methods for untyped only -----------------
@@ -163,7 +239,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
case AppliedTypeTree(tycon, targs) =>
(tycon, targs)
case TypedSplice(AppliedTypeTree(tycon, targs)) =>
- (TypedSplice(tycon), targs map TypedSplice)
+ (TypedSplice(tycon), targs map (TypedSplice(_)))
case TypedSplice(tpt1: Tree) =>
val argTypes = tpt1.tpe.argTypes
val tycon = tpt1.tpe.withoutArgs(argTypes)
@@ -191,7 +267,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def AppliedTypeTree(tpt: Tree, arg: Tree): AppliedTypeTree =
AppliedTypeTree(tpt, arg :: Nil)
- def TypeTree(tpe: Type): TypedSplice = TypedSplice(TypeTree().withTypeUnchecked(tpe))
+ def TypeTree(tpe: Type)(implicit ctx: Context): TypedSplice = TypedSplice(TypeTree().withTypeUnchecked(tpe))
def TypeDef(name: TypeName, tparams: List[TypeDef], rhs: Tree): TypeDef =
if (tparams.isEmpty) TypeDef(name, rhs) else new PolyTypeDef(name, tparams, rhs)
@@ -204,6 +280,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
def rootDot(name: Name) = Select(Ident(nme.ROOTPKG), name)
def scalaDot(name: Name) = Select(rootDot(nme.scala_), name)
def scalaUnit = scalaDot(tpnme.Unit)
+ def scalaAny = scalaDot(tpnme.Any)
def makeConstructor(tparams: List[TypeDef], vparamss: List[List[ValDef]], rhs: Tree = EmptyTree)(implicit ctx: Context): DefDef =
DefDef(nme.CONSTRUCTOR, tparams, vparamss, TypeTree(), rhs)
@@ -241,22 +318,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
/** A repeated argument such as `arg: _*` */
def repeated(arg: Tree)(implicit ctx: Context) = Typed(arg, Ident(tpnme.WILDCARD_STAR))
-// ------- Decorators -------------------------------------------------
+// ----- Accessing modifiers ----------------------------------------------------
- implicit class UntypedTreeDecorator(val self: Tree) extends AnyVal {
- def locateEnclosing(base: List[Tree], pos: Position): List[Tree] = {
- def encloses(elem: Any) = elem match {
- case t: Tree => t.envelope contains pos
- case _ => false
- }
- base.productIterator find encloses match {
- case Some(tree: Tree) => locateEnclosing(tree :: base, pos)
- case none => base
- }
- }
- }
+ abstract class ModsDecorator { def mods: Modifiers }
- implicit class modsDeco(val mdef: MemberDef)(implicit ctx: Context) extends ModsDeco {
+ implicit class modsDeco(val mdef: MemberDef)(implicit ctx: Context) {
def mods = mdef.rawMods
}
@@ -280,6 +346,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree
case _ => untpd.ModuleDef(name, impl).withPos(tree.pos)
}
+ def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree) = tree match {
+ case tree: ParsedTry
+ if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree
+ case _ => untpd.ParsedTry(expr, handler, finalizer).withPos(tree.pos)
+ }
def PolyTypeDef(tree: Tree)(name: TypeName, tparams: List[TypeDef], rhs: Tree) = tree match {
case tree: PolyTypeDef if (name eq tree.name) && (tparams eq tree.tparams) && (rhs eq tree.rhs) => tree
case _ => new PolyTypeDef(name, tparams, rhs).withPos(tree.pos)
@@ -288,9 +359,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
case tree: SymbolLit if str == tree.str => tree
case _ => untpd.SymbolLit(str).withPos(tree.pos)
}
- def InterpolatedString(tree: Tree)(id: TermName, strings: List[Literal], elems: List[Tree]) = tree match {
- case tree: InterpolatedString if (id eq tree.id) && (strings eq tree.strings) && (elems eq tree.elems) => tree
- case _ => untpd.InterpolatedString(id, strings, elems).withPos(tree.pos)
+ def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree]) = tree match {
+ case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree
+ case _ => untpd.InterpolatedString(id, segments).withPos(tree.pos)
}
def Function(tree: Tree)(args: List[Tree], body: Tree) = tree match {
case tree: Function if (args eq tree.args) && (body eq tree.body) => tree
@@ -358,10 +429,12 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
override def transform(tree: Tree)(implicit ctx: Context): Tree = tree match {
case ModuleDef(name, impl) =>
cpy.ModuleDef(tree)(name, transformSub(impl))
+ case ParsedTry(expr, handler, finalizer) =>
+ cpy.ParsedTry(tree)(transform(expr), transform(handler), transform(finalizer))
case SymbolLit(str) =>
cpy.SymbolLit(tree)(str)
- case InterpolatedString(id, strings, elems) =>
- cpy.InterpolatedString(tree)(id, transformSub(strings), transform(elems))
+ case InterpolatedString(id, segments) =>
+ cpy.InterpolatedString(tree)(id, transform(segments))
case Function(args, body) =>
cpy.Function(tree)(transform(args), transform(body))
case InfixOp(left, op, right) =>
@@ -403,10 +476,12 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
override def foldOver(x: X, tree: Tree)(implicit ctx: Context): X = tree match {
case ModuleDef(name, impl) =>
this(x, impl)
+ case ParsedTry(expr, handler, finalizer) =>
+ this(this(this(x, expr), handler), finalizer)
case SymbolLit(str) =>
x
- case InterpolatedString(id, strings, elems) =>
- this(this(x, strings), elems)
+ case InterpolatedString(id, segments) =>
+ this(x, segments)
case Function(args, body) =>
this(this(x, args), body)
case InfixOp(left, op, right) =>
@@ -446,6 +521,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo {
}
}
+ /** Fold `f` over all tree nodes, in depth-first, prefix order */
+ class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] {
+ def apply(x: X, tree: Tree)(implicit ctx: Context): X = foldOver(f(x, tree), tree)
+ }
+
override def rename(tree: NameTree, newName: Name)(implicit ctx: Context): tree.ThisTree[Untyped] = tree match {
case t: PolyTypeDef =>
cpy.PolyTypeDef(t)(newName.asTypeName, t.tparams, t.rhs).asInstanceOf[tree.ThisTree[Untyped]]
diff --git a/src/dotty/tools/dotc/config/CompilerCommand.scala b/src/dotty/tools/dotc/config/CompilerCommand.scala
index e34ca07f9..19ede3cec 100644
--- a/src/dotty/tools/dotc/config/CompilerCommand.scala
+++ b/src/dotty/tools/dotc/config/CompilerCommand.scala
@@ -13,7 +13,7 @@ object CompilerCommand extends DotClass {
/** The name of the command */
def cmdName = "scalac"
- private def explainAdvanced = "\n" + """
+ private def explainAdvanced = """
|-- Notes on option parsing --
|Boolean settings are always false unless set.
|Where multiple values are accepted, they should be comma-separated.
@@ -26,7 +26,7 @@ object CompilerCommand extends DotClass {
| example: -Ylog:erasure+ logs the erasure phase and the phase after the erasure phase.
| This is useful because during the tree transform of phase X, we often
| already are in phase X + 1.
- """.stripMargin.trim + "\n"
+ """
def shortUsage = s"Usage: $cmdName <options> <source files>"
@@ -110,18 +110,18 @@ object CompilerCommand extends DotClass {
if (summary.errors.nonEmpty) {
summary.errors foreach (ctx.error(_))
- ctx.println(" dotc -help gives more information")
+ ctx.echo(" dotc -help gives more information")
Nil
}
else if (settings.version.value) {
- ctx.println(versionMsg)
+ ctx.echo(versionMsg)
Nil
}
else if (shouldStopWithInfo) {
- ctx.println(infoMessage)
+ ctx.echo(infoMessage)
Nil
} else {
- if (sourcesRequired && summary.arguments.isEmpty) ctx.println(usageMessage)
+ if (sourcesRequired && summary.arguments.isEmpty) ctx.echo(usageMessage)
summary.arguments
}
}
diff --git a/src/dotty/tools/dotc/config/Config.scala b/src/dotty/tools/dotc/config/Config.scala
index 3cc3091b5..7744a5479 100644
--- a/src/dotty/tools/dotc/config/Config.scala
+++ b/src/dotty/tools/dotc/config/Config.scala
@@ -63,6 +63,9 @@ object Config {
*/
final val checkNoDoubleBindings = true
+ /** Check positions for consistency after parsing */
+ final val checkPositions = true
+
/** Show subtype traces for all deep subtype recursions */
final val traceDeepSubTypeRecursions = false
@@ -72,10 +75,9 @@ object Config {
/** If this flag is set, take the fast path when comparing same-named type-aliases and types */
final val fastPathForRefinedSubtype = true
- /** If this flag is set, $apply projections are checked that they apply to a
- * higher-kinded type.
+ /** If this flag is set, higher-kinded applications are checked for validity
*/
- final val checkProjections = false
+ final val checkHKApplications = false
/** The recursion depth for showing a summarized string */
final val summarizeDepth = 2
@@ -85,8 +87,12 @@ object Config {
*/
final val checkLambdaVariance = false
- /** Check that certain types cannot be created in erasedTypes phases */
- final val checkUnerased = true
+ /** Check that certain types cannot be created in erasedTypes phases.
+ * Note: Turning this option on will get some false negatives, since it is
+ * possible that And/Or types are still created during erasure as the result
+ * of some operation on an existing type.
+ */
+ final val checkUnerased = false
/** In `derivedSelect`, rewrite
*
@@ -98,6 +104,12 @@ object Config {
*/
final val splitProjections = false
+ /** If this flag is on, always rewrite an application `S[Ts]` where `S` is an alias for
+ * `[Xs] -> U` to `[Xs := Ts]U`.
+ * Turning this flag on was observed to give a ~6% speedup on the JUnit test suite.
+ */
+ final val simplifyApplications = true
+
/** Initial size of superId table */
final val InitialSuperIdsSize = 4096
diff --git a/src/dotty/tools/dotc/config/PathResolver.scala b/src/dotty/tools/dotc/config/PathResolver.scala
index f9f698e72..55d585e94 100644
--- a/src/dotty/tools/dotc/config/PathResolver.scala
+++ b/src/dotty/tools/dotc/config/PathResolver.scala
@@ -180,6 +180,7 @@ class PathResolver(implicit ctx: Context) {
case "extdirs" => settings.extdirs.value
case "classpath" | "cp" => settings.classpath.value
case "sourcepath" => settings.sourcepath.value
+ case "priorityclasspath" => settings.priorityclasspath.value
}
/** Calculated values based on any given command line options, falling back on
@@ -193,6 +194,7 @@ class PathResolver(implicit ctx: Context) {
def javaUserClassPath = if (useJavaClassPath) Defaults.javaUserClassPath else ""
def scalaBootClassPath = cmdLineOrElse("bootclasspath", Defaults.scalaBootClassPath)
def scalaExtDirs = cmdLineOrElse("extdirs", Defaults.scalaExtDirs)
+ def priorityClassPath = cmdLineOrElse("prioritypath", "")
/** Scaladoc doesn't need any bootstrapping, otherwise will create errors such as:
* [scaladoc] ../scala-trunk/src/reflect/scala/reflect/macros/Reifiers.scala:89: error: object api is not a member of package reflect
* [scaladoc] case class ReificationException(val pos: reflect.api.PositionApi, val msg: String) extends Throwable(msg)
@@ -220,7 +222,9 @@ class PathResolver(implicit ctx: Context) {
import context._
// Assemble the elements!
+ // priority class path takes precedence
def basis = List[Traversable[ClassPath]](
+ classesInExpandedPath(priorityClassPath), // 0. The priority class path (for testing).
classesInPath(javaBootClassPath), // 1. The Java bootstrap class path.
contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path.
classesInExpandedPath(javaUserClassPath), // 3. The Java application class path.
@@ -235,6 +239,7 @@ class PathResolver(implicit ctx: Context) {
override def toString = """
|object Calculated {
| scalaHome = %s
+ | priorityClassPath = %s
| javaBootClassPath = %s
| javaExtDirs = %s
| javaUserClassPath = %s
@@ -244,7 +249,7 @@ class PathResolver(implicit ctx: Context) {
| userClassPath = %s
| sourcePath = %s
|}""".trim.stripMargin.format(
- scalaHome,
+ scalaHome, ppcp(priorityClassPath),
ppcp(javaBootClassPath), ppcp(javaExtDirs), ppcp(javaUserClassPath),
useJavaClassPath,
ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath),
diff --git a/src/dotty/tools/dotc/config/Printers.scala b/src/dotty/tools/dotc/config/Printers.scala
index 21147fe6f..002d0f933 100644
--- a/src/dotty/tools/dotc/config/Printers.scala
+++ b/src/dotty/tools/dotc/config/Printers.scala
@@ -4,15 +4,14 @@ object Printers {
class Printer {
def println(msg: => String): Unit = System.out.println(msg)
- def echo[T](msg: => String, value: T): T = { println(msg + value); value }
}
object noPrinter extends Printer {
override def println(msg: => String): Unit = ()
- override def echo[T](msg: => String, value: T): T = value
}
val default: Printer = new Printer
+ val dottydoc: Printer = noPrinter
val core: Printer = noPrinter
val typr: Printer = noPrinter
val constr: Printer = noPrinter
@@ -31,4 +30,5 @@ object Printers {
val completions: Printer = noPrinter
val cyclicErrors: Printer = noPrinter
val pickling: Printer = noPrinter
+ val inlining: Printer = noPrinter
}
diff --git a/src/dotty/tools/dotc/config/ScalaSettings.scala b/src/dotty/tools/dotc/config/ScalaSettings.scala
index 07a23fdb6..8f47e08bf 100644
--- a/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -15,6 +15,10 @@ class ScalaSettings extends Settings.SettingGroup {
val javabootclasspath = PathSetting("-javabootclasspath", "Override java boot classpath.", Defaults.javaBootClassPath)
val javaextdirs = PathSetting("-javaextdirs", "Override java extdirs classpath.", Defaults.javaExtDirs)
val sourcepath = PathSetting("-sourcepath", "Specify location(s) of source files.", "") // Defaults.scalaSourcePath
+ val argfiles = BooleanSetting("@<file>", "A text file containing compiler arguments (options and source files)")
+ val classpath = PathSetting("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
+ val d = StringSetting("-d", "directory|jar", "destination for generated classfiles.", ".")
+ val priorityclasspath = PathSetting("-priorityclasspath", "class path that takes precedence over all other paths (for testing only)", "")
/** Other settings.
*/
@@ -23,11 +27,12 @@ class ScalaSettings extends Settings.SettingGroup {
val migration = BooleanSetting("-migration", "Emit warning and location for migration issues from Scala 2.")
val encoding = StringSetting("-encoding", "encoding", "Specify character encoding used by source files.", Properties.sourceEncoding)
val explaintypes = BooleanSetting("-explaintypes", "Explain type errors in more detail.")
+ val explain = BooleanSetting("-explain", "Explain errors in more detail.")
val feature = BooleanSetting("-feature", "Emit warning and location for usages of features that should be imported explicitly.")
val g = ChoiceSetting("-g", "level", "Set level of generated debugging info.", List("none", "source", "line", "vars", "notailcalls"), "vars")
val help = BooleanSetting("-help", "Print a synopsis of standard options")
val nowarn = BooleanSetting("-nowarn", "Generate no warnings.")
- val print = BooleanSetting("-print", "Print program with Scala-specific features removed.")
+ val color = ChoiceSetting("-color", "mode", "Colored output", List("always", "never"/*, "auto"*/), "always"/* "auto"*/)
val target = ChoiceSetting("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.",
List("jvm-1.5", "jvm-1.5-fjbg", "jvm-1.5-asm", "jvm-1.6", "jvm-1.7", "jvm-1.8", "msil"),
"jvm-1.8")
@@ -37,7 +42,7 @@ class ScalaSettings extends Settings.SettingGroup {
val usejavacp = BooleanSetting("-usejavacp", "Utilize the java.class.path in classpath resolution.")
val verbose = BooleanSetting("-verbose", "Output messages about what the compiler is doing.")
val version = BooleanSetting("-version", "Print product version and exit.")
- val pageWidth = IntSetting("-pagewidth", "Set page width", 80)
+ val pageWidth = IntSetting("-pagewidth", "Set page width", 120)
val jvmargs = PrefixSetting("-J<flag>", "-J", "Pass <flag> directly to the runtime system.")
val defines = PrefixSetting("-Dproperty=value", "-D", "Pass -Dproperty=value directly to the runtime system.")
@@ -45,9 +50,6 @@ class ScalaSettings extends Settings.SettingGroup {
val nobootcp = BooleanSetting("-nobootcp", "Do not use the boot classpath for the scala jars.")
val strict = BooleanSetting("-strict", "Use strict type rules, which means some formerly legal code does not typecheck anymore.")
- val argfiles = BooleanSetting("@<file>", "A text file containing compiler arguments (options and source files)")
- val classpath = PathSetting("-classpath", "Specify where to find user class files.", defaultClasspath) withAbbreviation "-cp"
- val d = StringSetting("-d", "directory|jar", "destination for generated classfiles.", ".")
val nospecialization = BooleanSetting("-no-specialization", "Ignore @specialize annotations.")
val language = MultiStringSetting("-language", "feature", "Enable one or more language features.")
val rewrite = OptionSetting[Rewrites]("-rewrite", "When used in conjunction with -language:Scala2 rewrites sources to migrate to new syntax")
@@ -67,6 +69,7 @@ class ScalaSettings extends Settings.SettingGroup {
val genPhaseGraph = StringSetting("-Xgenerate-phase-graph", "file", "Generate the phase graphs (outputs .dot files) to fileX.dot.", "")
val XlogImplicits = BooleanSetting("-Xlog-implicits", "Show more detail on why some implicits are not applicable.")
val XminImplicitSearchDepth = IntSetting("-Xmin-implicit-search-depth", "Set number of levels of implicit searches undertaken before checking for divergence.", 5)
+ val xmaxInlines = IntSetting("-Xmax-inlines", "Maximal number of successive inlines", 70)
val logImplicitConv = BooleanSetting("-Xlog-implicit-conversions", "Print a message whenever an implicit conversion is inserted.")
val logReflectiveCalls = BooleanSetting("-Xlog-reflective-calls", "Print a message when a reflective method call is generated")
val logFreeTerms = BooleanSetting("-Xlog-free-terms", "Print a message when reification creates a free term.")
@@ -74,7 +77,6 @@ class ScalaSettings extends Settings.SettingGroup {
val maxClassfileName = IntSetting("-Xmax-classfile-name", "Maximum filename length for generated classes", 255, 72 to 255)
val Xmigration = VersionSetting("-Xmigration", "Warn about constructs whose behavior may have changed since version.")
val Xsource = VersionSetting("-Xsource", "Treat compiler input as Scala source for the specified version.")
- val Xnojline = BooleanSetting("-Xnojline", "Do not use JLine for editing.")
val Xverify = BooleanSetting("-Xverify", "Verify generic signatures in generated bytecode (asm backend only.)")
val plugin = MultiStringSetting("-Xplugin", "file", "Load one or more plugins from files.")
val disable = MultiStringSetting("-Xplugin-disable", "plugin", "Disable the given plugin(s).")
@@ -85,6 +87,8 @@ class ScalaSettings extends Settings.SettingGroup {
val writeICode = PhasesSetting("-Xprint-icode", "Log internal icode to *.icode files after", "icode")
val Xprintpos = BooleanSetting("-Xprint-pos", "Print tree positions, as offsets.")
val printtypes = BooleanSetting("-Xprint-types", "Print tree types (debugging option).")
+ val XprintDiff = BooleanSetting("-Xprint-diff", "Print changed parts of the tree since last print.")
+ val XprintDiffDel = BooleanSetting("-Xprint-diff-del", "Print changed parts of the tree since last print including deleted parts.")
val prompt = BooleanSetting("-Xprompt", "Display a prompt after each error (debugging option).")
val script = StringSetting("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "")
val mainClass = StringSetting("-Xmain-class", "path", "Class for manifest's Main-Class entry (only useful with -d <jar>)", "")
@@ -119,7 +123,6 @@ class ScalaSettings extends Settings.SettingGroup {
val debugOwners = BooleanSetting("-Ydebug-owners", "Print all owners of definitions (requires -Yprint-syms)")
//val doc = BooleanSetting ("-Ydoc", "Generate documentation")
val termConflict = ChoiceSetting("-Yresolve-term-conflict", "strategy", "Resolve term conflicts", List("package", "object", "error"), "error")
- val inline = BooleanSetting("-Yinline", "Perform inlining when possible.")
val inlineHandlers = BooleanSetting("-Yinline-handlers", "Perform exception handler inlining when possible.")
val YinlinerWarnings = BooleanSetting("-Yinline-warnings", "Emit inlining warnings. (Normally surpressed due to high volume)")
val Ylinearizer = ChoiceSetting("-Ylinearizer", "which", "Linearizer to use", List("normal", "dfs", "rpo", "dump"), "rpo")
@@ -127,7 +130,7 @@ class ScalaSettings extends Settings.SettingGroup {
val Ylogcp = BooleanSetting("-Ylog-classpath", "Output information about what classpath is being applied.")
val Ynogenericsig = BooleanSetting("-Yno-generic-signatures", "Suppress generation of generic signatures for Java.")
val YnoImports = BooleanSetting("-Yno-imports", "Compile without importing scala.*, java.lang.*, or Predef.")
- val nopredef = BooleanSetting("-Yno-predef", "Compile without importing Predef.")
+ val YnoPredef = BooleanSetting("-Yno-predef", "Compile without importing Predef.")
val noAdaptedArgs = BooleanSetting("-Yno-adapted-args", "Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.")
val selfInAnnots = BooleanSetting("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.")
val Yshowtrees = BooleanSetting("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs in formatted form.")
@@ -159,6 +162,10 @@ class ScalaSettings extends Settings.SettingGroup {
val YprintSyms = BooleanSetting("-Yprint-syms", "when printing trees print info in symbols instead of corresponding info in trees.")
val YtestPickler = BooleanSetting("-Ytest-pickler", "self-test for pickling functionality; should be used with -Ystop-after:pickler")
val YcheckReentrant = BooleanSetting("-Ycheck-reentrant", "check that compiled program does not contain vars that can be accessed from a global root.")
+ val YkeepComments = BooleanSetting("-Ykeep-comments", "Keep comments when scanning source files.")
+ val YforceSbtPhases = BooleanSetting("-Yforce-sbt-phases", "Run the phases used by sbt for incremental compilation (ExtractDependencies and ExtractAPI) even if the compiler is ran outside of sbt, for debugging.")
+ val YdumpSbtInc = BooleanSetting("-Ydump-sbt-inc", "For every compiled foo.scala, output the API representation and dependencies used for sbt incremental compilation in foo.inc, implies -Yforce-sbt-phases.")
+ val YcheckAllPatmat = BooleanSetting("-Ycheck-all-patmat", "Check exhaustivity and redundancy of all pattern matching (used for testing the algorithm)")
def stop = YstopAfter
/** Area-specific debug output.
@@ -180,6 +187,7 @@ class ScalaSettings extends Settings.SettingGroup {
val Yexplainlowlevel = BooleanSetting("-Yexplain-lowlevel", "When explaining type errors, show types at a lower level.")
val YnoDoubleBindings = BooleanSetting("-Yno-double-bindings", "Assert no namedtype is bound twice (should be enabled only if program is error-free).")
val YshowVarBounds = BooleanSetting("-Yshow-var-bounds", "Print type variables with their bounds")
+ val YnoInline = BooleanSetting("-Yno-inline", "Suppress inlining.")
val optimise = BooleanSetting("-optimise", "Generates faster bytecode by applying optimisations to the program") withAbbreviation "-optimize"
@@ -192,4 +200,68 @@ class ScalaSettings extends Settings.SettingGroup {
val YpresentationLog = StringSetting("-Ypresentation-log", "file", "Log presentation compiler events into file", "")
val YpresentationReplay = StringSetting("-Ypresentation-replay", "file", "Replay presentation compiler events from file", "")
val YpresentationDelay = IntSetting("-Ypresentation-delay", "Wait number of ms after typing before starting typechecking", 0, 0 to 999)
+
+ /** Doc specific settings */
+ val template = OptionSetting[String](
+ "-template",
+ "A mustache template for rendering each top-level entity in the API"
+ )
+
+ val resources = OptionSetting[String](
+ "-resources",
+ "A directory containing static resources needed for the API documentation"
+ )
+
+ val DocTitle = StringSetting (
+ "-Ydoc-title",
+ "title",
+ "The overall name of the Scaladoc site",
+ ""
+ )
+
+ val DocVersion = StringSetting (
+ "-Ydoc-version",
+ "version",
+ "An optional version number, to be appended to the title",
+ ""
+ )
+
+ val DocOutput = StringSetting (
+ "-Ydoc-output",
+ "outdir",
+ "The output directory in which to place the documentation",
+ "."
+ )
+
+ val DocFooter = StringSetting (
+ "-Ydoc-footer",
+ "footer",
+ "A footer on every Scaladoc page, by default the EPFL/Lightbend copyright notice. Can be overridden with a custom footer.",
+ ""
+ )
+
+ val DocUncompilable = StringSetting (
+ "-Ydoc-no-compile",
+ "path",
+ "A directory containing sources which should be parsed, no more (e.g. AnyRef.scala)",
+ ""
+ )
+
+ //def DocUncompilableFiles(implicit ctx: Context) = DocUncompilable.value match {
+ // case "" => Nil
+ // case path => io.Directory(path).deepFiles.filter(_ hasExtension "scala").toList
+ //}
+
+ val DocExternalDoc = MultiStringSetting (
+ "-Ydoc-external-doc",
+ "external-doc",
+ "comma-separated list of classpath_entry_path#doc_URL pairs describing external dependencies."
+ )
+
+ val DocAuthor = BooleanSetting("-Ydoc-author", "Include authors.", true)
+
+ val DocGroups = BooleanSetting (
+ "-Ydoc:groups",
+ "Group similar functions together (based on the @group annotation)"
+ )
}
diff --git a/src/dotty/tools/dotc/config/Settings.scala b/src/dotty/tools/dotc/config/Settings.scala
index eddeb83ab..cffa047fe 100644
--- a/src/dotty/tools/dotc/config/Settings.scala
+++ b/src/dotty/tools/dotc/config/Settings.scala
@@ -25,6 +25,8 @@ object Settings {
private var values = ArrayBuffer(initialValues: _*)
private var _wasRead: Boolean = false
+ override def toString = s"SettingsState(values: ${values.toList})"
+
def value(idx: Int): Any = {
_wasRead = true
values(idx)
@@ -119,11 +121,13 @@ object Settings {
case (ListTag, _) =>
if (argRest.isEmpty) missingArg
else update((argRest split ",").toList, args)
+ case (StringTag, _) if choices.nonEmpty =>
+ if (argRest.isEmpty) missingArg
+ else if (!choices.contains(argRest))
+ fail(s"$arg is not a valid choice for $name", args)
+ else update(argRest, args)
case (StringTag, arg2 :: args2) =>
- if (choices.nonEmpty && !(choices contains arg2))
- fail(s"$arg2 is not a valid choice for $name", args2)
- else
- update(arg2, args2)
+ update(arg2, args2)
case (IntTag, arg2 :: args2) =>
try {
val x = arg2.toInt
@@ -233,8 +237,8 @@ object Settings {
setting
}
- def BooleanSetting(name: String, descr: String): Setting[Boolean] =
- publish(Setting(name, descr, false))
+ def BooleanSetting(name: String, descr: String, initialValue: Boolean = false): Setting[Boolean] =
+ publish(Setting(name, descr, initialValue))
def StringSetting(name: String, helpArg: String, descr: String, default: String): Setting[String] =
publish(Setting(name, descr, default, helpArg))
diff --git a/src/dotty/tools/dotc/core/Annotations.scala b/src/dotty/tools/dotc/core/Annotations.scala
index 2b27b5e01..0e8e5a1f0 100644
--- a/src/dotty/tools/dotc/core/Annotations.scala
+++ b/src/dotty/tools/dotc/core/Annotations.scala
@@ -5,7 +5,6 @@ import Symbols._, Types._, util.Positions._, Contexts._, Constants._, ast.tpd._
import config.ScalaVersion
import StdNames._
import dotty.tools.dotc.ast.{tpd, untpd}
-import dotty.tools.dotc.typer.ProtoTypes.FunProtoTyped
object Annotations {
@@ -27,6 +26,8 @@ object Annotations {
}
def argumentConstant(i: Int)(implicit ctx: Context): Option[Constant] =
for (ConstantType(c) <- argument(i) map (_.tpe)) yield c
+
+ def ensureCompleted(implicit ctx: Context): Unit = tree
}
case class ConcreteAnnotation(t: Tree) extends Annotation {
@@ -43,6 +44,36 @@ object Annotations {
override def symbol(implicit ctx: Context): Symbol = sym
}
+ /** An annotation indicating the body of a right-hand side,
+ * typically of an inline method. Treated specially in
+ * pickling/unpickling and TypeTreeMaps
+ */
+ abstract class BodyAnnotation extends Annotation {
+ override def symbol(implicit ctx: Context) = defn.BodyAnnot
+ override def derivedAnnotation(tree: Tree)(implicit ctx: Context) =
+ if (tree eq this.tree) this else ConcreteBodyAnnotation(tree)
+ override def arguments(implicit ctx: Context) = Nil
+ override def ensureCompleted(implicit ctx: Context) = ()
+ }
+
+ case class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation {
+ def tree(implicit ctx: Context) = body
+ }
+
+ case class LazyBodyAnnotation(bodyExpr: Context => Tree) extends BodyAnnotation {
+ private var evaluated = false
+ private var myBody: Tree = _
+ def tree(implicit ctx: Context) = {
+ if (evaluated) assert(myBody != null)
+ else {
+ evaluated = true
+ myBody = bodyExpr(ctx)
+ }
+ myBody
+ }
+ def isEvaluated = evaluated
+ }
+
object Annotation {
def apply(tree: Tree) = ConcreteAnnotation(tree)
@@ -95,6 +126,9 @@ object Annotations {
def makeChild(sym: Symbol)(implicit ctx: Context) =
deferred(defn.ChildAnnot,
implicit ctx => New(defn.ChildAnnotType.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil))
+
+ def makeSourceFile(path: String)(implicit ctx: Context) =
+ apply(defn.SourceFileAnnot, Literal(Constant(path)))
}
def ThrowsAnnotation(cls: ClassSymbol)(implicit ctx: Context) = {
diff --git a/src/dotty/tools/dotc/core/CheckRealizable.scala b/src/dotty/tools/dotc/core/CheckRealizable.scala
index 11fd6786a..78ec685fc 100644
--- a/src/dotty/tools/dotc/core/CheckRealizable.scala
+++ b/src/dotty/tools/dotc/core/CheckRealizable.scala
@@ -4,7 +4,6 @@ package core
import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
import SymDenotations._, Denotations.SingleDenotation
-import config.Printers._
import util.Positions._
import Decorators._
import StdNames._
diff --git a/src/dotty/tools/dotc/core/Comments.scala b/src/dotty/tools/dotc/core/Comments.scala
new file mode 100644
index 000000000..1cf5aec38
--- /dev/null
+++ b/src/dotty/tools/dotc/core/Comments.scala
@@ -0,0 +1,458 @@
+package dotty.tools
+package dotc
+package core
+
+import ast.{ untpd, tpd }
+import Decorators._, Symbols._, Contexts._, Flags.EmptyFlags
+import util.SourceFile
+import util.Positions._
+import util.CommentParsing._
+import util.Property.Key
+import parsing.Parsers.Parser
+
+object Comments {
+ val ContextDoc = new Key[ContextDocstrings]
+
+ /** Decorator for getting docbase out of context */
+ implicit class CommentsContext(val ctx: Context) extends AnyVal {
+ def docCtx: Option[ContextDocstrings] = ctx.property(ContextDoc)
+ }
+
+ /** Context for Docstrings, contains basic functionality for getting
+ * docstrings via `Symbol` and expanding templates
+ */
+ class ContextDocstrings {
+ import scala.collection.mutable
+
+ private[this] val _docstrings: mutable.Map[Symbol, Comment] =
+ mutable.Map.empty
+
+ val templateExpander = new CommentExpander
+
+ def docstrings: Map[Symbol, Comment] = _docstrings.toMap
+
+ def docstring(sym: Symbol): Option[Comment] = _docstrings.get(sym)
+
+ def addDocstring(sym: Symbol, doc: Option[Comment]): Unit =
+ doc.map(d => _docstrings += (sym -> d))
+ }
+
+ /** A `Comment` contains the unformatted docstring as well as a position
+ *
+ * The `Comment` contains functionality to create versions of itself without
+ * `@usecase` sections as well as functionality to map the `raw` docstring
+ */
+ abstract case class Comment(pos: Position, raw: String) { self =>
+ def isExpanded: Boolean
+
+ def usecases: List[UseCase]
+
+ val isDocComment = raw.startsWith("/**")
+
+ def expand(f: String => String): Comment = new Comment(pos, f(raw)) {
+ val isExpanded = true
+ val usecases = self.usecases
+ }
+
+ def withUsecases(implicit ctx: Context): Comment = new Comment(pos, stripUsecases) {
+ val isExpanded = self.isExpanded
+ val usecases = parseUsecases
+ }
+
+ private[this] lazy val stripUsecases: String =
+ removeSections(raw, "@usecase", "@define")
+
+ private[this] def parseUsecases(implicit ctx: Context): List[UseCase] =
+ if (!raw.startsWith("/**"))
+ List.empty[UseCase]
+ else
+ tagIndex(raw)
+ .filter { startsWithTag(raw, _, "@usecase") }
+ .map { case (start, end) => decomposeUseCase(start, end) }
+
+ /** Turns a usecase section into a UseCase, with code changed to:
+ * {{{
+ * // From:
+ * def foo: A
+ * // To:
+ * def foo: A = ???
+ * }}}
+ */
+ private[this] def decomposeUseCase(start: Int, end: Int)(implicit ctx: Context): UseCase = {
+ def subPos(start: Int, end: Int) =
+ if (pos == NoPosition) NoPosition
+ else {
+ val start1 = pos.start + start
+ val end1 = pos.end + end
+ pos withStart start1 withPoint start1 withEnd end1
+ }
+
+ val codeStart = skipWhitespace(raw, start + "@usecase".length)
+ val codeEnd = skipToEol(raw, codeStart)
+ val code = raw.substring(codeStart, codeEnd) + " = ???"
+ val codePos = subPos(codeStart, codeEnd)
+ val commentStart = skipLineLead(raw, codeEnd + 1) min end
+ val commentStr = "/** " + raw.substring(commentStart, end) + "*/"
+ val commentPos = subPos(commentStart, end)
+
+ UseCase(Comment(commentPos, commentStr), code, codePos)
+ }
+ }
+
+ object Comment {
+ def apply(pos: Position, raw: String, expanded: Boolean = false, usc: List[UseCase] = Nil)(implicit ctx: Context): Comment =
+ new Comment(pos, raw) {
+ val isExpanded = expanded
+ val usecases = usc
+ }
+ }
+
+ abstract case class UseCase(comment: Comment, code: String, codePos: Position) {
+ /** Set by typer */
+ var tpdCode: tpd.DefDef = _
+
+ def untpdCode: untpd.Tree
+ }
+
+ object UseCase {
+ def apply(comment: Comment, code: String, codePos: Position)(implicit ctx: Context) =
+ new UseCase(comment, code, codePos) {
+ val untpdCode = {
+ val tree = new Parser(new SourceFile("<usecase>", code)).localDef(codePos.start, EmptyFlags)
+
+ tree match {
+ case tree: untpd.DefDef =>
+ val newName = (tree.name.show + "$" + codePos + "$doc").toTermName
+ untpd.DefDef(newName, tree.tparams, tree.vparamss, tree.tpt, tree.rhs)
+ case _ =>
+ ctx.error("proper definition was not found in `@usecase`", codePos)
+ tree
+ }
+ }
+ }
+ }
+
+ /**
+ * Port of DocComment.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+ class CommentExpander {
+ import dotc.config.Printers.dottydoc
+ import scala.collection.mutable
+
+ def expand(sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val parent = if (site != NoSymbol) site else sym
+ defineVariables(parent)
+ expandedDocComment(sym, parent)
+ }
+
+ /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing.
+ *
+ * @param sym The symbol for which doc comment is returned
+ * @param site The class for which doc comments are generated
+ * @throws ExpansionLimitExceeded when more than 10 successive expansions
+ * of the same string are done, which is
+ * interpreted as a recursive variable definition.
+ */
+ def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(implicit ctx: Context): String = {
+ // when parsing a top level class or module, use the (module-)class itself to look up variable definitions
+ val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym
+ else site
+ expandVariables(cookedDocComment(sym, docStr), sym, parent)
+ }
+
+ private def template(raw: String): String =
+ removeSections(raw, "@define")
+
+ private def defines(raw: String): List[String] = {
+ val sections = tagIndex(raw)
+ val defines = sections filter { startsWithTag(raw, _, "@define") }
+ val usecases = sections filter { startsWithTag(raw, _, "@usecase") }
+ val end = startTag(raw, (defines ::: usecases).sortBy(_._1))
+
+ defines map { case (start, end) => raw.substring(start, end) }
+ }
+
+ private def replaceInheritDocToInheritdoc(docStr: String): String =
+ docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc")
+
+ /** The cooked doc comment of an overridden symbol */
+ protected def superComment(sym: Symbol)(implicit ctx: Context): Option[String] =
+ allInheritedOverriddenSymbols(sym).iterator map (x => cookedDocComment(x)) find (_ != "")
+
+ private val cookedDocComments = mutable.HashMap[Symbol, String]()
+
+ /** The raw doc comment of symbol `sym`, minus usecase and define sections, augmented by
+ * missing sections of an inherited doc comment.
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the doc comment of the overridden version is copied instead.
+ */
+ def cookedDocComment(sym: Symbol, docStr: String = "")(implicit ctx: Context): String = cookedDocComments.getOrElseUpdate(sym, {
+ var ownComment =
+ if (docStr.length == 0) ctx.docCtx.flatMap(_.docstring(sym).map(c => template(c.raw))).getOrElse("")
+ else template(docStr)
+ ownComment = replaceInheritDocToInheritdoc(ownComment)
+
+ superComment(sym) match {
+ case None =>
+ // SI-8210 - The warning would be false negative when this symbol is a setter
+ if (ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter)
+ dottydoc.println(s"${sym.pos}: the comment for ${sym} contains @inheritdoc, but no parent comment is available to inherit from.")
+ ownComment.replaceAllLiterally("@inheritdoc", "<invalid inheritdoc annotation>")
+ case Some(sc) =>
+ if (ownComment == "") sc
+ else expandInheritdoc(sc, merge(sc, ownComment, sym), sym)
+ }
+ })
+
+ private def isMovable(str: String, sec: (Int, Int)): Boolean =
+ startsWithTag(str, sec, "@param") ||
+ startsWithTag(str, sec, "@tparam") ||
+ startsWithTag(str, sec, "@return")
+
+ def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = {
+ val srcSections = tagIndex(src)
+ val dstSections = tagIndex(dst)
+ val srcParams = paramDocs(src, "@param", srcSections)
+ val dstParams = paramDocs(dst, "@param", dstSections)
+ val srcTParams = paramDocs(src, "@tparam", srcSections)
+ val dstTParams = paramDocs(dst, "@tparam", dstSections)
+ val out = new StringBuilder
+ var copied = 0
+ var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _)))
+
+ if (copyFirstPara) {
+ val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment
+ (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections)
+ out append src.substring(0, eop).trim
+ copied = 3
+ tocopy = 3
+ }
+
+ def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match {
+ case Some((start, end)) =>
+ if (end > tocopy) tocopy = end
+ case None =>
+ srcSec match {
+ case Some((start1, end1)) => {
+ out append dst.substring(copied, tocopy).trim
+ out append "\n"
+ copied = tocopy
+ out append src.substring(start1, end1).trim
+ }
+ case None =>
+ }
+ }
+
+ //TODO: enable this once you know how to get `sym.paramss`
+ /*
+ for (params <- sym.paramss; param <- params)
+ mergeSection(srcParams get param.name.toString, dstParams get param.name.toString)
+ for (tparam <- sym.typeParams)
+ mergeSection(srcTParams get tparam.name.toString, dstTParams get tparam.name.toString)
+
+ mergeSection(returnDoc(src, srcSections), returnDoc(dst, dstSections))
+ mergeSection(groupDoc(src, srcSections), groupDoc(dst, dstSections))
+ */
+
+ if (out.length == 0) dst
+ else {
+ out append dst.substring(copied)
+ out.toString
+ }
+ }
+
+ /**
+ * Expand inheritdoc tags
+ * - for the main comment we transform the inheritdoc into the super variable,
+ * and the variable expansion can expand it further
+ * - for the param, tparam and throws sections we must replace comments on the spot
+ *
+ * This is done separately, for two reasons:
+ * 1. It takes longer to run compared to merge
+ * 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
+ * impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
+ */
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
+ else {
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
+
+ val out = new StringBuilder
+
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
+
+ def getParentSection(section: (Int, Int)): String = {
+
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
+ case other => other
+ }
+
+ def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String =
+ paramMap.get(param) match {
+ case Some(section) =>
+ // Cleanup the section tag and parameter
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
+ case None =>
+ dottydoc.println(s"""${sym.pos}: the """" + getSectionHeader + "\" annotation of the " + sym +
+ " comment contains @inheritdoc, but the corresponding section in the parent is not defined.")
+ "<invalid inheritdoc annotation>"
+ }
+
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
+ }
+ }
+
+ def mainComment(str: String, sections: List[(Int, Int)]): String =
+ if (str.trim.length > 3)
+ str.trim.substring(3, startTag(str, sections))
+ else
+ ""
+
+ // Append main comment
+ out.append("/**")
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
+
+ // Append sections
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
+
+ out.append("*/")
+ out.toString
+ }
+
+ protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(implicit ctx: Context): String = {
+ val expandLimit = 10
+
+ def expandInternal(str: String, depth: Int): String = {
+ if (depth >= expandLimit)
+ throw new ExpansionLimitExceeded(str)
+
+ val out = new StringBuilder
+ var copied, idx = 0
+ // excluding variables written as \$foo so we can use them when
+ // necessary to document things like Symbol#decode
+ def isEscaped = idx > 0 && str.charAt(idx - 1) == '\\'
+ while (idx < str.length) {
+ if ((str charAt idx) != '$' || isEscaped)
+ idx += 1
+ else {
+ val vstart = idx
+ idx = skipVariable(str, idx + 1)
+ def replaceWith(repl: String) = {
+ out append str.substring(copied, vstart)
+ out append repl
+ copied = idx
+ }
+ variableName(str.substring(vstart + 1, idx)) match {
+ case "super" =>
+ superComment(sym) foreach { sc =>
+ val superSections = tagIndex(sc)
+ replaceWith(sc.substring(3, startTag(sc, superSections)))
+ for (sec @ (start, end) <- superSections)
+ if (!isMovable(sc, sec)) out append sc.substring(start, end)
+ }
+ case "" => idx += 1
+ case vname =>
+ lookupVariable(vname, site) match {
+ case Some(replacement) => replaceWith(replacement)
+ case None =>
+ dottydoc.println(s"Variable $vname undefined in comment for $sym in $site")
+ }
+ }
+ }
+ }
+ if (out.length == 0) str
+ else {
+ out append str.substring(copied)
+ expandInternal(out.toString, depth + 1)
+ }
+ }
+
+ // We suppressed expanding \$ throughout the recursion, and now we
+ // need to replace \$ with $ so it looks as intended.
+ expandInternal(initialStr, 0).replaceAllLiterally("""\$""", "$")
+ }
+
+ def defineVariables(sym: Symbol)(implicit ctx: Context) = {
+ val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r
+
+ val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("")
+ defs(sym) ++= defines(raw).map {
+ str => {
+ val start = skipWhitespace(str, "@define".length)
+ val (key, value) = str.splitAt(skipVariable(str, start))
+ key.drop(start) -> value
+ }
+ } map {
+ case (key, Trim(value)) =>
+ variableName(key) -> value.replaceAll("\\s+\\*+$", "")
+ }
+ }
+
+ /** Maps symbols to the variable -> replacement maps that are defined
+ * in their doc comments
+ */
+ private val defs = mutable.HashMap[Symbol, Map[String, String]]() withDefaultValue Map()
+
+ /** Lookup definition of variable.
+ *
+ * @param vble The variable for which a definition is searched
+ * @param site The class for which doc comments are generated
+ */
+ def lookupVariable(vble: String, site: Symbol)(implicit ctx: Context): Option[String] = site match {
+ case NoSymbol => None
+ case _ =>
+ val searchList =
+ if (site.flags.is(Flags.Module)) site :: site.info.baseClasses
+ else site.info.baseClasses
+
+ searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match {
+ case Some(str) if str startsWith "$" => lookupVariable(str.tail, site)
+ case res => res orElse lookupVariable(vble, site.owner)
+ }
+ }
+
+ /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing
+ * If a symbol does not have a doc comment but some overridden version of it does,
+ * the position of the doc comment of the overridden version is returned instead.
+ */
+ def docCommentPos(sym: Symbol)(implicit ctx: Context): Position =
+ ctx.docCtx.flatMap(_.docstring(sym).map(_.pos)).getOrElse(NoPosition)
+
+ /** A version which doesn't consider self types, as a temporary measure:
+ * an infinite loop has broken out between superComment and cookedDocComment
+ * since r23926.
+ */
+ private def allInheritedOverriddenSymbols(sym: Symbol)(implicit ctx: Context): List[Symbol] = {
+ if (!sym.owner.isClass) Nil
+ else sym.allOverriddenSymbols.toList.filter(_ != NoSymbol) //TODO: could also be `sym.owner.allOverrid..`
+ //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol)
+ }
+
+ class ExpansionLimitExceeded(str: String) extends Exception
+ }
+}
diff --git a/src/dotty/tools/dotc/core/Constants.scala b/src/dotty/tools/dotc/core/Constants.scala
index e13e07f58..1892e4bdc 100644
--- a/src/dotty/tools/dotc/core/Constants.scala
+++ b/src/dotty/tools/dotc/core/Constants.scala
@@ -167,12 +167,19 @@ object Constants {
/** Convert constant value to conform to given type.
*/
def convertTo(pt: Type)(implicit ctx: Context): Constant = {
- def lowerBound(pt: Type): Type = pt.dealias.stripTypeVar match {
- case tref: TypeRef if !tref.symbol.isClass => lowerBound(tref.info.bounds.lo)
- case param: PolyParam => lowerBound(ctx.typerState.constraint.nonParamBounds(param).lo)
+ def classBound(pt: Type): Type = pt.dealias.stripTypeVar match {
+ case tref: TypeRef if !tref.symbol.isClass => classBound(tref.info.bounds.lo)
+ case param: PolyParam =>
+ ctx.typerState.constraint.entry(param) match {
+ case TypeBounds(lo, hi) =>
+ if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound
+ else classBound(lo)
+ case NoType => classBound(param.binder.paramBounds(param.paramNum).lo)
+ case inst => classBound(inst)
+ }
case pt => pt
}
- val target = lowerBound(pt).typeSymbol
+ val target = classBound(pt).typeSymbol
if (target == tpe.typeSymbol)
this
else if ((target == defn.ByteClass) && isByteRange)
diff --git a/src/dotty/tools/dotc/core/Constraint.scala b/src/dotty/tools/dotc/core/Constraint.scala
index 19f93ce47..c99b748b7 100644
--- a/src/dotty/tools/dotc/core/Constraint.scala
+++ b/src/dotty/tools/dotc/core/Constraint.scala
@@ -8,7 +8,7 @@ import collection.mutable
import printing.{Printer, Showable}
import printing.Texts._
import config.Config
-import config.Printers._
+import config.Printers.constr
/** Constraint over undetermined type parameters. Constraints are built
* over values of the following types:
@@ -32,7 +32,7 @@ abstract class Constraint extends Showable {
def contains(tvar: TypeVar): Boolean
/** The constraint entry for given type parameter `param`, or NoType if `param` is not part of
- * the constraint domain.
+ * the constraint domain. Note: Low level, implementation dependent.
*/
def entry(param: PolyParam): Type
@@ -117,12 +117,11 @@ abstract class Constraint extends Showable {
*/
def narrowBound(param: PolyParam, bound: Type, isUpper: Boolean)(implicit ctx: Context): This
- /** Is entry associated with `pt` removable?
- * @param removedParam The index of a parameter which is still present in the
- * entry array, but is going to be removed at the same step,
- * or -1 if no such parameter exists.
+ /** Is entry associated with `pt` removable? This is the case if
+ * all type parameters of the entry are associated with type variables
+ * which have their `inst` fields set.
*/
- def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean
+ def isRemovable(pt: PolyType): Boolean
/** A new constraint with all entries coming from `pt` removed. */
def remove(pt: PolyType)(implicit ctx: Context): This
@@ -144,6 +143,9 @@ abstract class Constraint extends Showable {
/** The uninstantiated typevars of this constraint */
def uninstVars: collection.Seq[TypeVar]
+ /** The weakest constraint that subsumes both this constraint and `other` */
+ def & (other: Constraint)(implicit ctx: Context): Constraint
+
/** Check that no constrained parameter contains itself as a bound */
def checkNonCyclic()(implicit ctx: Context): Unit
diff --git a/src/dotty/tools/dotc/core/ConstraintHandling.scala b/src/dotty/tools/dotc/core/ConstraintHandling.scala
index f8eae186a..3835d553c 100644
--- a/src/dotty/tools/dotc/core/ConstraintHandling.scala
+++ b/src/dotty/tools/dotc/core/ConstraintHandling.scala
@@ -5,7 +5,8 @@ package core
import Types._, Contexts._, Symbols._
import Decorators._
import config.Config
-import config.Printers._
+import config.Printers.{constr, typr}
+import TypeApplications.EtaExpansion
import collection.mutable
/** Methods for adding constraints and solving them.
@@ -34,6 +35,20 @@ trait ConstraintHandling {
/** If the constraint is frozen we cannot add new bounds to the constraint. */
protected var frozenConstraint = false
+ protected var alwaysFluid = false
+
+ /** Perform `op` in a mode where all attempts to set `frozen` to true are ignored */
+ def fluidly[T](op: => T): T = {
+ val saved = alwaysFluid
+ alwaysFluid = true
+ try op finally alwaysFluid = saved
+ }
+
+ /** We are currently comparing polytypes. Used as a flag for
+ * optimization: when `false`, no need to do an expensive `pruneLambdaParams`
+ */
+ protected var comparedPolyTypes: Set[PolyType] = Set.empty
+
private def addOneBound(param: PolyParam, bound: Type, isUpper: Boolean): Boolean =
!constraint.contains(param) || {
def occursIn(bound: Type): Boolean = {
@@ -120,14 +135,14 @@ trait ConstraintHandling {
final def isSubTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
val saved = frozenConstraint
- frozenConstraint = true
+ frozenConstraint = !alwaysFluid
try isSubType(tp1, tp2)
finally frozenConstraint = saved
}
final def isSameTypeWhenFrozen(tp1: Type, tp2: Type): Boolean = {
val saved = frozenConstraint
- frozenConstraint = true
+ frozenConstraint = !alwaysFluid
try isSameType(tp1, tp2)
finally frozenConstraint = saved
}
@@ -163,12 +178,64 @@ trait ConstraintHandling {
}
}
}
+ assert(constraint.contains(param))
val bound = if (fromBelow) constraint.fullLowerBound(param) else constraint.fullUpperBound(param)
val inst = avoidParam(bound)
typr.println(s"approx ${param.show}, from below = $fromBelow, bound = ${bound.show}, inst = ${inst.show}")
inst
}
+ /** The instance type of `param` in the current constraint (which contains `param`).
+ * If `fromBelow` is true, the instance type is the lub of the parameter's
+ * lower bounds; otherwise it is the glb of its upper bounds. However,
+ * a lower bound instantiation can be a singleton type only if the upper bound
+ * is also a singleton type.
+ */
+ def instanceType(param: PolyParam, fromBelow: Boolean): Type = {
+ def upperBound = constraint.fullUpperBound(param)
+ def isSingleton(tp: Type): Boolean = tp match {
+ case tp: SingletonType => true
+ case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2)
+ case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2)
+ case _ => false
+ }
+ def isFullyDefined(tp: Type): Boolean = tp match {
+ case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
+ case tp: TypeProxy => isFullyDefined(tp.underlying)
+ case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
+ case _ => true
+ }
+ def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match {
+ case tp: OrType => true
+ case tp: RefinedOrRecType => isOrType(tp.parent)
+ case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
+ case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
+ case _ => false
+ }
+
+ // First, solve the constraint.
+ var inst = approximation(param, fromBelow)
+
+ // Then, approximate by (1.) - (3.) and simplify as follows.
+ // 1. If instance is from below and is a singleton type, yet
+ // upper bound is not a singleton type, widen the instance.
+ if (fromBelow && isSingleton(inst) && !isSingleton(upperBound))
+ inst = inst.widen
+
+ inst = inst.simplified
+
+ // 2. If instance is from below and is a fully-defined union type, yet upper bound
+ // is not a union type, approximate the union type from above by an intersection
+ // of all common base types.
+ if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound))
+ inst = ctx.harmonizeUnion(inst)
+
+ // 3. If instance is from below, and upper bound has open named parameters
+ // make sure the instance has all named parameters of the bound.
+ if (fromBelow) inst = inst.widenToNamedTypeParams(param.namedTypeParams)
+ inst
+ }
+
/** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have
* for all poly params `p` defined in `c2` as `p >: L2 <: U2`:
*
@@ -193,9 +260,9 @@ trait ConstraintHandling {
}
/** The current bounds of type parameter `param` */
- final def bounds(param: PolyParam): TypeBounds = constraint.entry(param) match {
- case bounds: TypeBounds => bounds
- case _ => param.binder.paramBounds(param.paramNum)
+ final def bounds(param: PolyParam): TypeBounds = {
+ val e = constraint.entry(param)
+ if (e.exists) e.bounds else param.binder.paramBounds(param.paramNum)
}
/** Add polytype `pt`, possibly with type variables `tvars`, to current constraint
@@ -236,6 +303,36 @@ trait ConstraintHandling {
checkPropagated(s"added $description") {
addConstraintInvocations += 1
+ /** When comparing lambdas we might get constraints such as
+ * `A <: X0` or `A = List[X0]` where `A` is a constrained parameter
+ * and `X0` is a lambda parameter. The constraint for `A` is not allowed
+ * to refer to such a lambda parameter because the lambda parameter is
+ * not visible where `A` is defined. Consequently, we need to
+ * approximate the bound so that the lambda parameter does not appear in it.
+ * If `tp` is an upper bound, we need to approximate with something smaller,
+ * otherwise something larger.
+ * Test case in pos/i94-nada.scala. This test crashes with an illegal instance
+ * error in Test2 when the rest of the SI-2712 fix is applied but `pruneLambdaParams` is
+ * missing.
+ */
+ def pruneLambdaParams(tp: Type) =
+ if (comparedPolyTypes.nonEmpty) {
+ val approx = new ApproximatingTypeMap {
+ def apply(t: Type): Type = t match {
+ case t @ PolyParam(pt: PolyType, n) if comparedPolyTypes contains pt =>
+ val effectiveVariance = if (fromBelow) -variance else variance
+ val bounds = pt.paramBounds(n)
+ if (effectiveVariance > 0) bounds.lo
+ else if (effectiveVariance < 0) bounds.hi
+ else NoType
+ case _ =>
+ mapOver(t)
+ }
+ }
+ approx(tp)
+ }
+ else tp
+
def addParamBound(bound: PolyParam) =
if (fromBelow) addLess(bound, param) else addLess(param, bound)
@@ -281,12 +378,18 @@ trait ConstraintHandling {
else NoType
case bound: TypeVar if constraint contains bound.origin =>
prune(bound.underlying)
- case bound: PolyParam if constraint contains bound =>
- if (!addParamBound(bound)) NoType
- else if (fromBelow) defn.NothingType
- else defn.AnyType
+ case bound: PolyParam =>
+ constraint.entry(bound) match {
+ case NoType => pruneLambdaParams(bound)
+ case _: TypeBounds =>
+ if (!addParamBound(bound)) NoType
+ else if (fromBelow) defn.NothingType
+ else defn.AnyType
+ case inst =>
+ prune(inst)
+ }
case _ =>
- bound
+ pruneLambdaParams(bound)
}
try bound match {
diff --git a/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
index 4b7e22653..e0f659cc6 100644
--- a/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
+++ b/src/dotty/tools/dotc/core/ConstraintRunInfo.scala
@@ -1,7 +1,8 @@
package dotty.tools.dotc
package core
-import Contexts._, config.Printers._
+import Contexts._
+import config.Printers.typr
trait ConstraintRunInfo { self: RunInfo =>
private var maxSize = 0
diff --git a/src/dotty/tools/dotc/core/Contexts.scala b/src/dotty/tools/dotc/core/Contexts.scala
index 2fc958a49..edc68588d 100644
--- a/src/dotty/tools/dotc/core/Contexts.scala
+++ b/src/dotty/tools/dotc/core/Contexts.scala
@@ -13,12 +13,13 @@ import Scopes._
import NameOps._
import Uniques._
import SymDenotations._
+import Comments._
import Flags.ParamAccessor
import util.Positions._
import ast.Trees._
import ast.untpd
import util.{FreshNameCreator, SimpleMap, SourceFile, NoSource}
-import typer._
+import typer.{Implicits, ImplicitRunInfo, ImportInfo, Inliner, NamerContextOps, SearchHistory, TypeAssigner, Typer}
import Implicits.ContextualImplicits
import config.Settings._
import config.Config
@@ -29,6 +30,8 @@ import printing._
import config.{Settings, ScalaSettings, Platform, JavaPlatform, SJSPlatform}
import language.implicitConversions
import DenotTransformers.DenotTransformer
+import util.Property.Key
+import xsbti.AnalysisCallback
object Contexts {
@@ -84,6 +87,12 @@ object Contexts {
_compilerCallback = callback
def compilerCallback: CompilerCallback = _compilerCallback
+ /** The sbt callback implementation if we are run from sbt, null otherwise */
+ private[this] var _sbtCallback: AnalysisCallback = _
+ protected def sbtCallback_=(callback: AnalysisCallback) =
+ _sbtCallback = callback
+ def sbtCallback: AnalysisCallback = _sbtCallback
+
/** The current context */
private[this] var _period: Period = _
protected def period_=(period: Period) = {
@@ -123,7 +132,7 @@ object Contexts {
def compilationUnit: CompilationUnit = _compilationUnit
/** The current tree */
- private[this] var _tree: Tree[_ >: Untyped] = _
+ private[this] var _tree: Tree[_ >: Untyped]= _
protected def tree_=(tree: Tree[_ >: Untyped]) = _tree = tree
def tree: Tree[_ >: Untyped] = _tree
@@ -169,9 +178,12 @@ object Contexts {
def freshName(prefix: Name): String = freshName(prefix.toString)
/** A map in which more contextual properties can be stored */
- private var _moreProperties: Map[String, Any] = _
- protected def moreProperties_=(moreProperties: Map[String, Any]) = _moreProperties = moreProperties
- def moreProperties: Map[String, Any] = _moreProperties
+ private var _moreProperties: Map[Key[Any], Any] = _
+ protected def moreProperties_=(moreProperties: Map[Key[Any], Any]) = _moreProperties = moreProperties
+ def moreProperties: Map[Key[Any], Any] = _moreProperties
+
+ def property[T](key: Key[T]): Option[T] =
+ moreProperties.get(key).asInstanceOf[Option[T]]
private var _typeComparer: TypeComparer = _
protected def typeComparer_=(typeComparer: TypeComparer) = _typeComparer = typeComparer
@@ -248,7 +260,7 @@ object Contexts {
withPhase(phase.id)
final def withPhaseNoLater(phase: Phase) =
- if (ctx.phase.id > phase.id) withPhase(phase) else ctx
+ if (phase.exists && ctx.phase.id > phase.id) withPhase(phase) else ctx
/** If -Ydebug is on, the top of the stack trace where this context
* was created, otherwise `null`.
@@ -336,13 +348,17 @@ object Contexts {
def thisCallArgContext: Context = {
assert(owner.isClassConstructor)
val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next
- superOrThisCallContext(owner, constrCtx.scope).setTyperState(typerState)
+ superOrThisCallContext(owner, constrCtx.scope)
+ .setTyperState(typerState)
+ .setGadt(gadt)
}
- /** The super= or this-call context with given owner and locals. */
+ /** The super- or this-call context with given owner and locals. */
private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = {
var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next
- classCtx.outer.fresh.setOwner(owner).setScope(locals).setMode(classCtx.mode | Mode.InSuperCall)
+ classCtx.outer.fresh.setOwner(owner)
+ .setScope(locals)
+ .setMode(classCtx.mode | Mode.InSuperCall)
}
/** The context of expression `expr` seen as a member of a statement sequence */
@@ -366,6 +382,10 @@ object Contexts {
/** Is the verbose option set? */
def verbose: Boolean = base.settings.verbose.value
+ /** Should use colors when printing? */
+ def useColors: Boolean =
+ base.settings.color.value == "always"
+
/** A condensed context containing essential information of this but
* no outer contexts except the initial context.
private var _condensed: CondensedContext = null
@@ -422,6 +442,7 @@ object Contexts {
def setPeriod(period: Period): this.type = { this.period = period; this }
def setMode(mode: Mode): this.type = { this.mode = mode; this }
def setCompilerCallback(callback: CompilerCallback): this.type = { this.compilerCallback = callback; this }
+ def setSbtCallback(callback: AnalysisCallback): this.type = { this.sbtCallback = callback; this }
def setTyperState(typerState: TyperState): this.type = { this.typerState = typerState; this }
def setReporter(reporter: Reporter): this.type = setTyperState(typerState.withReporter(reporter))
def setNewTyperState: this.type = setTyperState(typerState.fresh(isCommittable = true))
@@ -438,12 +459,14 @@ object Contexts {
def setImportInfo(importInfo: ImportInfo): this.type = { this.importInfo = importInfo; this }
def setRunInfo(runInfo: RunInfo): this.type = { this.runInfo = runInfo; this }
def setDiagnostics(diagnostics: Option[StringBuilder]): this.type = { this.diagnostics = diagnostics; this }
+ def setGadt(gadt: GADTMap): this.type = { this.gadt = gadt; this }
def setTypeComparerFn(tcfn: Context => TypeComparer): this.type = { this.typeComparer = tcfn(this); this }
def setSearchHistory(searchHistory: SearchHistory): this.type = { this.searchHistory = searchHistory; this }
def setFreshNames(freshNames: FreshNameCreator): this.type = { this.freshNames = freshNames; this }
- def setMoreProperties(moreProperties: Map[String, Any]): this.type = { this.moreProperties = moreProperties; this }
+ def setMoreProperties(moreProperties: Map[Key[Any], Any]): this.type = { this.moreProperties = moreProperties; this }
- def setProperty(prop: (String, Any)): this.type = setMoreProperties(moreProperties + prop)
+ def setProperty[T](key: Key[T], value: T): this.type =
+ setMoreProperties(moreProperties.updated(key, value))
def setPhase(pid: PhaseId): this.type = setPeriod(Period(runId, pid))
def setPhase(phase: Phase): this.type = setPeriod(Period(runId, phase.start, phase.end))
@@ -544,7 +567,7 @@ object Contexts {
*/
def initialize()(implicit ctx: Context): Unit = {
_platform = newPlatform
- definitions.init
+ definitions.init()
}
def squashed(p: Phase): Phase = {
diff --git a/src/dotty/tools/dotc/core/Decorators.scala b/src/dotty/tools/dotc/core/Decorators.scala
index 60c019bce..b0f1f0c98 100644
--- a/src/dotty/tools/dotc/core/Decorators.scala
+++ b/src/dotty/tools/dotc/core/Decorators.scala
@@ -7,8 +7,9 @@ import Contexts._, Names._, Phases._, printing.Texts._, printing.Printer, printi
import util.Positions.Position, util.SourcePosition
import collection.mutable.ListBuffer
import dotty.tools.dotc.transform.TreeTransforms._
-import typer.Mode
+import ast.tpd._
import scala.language.implicitConversions
+import printing.Formatting._
/** This object provides useful implicit decorators for types defined elsewhere */
object Decorators {
@@ -40,7 +41,7 @@ object Decorators {
*/
implicit class ListDecorator[T](val xs: List[T]) extends AnyVal {
- @inline final def mapconserve[U](f: T => U): List[U] = {
+ final def mapconserve[U](f: T => U): List[U] = {
@tailrec
def loop(mapped: ListBuffer[U], unchanged: List[U], pending: List[T]): List[U] =
if (pending.isEmpty) {
@@ -148,75 +149,37 @@ object Decorators {
}
}
- implicit def sourcePos(pos: Position)(implicit ctx: Context): SourcePosition =
- ctx.source.atPos(pos)
+ implicit def sourcePos(pos: Position)(implicit ctx: Context): SourcePosition = {
+ def recur(inlinedCalls: List[Tree], pos: Position): SourcePosition = inlinedCalls match {
+ case inlinedCall :: rest =>
+ sourceFile(inlinedCall).atPos(pos).withOuter(recur(rest, inlinedCall.pos))
+ case empty =>
+ ctx.source.atPos(pos)
+ }
+ recur(enclosingInlineds, pos)
+ }
- /** The i"..." string interpolator adds two features to the s interpolator:
- * 1) On all Showables, `show` is called instead of `toString`
- * 2) Lists can be formatted using the desired separator between two `%` signs,
- * eg `i"myList = (${myList}%, %)"`
- */
implicit class StringInterpolators(val sc: StringContext) extends AnyVal {
- def i(args: Any*)(implicit ctx: Context): String = {
-
- def treatArg(arg: Any, suffix: String): (Any, String) = arg match {
- case arg: Seq[_] if suffix.nonEmpty && suffix.head == '%' =>
- val (rawsep, rest) = suffix.tail.span(_ != '%')
- val sep = StringContext.treatEscapes(rawsep)
- if (rest.nonEmpty) (arg map treatSingleArg mkString sep, rest.tail)
- else (arg, suffix)
- case _ =>
- (treatSingleArg(arg), suffix)
- }
-
- def treatSingleArg(arg: Any) : Any =
- try
- arg match {
- case arg: Showable => arg.show(ctx.addMode(Mode.FutureDefsOK))
- case _ => arg
- }
- catch {
- case ex: Exception => throw ex // s"(missing due to $ex)"
- }
+ /** General purpose string formatting */
+ def i(args: Any*)(implicit ctx: Context): String =
+ new StringFormatter(sc).assemble(args)
- val prefix :: suffixes = sc.parts.toList
- val (args1, suffixes1) = (args, suffixes).zipped.map(treatArg(_, _)).unzip
- new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*)
- }
+ /** Formatting for error messages: Like `i` but suppress follow-on
+ * error messages after the first one if some of their arguments are "non-sensical".
+ */
+ def em(args: Any*)(implicit ctx: Context): String =
+ new ErrorMessageFormatter(sc).assemble(args)
- /** Lifted from scala.reflect.internal.util
- * A safe combination of [[scala.collection.immutable.StringLike#stripMargin]]
- * and [[scala.StringContext#raw]].
- *
- * The margin of each line is defined by whitespace leading up to a '|' character.
- * This margin is stripped '''before''' the arguments are interpolated into to string.
- *
- * String escape sequences are '''not''' processed; this interpolater is designed to
- * be used with triple quoted Strings.
- *
- * {{{
- * scala> val foo = "f|o|o"
- * foo: String = f|o|o
- * scala> sm"""|${foo}
- * |"""
- * res0: String =
- * "f|o|o
- * "
- * }}}
+ /** Formatting with added explanations: Like `em`, but add explanations to
+ * give more info about type variables and to disambiguate where needed.
*/
- final def sm(args: Any*): String = {
- def isLineBreak(c: Char) = c == '\n' || c == '\f' // compatible with StringLike#isLineBreak
- def stripTrailingPart(s: String) = {
- val (pre, post) = s.span(c => !isLineBreak(c))
- pre + post.stripMargin
- }
- val stripped: List[String] = sc.parts.toList match {
- case head :: tail => head.stripMargin :: (tail map stripTrailingPart)
- case Nil => Nil
- }
- new StringContext(stripped: _*).raw(args: _*)
- }
+ def ex(args: Any*)(implicit ctx: Context): String =
+ explained2(implicit ctx => em(args: _*))
+
+ /** Formatter that adds syntax highlighting to all interpolated values */
+ def hl(args: Any*)(implicit ctx: Context): String =
+ new SyntaxFormatter(sc).assemble(args)
}
}
diff --git a/src/dotty/tools/dotc/core/Definitions.scala b/src/dotty/tools/dotc/core/Definitions.scala
index 6f8a8f837..50746c61d 100644
--- a/src/dotty/tools/dotc/core/Definitions.scala
+++ b/src/dotty/tools/dotc/core/Definitions.scala
@@ -12,7 +12,13 @@ import collection.mutable
import scala.reflect.api.{ Universe => ApiUniverse }
object Definitions {
- val MaxFunctionArity, MaxTupleArity = 22
+ val MaxTupleArity, MaxAbstractFunctionArity = 22
+ val MaxFunctionArity = 30
+ // Awaiting a definite solution that drops the limit altogether, 30 gives a safety
+ // margin over the previous 22, so that treecopiers in miniphases are allowed to
+ // temporarily create larger closures. This is needed in lambda lift where large closures
+ // are first formed by treecopiers before they are split apart into parameters and
+ // environment in the lambdalift transform itself.
}
/** A class defining symbols and types of standard definitions
@@ -167,7 +173,7 @@ class Definitions {
lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, MethodType(Nil, IntType))
lazy val Any_toString = newMethod(AnyClass, nme.toString_, MethodType(Nil, StringType))
lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, ExprType(IntType), Final)
- lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef), Final)
+ lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, MethodType(Nil, ClassClass.typeRef.appliedTo(TypeBounds.empty)), Final)
lazy val Any_isInstanceOf = newT1ParameterlessMethod(AnyClass, nme.isInstanceOf_, _ => BooleanType, Final)
lazy val Any_asInstanceOf = newT1ParameterlessMethod(AnyClass, nme.asInstanceOf_, PolyParam(_, 0), Final)
@@ -244,10 +250,13 @@ class Definitions {
lazy val DottyPredefModuleRef = ctx.requiredModuleRef("dotty.DottyPredef")
def DottyPredefModule(implicit ctx: Context) = DottyPredefModuleRef.symbol
+
+ def Predef_eqAny(implicit ctx: Context) = DottyPredefModule.requiredMethod(nme.eqAny)
+
lazy val DottyArraysModuleRef = ctx.requiredModuleRef("dotty.runtime.Arrays")
def DottyArraysModule(implicit ctx: Context) = DottyArraysModuleRef.symbol
-
- def newRefArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newRefArray")
+ def newGenericArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newGenericArray")
+ def newArrayMethod(implicit ctx: Context) = DottyArraysModule.requiredMethod("newArray")
lazy val NilModuleRef = ctx.requiredModuleRef("scala.collection.immutable.Nil")
def NilModule(implicit ctx: Context) = NilModuleRef.symbol
@@ -279,6 +288,9 @@ class Definitions {
def Array_clone(implicit ctx: Context) = Array_cloneR.symbol
lazy val ArrayConstructorR = ArrayClass.requiredMethodRef(nme.CONSTRUCTOR)
def ArrayConstructor(implicit ctx: Context) = ArrayConstructorR.symbol
+ lazy val ArrayModuleType = ctx.requiredModuleRef("scala.Array")
+ def ArrayModule(implicit ctx: Context) = ArrayModuleType.symbol.moduleClass.asClass
+
lazy val UnitType: TypeRef = valueTypeRef("scala.Unit", BoxedUnitType, java.lang.Void.TYPE, UnitEnc)
def UnitClass(implicit ctx: Context) = UnitType.symbol.asClass
@@ -418,13 +430,17 @@ class Definitions {
def Product_productArity(implicit ctx: Context) = Product_productArityR.symbol
lazy val Product_productPrefixR = ProductClass.requiredMethodRef(nme.productPrefix)
def Product_productPrefix(implicit ctx: Context) = Product_productPrefixR.symbol
- lazy val LanguageModuleRef = ctx.requiredModule("dotty.language")
+ lazy val LanguageModuleRef = ctx.requiredModule("scala.language")
def LanguageModuleClass(implicit ctx: Context) = LanguageModuleRef.symbol.moduleClass.asClass
lazy val NonLocalReturnControlType: TypeRef = ctx.requiredClassRef("scala.runtime.NonLocalReturnControl")
+
lazy val ClassTagType = ctx.requiredClassRef("scala.reflect.ClassTag")
def ClassTagClass(implicit ctx: Context) = ClassTagType.symbol.asClass
def ClassTagModule(implicit ctx: Context) = ClassTagClass.companionModule
+ lazy val EqType = ctx.requiredClassRef("scala.Eq")
+ def EqClass(implicit ctx: Context) = EqType.symbol.asClass
+
// Annotation base classes
lazy val AnnotationType = ctx.requiredClassRef("scala.annotation.Annotation")
def AnnotationClass(implicit ctx: Context) = AnnotationType.symbol.asClass
@@ -438,6 +454,8 @@ class Definitions {
def AliasAnnot(implicit ctx: Context) = AliasAnnotType.symbol.asClass
lazy val AnnotationDefaultAnnotType = ctx.requiredClassRef("dotty.annotation.internal.AnnotationDefault")
def AnnotationDefaultAnnot(implicit ctx: Context) = AnnotationDefaultAnnotType.symbol.asClass
+ lazy val BodyAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Body")
+ def BodyAnnot(implicit ctx: Context) = BodyAnnotType.symbol.asClass
lazy val ChildAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Child")
def ChildAnnot(implicit ctx: Context) = ChildAnnotType.symbol.asClass
lazy val CovariantBetweenAnnotType = ctx.requiredClassRef("dotty.annotation.internal.CovariantBetween")
@@ -446,6 +464,12 @@ class Definitions {
def ContravariantBetweenAnnot(implicit ctx: Context) = ContravariantBetweenAnnotType.symbol.asClass
lazy val DeprecatedAnnotType = ctx.requiredClassRef("scala.deprecated")
def DeprecatedAnnot(implicit ctx: Context) = DeprecatedAnnotType.symbol.asClass
+ lazy val ImplicitNotFoundAnnotType = ctx.requiredClassRef("scala.annotation.implicitNotFound")
+ def ImplicitNotFoundAnnot(implicit ctx: Context) = ImplicitNotFoundAnnotType.symbol.asClass
+ lazy val InlineAnnotType = ctx.requiredClassRef("scala.inline")
+ def InlineAnnot(implicit ctx: Context) = InlineAnnotType.symbol.asClass
+ lazy val InlineParamAnnotType = ctx.requiredClassRef("dotty.annotation.internal.InlineParam")
+ def InlineParamAnnot(implicit ctx: Context) = InlineParamAnnotType.symbol.asClass
lazy val InvariantBetweenAnnotType = ctx.requiredClassRef("dotty.annotation.internal.InvariantBetween")
def InvariantBetweenAnnot(implicit ctx: Context) = InvariantBetweenAnnotType.symbol.asClass
lazy val MigrationAnnotType = ctx.requiredClassRef("scala.annotation.migration")
@@ -456,6 +480,8 @@ class Definitions {
def RemoteAnnot(implicit ctx: Context) = RemoteAnnotType.symbol.asClass
lazy val RepeatedAnnotType = ctx.requiredClassRef("dotty.annotation.internal.Repeated")
def RepeatedAnnot(implicit ctx: Context) = RepeatedAnnotType.symbol.asClass
+ lazy val SourceFileAnnotType = ctx.requiredClassRef("dotty.annotation.internal.SourceFile")
+ def SourceFileAnnot(implicit ctx: Context) = SourceFileAnnotType.symbol.asClass
lazy val ScalaSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaSignature")
def ScalaSignatureAnnot(implicit ctx: Context) = ScalaSignatureAnnotType.symbol.asClass
lazy val ScalaLongSignatureAnnotType = ctx.requiredClassRef("scala.reflect.ScalaLongSignature")
@@ -472,6 +498,8 @@ class Definitions {
def TASTYLongSignatureAnnot(implicit ctx: Context) = TASTYLongSignatureAnnotType.symbol.asClass
lazy val TailrecAnnotType = ctx.requiredClassRef("scala.annotation.tailrec")
def TailrecAnnot(implicit ctx: Context) = TailrecAnnotType.symbol.asClass
+ lazy val SwitchAnnotType = ctx.requiredClassRef("scala.annotation.switch")
+ def SwitchAnnot(implicit ctx: Context) = SwitchAnnotType.symbol.asClass
lazy val ThrowsAnnotType = ctx.requiredClassRef("scala.throws")
def ThrowsAnnot(implicit ctx: Context) = ThrowsAnnotType.symbol.asClass
lazy val TransientAnnotType = ctx.requiredClassRef("scala.transient")
@@ -569,7 +597,7 @@ class Definitions {
// ----- Symbol sets ---------------------------------------------------
- lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxFunctionArity, 0)
+ lazy val AbstractFunctionType = mkArityArray("scala.runtime.AbstractFunction", MaxAbstractFunctionArity, 0)
val AbstractFunctionClassPerRun = new PerRun[Array[Symbol]](implicit ctx => AbstractFunctionType.map(_.symbol.asClass))
def AbstractFunctionClass(n: Int)(implicit ctx: Context) = AbstractFunctionClassPerRun()(ctx)(n)
lazy val FunctionType = mkArityArray("scala.Function", MaxFunctionArity, 0)
@@ -598,29 +626,44 @@ class Definitions {
}
def isBottomClass(cls: Symbol) = cls == NothingClass || cls == NullClass
- def isBottomType(tp: Type) = tp.derivesFrom(NothingClass) || tp.derivesFrom(NullClass)
+ def isBottomType(tp: Type) = {
+ def test(implicit ctx: Context) = tp.derivesFrom(NothingClass) || tp.derivesFrom(NullClass)
+ try test
+ catch { // See remark in SymDenotations#accessWithin
+ case ex: NotDefinedHere => test(ctx.addMode(Mode.FutureDefsOK))
+ }
+ }
def isFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.Function)
def isAbstractFunctionClass(cls: Symbol) = isVarArityClass(cls, tpnme.AbstractFunction)
def isTupleClass(cls: Symbol) = isVarArityClass(cls, tpnme.Tuple)
def isProductClass(cls: Symbol) = isVarArityClass(cls, tpnme.Product)
- val RootImportFns = List[() => TermRef](
- () => JavaLangPackageVal.termRef,
- () => ScalaPackageVal.termRef,
+ val StaticRootImportFns = List[() => TermRef](
+ () => JavaLangPackageVal.termRef,
+ () => ScalaPackageVal.termRef
+ )
+
+ val PredefImportFns = List[() => TermRef](
() => ScalaPredefModuleRef,
- () => DottyPredefModuleRef)
+ () => DottyPredefModuleRef
+ )
+
+ lazy val RootImportFns =
+ if (ctx.settings.YnoImports.value) List.empty[() => TermRef]
+ else if (ctx.settings.YnoPredef.value) StaticRootImportFns
+ else StaticRootImportFns ++ PredefImportFns
lazy val RootImportTypes = RootImportFns.map(_())
- /** `Modules whose members are in the default namespace and their module classes */
+ /** Modules whose members are in the default namespace and their module classes */
lazy val UnqualifiedOwnerTypes: Set[NamedType] =
RootImportTypes.toSet[NamedType] ++ RootImportTypes.map(_.symbol.moduleClass.typeRef)
lazy val PhantomClasses = Set[Symbol](AnyClass, AnyValClass, NullClass, NothingClass)
def isPolymorphicAfterErasure(sym: Symbol) =
- (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf) || (sym eq newRefArrayMethod)
+ (sym eq Any_isInstanceOf) || (sym eq Any_asInstanceOf)
def isTupleType(tp: Type)(implicit ctx: Context) = {
val arity = tp.dealias.argInfos.length
@@ -641,71 +684,6 @@ class Definitions {
def functionArity(tp: Type)(implicit ctx: Context) = tp.dealias.argInfos.length - 1
- // ----- LambdaXYZ traits ------------------------------------------
-
- private var myLambdaTraits: Set[Symbol] = Set()
-
- /** The set of HigherKindedXYZ traits encountered so far */
- def lambdaTraits: Set[Symbol] = myLambdaTraits
-
- private var LambdaTraitForVariances = mutable.Map[List[Int], ClassSymbol]()
-
- /** The HigherKinded trait corresponding to symbols `boundSyms` (which are assumed
- * to be the type parameters of a higher-kided type). This is a class symbol that
- * would be generated by the following schema.
- *
- * trait LambdaXYZ extends Object with P1 with ... with Pn {
- * type v_1 hk$0; ...; type v_N hk$N;
- * type +$Apply
- * }
- *
- * Here:
- *
- * - v_i are the variances of the bound symbols (i.e. +, -, or empty).
- * - XYZ is a string of length N with one letter for each variant of a bound symbol,
- * using `P` (positive variance), `N` (negative variance), `I` (invariant).
- * - for each positive or negative variance v_i there is a parent trait Pj which
- * is the same as LambdaXYZ except that it has `I` in i-th position.
- */
- def LambdaTrait(vcs: List[Int]): ClassSymbol = {
- assert(vcs.nonEmpty)
-
- def varianceFlags(v: Int) = v match {
- case -1 => Contravariant
- case 0 => EmptyFlags
- case 1 => Covariant
- }
-
- val completer = new LazyType {
- def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
- val cls = denot.asClass.classSymbol
- val paramDecls = newScope
- for (i <- 0 until vcs.length)
- newTypeParam(cls, tpnme.hkArg(i), varianceFlags(vcs(i)), paramDecls)
- newTypeField(cls, tpnme.hkApply, Covariant, paramDecls)
- val parentTraitRefs =
- for (i <- 0 until vcs.length if vcs(i) != 0)
- yield LambdaTrait(vcs.updated(i, 0)).typeRef
- denot.info = ClassInfo(
- ScalaPackageClass.thisType, cls, ObjectClass.typeRef :: parentTraitRefs.toList, paramDecls)
- }
- }
-
- val traitName = tpnme.hkLambda(vcs)
-
- def createTrait = {
- val cls = newClassSymbol(
- ScalaPackageClass,
- traitName,
- PureInterfaceCreationFlags | Synthetic,
- completer)
- myLambdaTraits += cls
- cls
- }
-
- LambdaTraitForVariances.getOrElseUpdate(vcs, createTrait)
- }
-
// ----- primitive value class machinery ------------------------------------------
/** This class would also be obviated by the implicit function type design */
@@ -798,7 +776,7 @@ class Definitions {
private[this] var _isInitialized = false
private def isInitialized = _isInitialized
- def init(implicit ctx: Context) = {
+ def init()(implicit ctx: Context) = {
this.ctx = ctx
if (!_isInitialized) {
// force initialization of every symbol that is synthesized or hijacked by the compiler
diff --git a/src/dotty/tools/dotc/core/Denotations.scala b/src/dotty/tools/dotc/core/Denotations.scala
index b52c11201..7866d6697 100644
--- a/src/dotty/tools/dotc/core/Denotations.scala
+++ b/src/dotty/tools/dotc/core/Denotations.scala
@@ -6,6 +6,7 @@ import SymDenotations.{ SymDenotation, ClassDenotation, NoDenotation, NotDefined
import Contexts.{Context, ContextBase}
import Names.{Name, PreName}
import Names.TypeName
+import StdNames._
import Symbols.NoSymbol
import Symbols._
import Types._
@@ -18,7 +19,6 @@ import printing.Texts._
import printing.Printer
import io.AbstractFile
import config.Config
-import typer.Mode
import util.common._
import collection.mutable.ListBuffer
import Decorators.SymbolIteratorDecorator
@@ -71,6 +71,8 @@ import Decorators.SymbolIteratorDecorator
*/
object Denotations {
+ implicit def eqDenotation: Eq[Denotation, Denotation] = Eq
+
/** A denotation is the result of resolving
* a name (either simple identifier or select) during a given period.
*
@@ -123,11 +125,11 @@ object Denotations {
/** The signature of the denotation. */
def signature(implicit ctx: Context): Signature
- /** Resolve overloaded denotation to pick the one with the given signature
+ /** Resolve overloaded denotation to pick the ones with the given signature
* when seen from prefix `site`.
* @param relaxed When true, consider only parameter signatures for a match.
*/
- def atSignature(sig: Signature, site: Type = NoPrefix, relaxed: Boolean = false)(implicit ctx: Context): SingleDenotation
+ def atSignature(sig: Signature, site: Type = NoPrefix, relaxed: Boolean = false)(implicit ctx: Context): Denotation
/** The variant of this denotation that's current in the given context, or
* `NotDefinedHereDenotation` if this denotation does not exist at current phase, but
@@ -145,6 +147,9 @@ object Denotations {
/** Is this denotation different from NoDenotation or an ErrorDenotation? */
def exists: Boolean = true
+ /** A denotation with the info of this denotation transformed using `f` */
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation
+
/** If this denotation does not exist, fallback to alternative */
final def orElse(that: => Denotation) = if (this.exists) this else that
@@ -158,7 +163,10 @@ object Denotations {
* or NoDenotation if no satisfying alternative exists.
* @throws TypeError if there is at more than one alternative that satisfies `p`.
*/
- def suchThat(p: Symbol => Boolean): SingleDenotation
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation
+
+ /** If this is a SingleDenotation, return it, otherwise throw a TypeError */
+ def checkUnique(implicit ctx: Context): SingleDenotation = suchThat(alwaysTrue)
/** Does this denotation have an alternative that satisfies the predicate `p`? */
def hasAltWith(p: SingleDenotation => Boolean): Boolean
@@ -228,13 +236,36 @@ object Denotations {
/** The alternative of this denotation that has a type matching `targetType` when seen
* as a member of type `site`, `NoDenotation` if none exists.
*/
- def matchingDenotation(site: Type, targetType: Type)(implicit ctx: Context): SingleDenotation =
- if (isOverloaded)
- atSignature(targetType.signature, site, relaxed = true).matchingDenotation(site, targetType)
- else if (exists && !site.memberInfo(symbol).matchesLoosely(targetType))
- NoDenotation
- else
- asSingleDenotation
+ def matchingDenotation(site: Type, targetType: Type)(implicit ctx: Context): SingleDenotation = {
+ def qualifies(sym: Symbol) = site.memberInfo(sym).matchesLoosely(targetType)
+ if (isOverloaded) {
+ atSignature(targetType.signature, site, relaxed = true) match {
+ case sd: SingleDenotation => sd.matchingDenotation(site, targetType)
+ case md => md.suchThat(qualifies(_))
+ }
+ }
+ else if (exists && !qualifies(symbol)) NoDenotation
+ else asSingleDenotation
+ }
+
+ /** Handle merge conflict by throwing a `MergeError` exception */
+ private def mergeConflict(tp1: Type, tp2: Type)(implicit ctx: Context): Type = {
+ def showType(tp: Type) = tp match {
+ case ClassInfo(_, cls, _, _, _) => cls.showLocated
+ case bounds: TypeBounds => i"type bounds $bounds"
+ case _ => tp.show
+ }
+ if (true) throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2)
+ else throw new Error(s"cannot merge ${showType(tp1)} with ${showType(tp2)}") // flip condition for debugging
+ }
+
+ /** Merge two lists of names. If names in corresponding positions match, keep them,
+ * otherwise generate new synthetic names.
+ */
+ def mergeNames[N <: Name](names1: List[N], names2: List[N], syntheticName: Int => N): List[N] = {
+ for ((name1, name2, idx) <- (names1, names2, 0 until names1.length).zipped)
+ yield if (name1 == name2) name1 else syntheticName(idx)
+ }.toList
/** Form a denotation by conjoining with denotation `that`.
*
@@ -266,6 +297,50 @@ object Denotations {
*/
def & (that: Denotation, pre: Type, safeIntersection: Boolean = false)(implicit ctx: Context): Denotation = {
+ /** Normally, `tp1 & tp2`. Special cases for matching methods and classes, with
+ * the possibility of raising a merge error.
+ */
+ def infoMeet(tp1: Type, tp2: Type): Type = {
+ if (tp1 eq tp2) tp1
+ else tp1 match {
+ case tp1: TypeBounds =>
+ tp2 match {
+ case tp2: TypeBounds => if (safeIntersection) tp1 safe_& tp2 else tp1 & tp2
+ case tp2: ClassInfo if tp1 contains tp2 => tp2
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix)
+ case tp2: TypeBounds if tp2 contains tp1 => tp1
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1 @ MethodType(names1, formals1) if isTerm =>
+ tp2 match {
+ case tp2 @ MethodType(names2, formals2) if ctx.typeComparer.matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit =>
+ tp1.derivedMethodType(
+ mergeNames(names1, names2, nme.syntheticParamName),
+ formals1,
+ infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1)))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case tp1: PolyType if isTerm =>
+ tp2 match {
+ case tp2: PolyType if ctx.typeComparer.matchingTypeParams(tp1, tp2) =>
+ tp1.derivedPolyType(
+ mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
+ tp1.paramBounds,
+ infoMeet(tp1.resultType, tp2.resultType.subst(tp2, tp1)))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case _ =>
+ tp1 & tp2
+ }
+ }
+
/** Try to merge denot1 and denot2 without adding a new signature. */
def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match {
case denot1 @ MultiDenotation(denot11, denot12) =>
@@ -278,63 +353,95 @@ object Denotations {
}
case denot1: SingleDenotation =>
if (denot1 eq denot2) denot1
- else if (denot1.matches(denot2)) {
- val info1 = denot1.info
- val info2 = denot2.info
- val sym1 = denot1.symbol
- val sym2 = denot2.symbol
- val sym2Accessible = sym2.isAccessibleFrom(pre)
-
- /** Does `sym1` come before `sym2` in the linearization of `pre`? */
- def precedes(sym1: Symbol, sym2: Symbol) = {
- def precedesIn(bcs: List[ClassSymbol]): Boolean = bcs match {
- case bc :: bcs1 => (sym1 eq bc) || !(sym2 eq bc) && precedesIn(bcs1)
- case Nil => true
- }
- sym1.derivesFrom(sym2) ||
- !sym2.derivesFrom(sym1) && precedesIn(pre.baseClasses)
- }
+ else if (denot1.matches(denot2)) mergeSingleDenot(denot1, denot2)
+ else NoDenotation
+ }
- /** Preference according to partial pre-order (isConcrete, precedes) */
- def preferSym(sym1: Symbol, sym2: Symbol) =
- sym1.eq(sym2) ||
- sym1.isAsConcrete(sym2) &&
- (!sym2.isAsConcrete(sym1) || precedes(sym1.owner, sym2.owner))
+ /** Try to merge single-denotations. */
+ def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): SingleDenotation = {
+ val info1 = denot1.info
+ val info2 = denot2.info
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
- /** Sym preference provided types also override */
- def prefer(sym1: Symbol, sym2: Symbol, info1: Type, info2: Type) =
- preferSym(sym1, sym2) && info1.overrides(info2)
+ val sym2Accessible = sym2.isAccessibleFrom(pre)
- if (sym2Accessible && prefer(sym2, sym1, info2, info1)) denot2
- else {
- val sym1Accessible = sym1.isAccessibleFrom(pre)
- if (sym1Accessible && prefer(sym1, sym2, info1, info2)) denot1
- else if (sym1Accessible && sym2.exists && !sym2Accessible) denot1
- else if (sym2Accessible && sym1.exists && !sym1Accessible) denot2
- else {
- val sym =
- if (!sym1.exists) sym2
- else if (!sym2.exists) sym1
- else if (preferSym(sym2, sym1)) sym2
- else sym1
- val jointInfo =
- try
- if (safeIntersection)
- info1 safe_& info2
- else
- info1 & info2
- catch {
- case ex: MergeError =>
- if (pre.widen.classSymbol.is(Scala2x) || ctx.scala2Mode)
- info1 // follow Scala2 linearization -
- // compare with way merge is performed in SymDenotation#computeMembersNamed
- else
- throw new MergeError(s"${ex.getMessage} as members of type ${pre.show}", ex.tp1, ex.tp2)
- }
- new JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor)
+ /** Does `sym1` come before `sym2` in the linearization of `pre`? */
+ def precedes(sym1: Symbol, sym2: Symbol) = {
+ def precedesIn(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 => (sym1 eq bc) || !(sym2 eq bc) && precedesIn(bcs1)
+ case Nil => true
+ }
+ (sym1 ne sym2) &&
+ (sym1.derivesFrom(sym2) ||
+ !sym2.derivesFrom(sym1) && precedesIn(pre.baseClasses))
+ }
+
+ /** Similar to SymDenotation#accessBoundary, but without the special cases. */
+ def accessBoundary(sym: Symbol) =
+ if (sym.is(Private)) sym.owner
+ else sym.privateWithin.orElse(
+ if (sym.is(Protected)) sym.owner.enclosingPackageClass
+ else defn.RootClass)
+
+ /** Establish a partial order "preference" order between symbols.
+ * Give preference to `sym1` over `sym2` if one of the following
+ * conditions holds, in decreasing order of weight:
+ * 1. sym1 is concrete and sym2 is abstract
+ * 2. The owner of sym1 comes before the owner of sym2 in the linearization
+ * of the type of the prefix `pre`.
+ * 3. The access boundary of sym2 is properly contained in the access
+ * boundary of sym1. For protected access, we count the enclosing
+ * package as access boundary.
+ * 4. sym1 a method but sym2 is not.
+ * The aim of these criteria is to give some disambiguation on access which
+ * - does not depend on textual order or other arbitrary choices
+ * - minimizes raising of doubleDef errors
+ */
+ def preferSym(sym1: Symbol, sym2: Symbol) =
+ sym1.eq(sym2) ||
+ sym1.isAsConcrete(sym2) &&
+ (!sym2.isAsConcrete(sym1) ||
+ precedes(sym1.owner, sym2.owner) ||
+ accessBoundary(sym2).isProperlyContainedIn(accessBoundary(sym1)) ||
+ sym1.is(Method) && !sym2.is(Method)) ||
+ sym1.info.isErroneous
+
+ /** Sym preference provided types also override */
+ def prefer(sym1: Symbol, sym2: Symbol, info1: Type, info2: Type) =
+ preferSym(sym1, sym2) && info1.overrides(info2)
+
+ def handleDoubleDef =
+ if (preferSym(sym1, sym2)) denot1
+ else if (preferSym(sym2, sym1)) denot2
+ else doubleDefError(denot1, denot2, pre)
+
+ if (sym2Accessible && prefer(sym2, sym1, info2, info1)) denot2
+ else {
+ val sym1Accessible = sym1.isAccessibleFrom(pre)
+ if (sym1Accessible && prefer(sym1, sym2, info1, info2)) denot1
+ else if (sym1Accessible && sym2.exists && !sym2Accessible) denot1
+ else if (sym2Accessible && sym1.exists && !sym1Accessible) denot2
+ else if (isDoubleDef(sym1, sym2)) handleDoubleDef
+ else {
+ val sym =
+ if (!sym1.exists) sym2
+ else if (!sym2.exists) sym1
+ else if (preferSym(sym2, sym1)) sym2
+ else sym1
+ val jointInfo =
+ try infoMeet(info1, info2)
+ catch {
+ case ex: MergeError =>
+ if (pre.widen.classSymbol.is(Scala2x) || ctx.scala2Mode)
+ info1 // follow Scala2 linearization -
+ // compare with way merge is performed in SymDenotation#computeMembersNamed
+ else
+ throw new MergeError(s"${ex.getMessage} as members of type ${pre.show}", ex.tp1, ex.tp2)
}
- }
- } else NoDenotation
+ new JointRefDenotation(sym, jointInfo, denot1.validFor & denot2.validFor)
+ }
+ }
}
if (this eq that) this
@@ -355,6 +462,46 @@ object Denotations {
*/
def | (that: Denotation, pre: Type)(implicit ctx: Context): Denotation = {
+ /** Normally, `tp1 | tp2`. Special cases for matching methods and classes, with
+ * the possibility of raising a merge error.
+ */
+ def infoJoin(tp1: Type, tp2: Type): Type = tp1 match {
+ case tp1: TypeBounds =>
+ tp2 match {
+ case tp2: TypeBounds => tp1 | tp2
+ case tp2: ClassInfo if tp1 contains tp2 => tp1
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1: ClassInfo =>
+ tp2 match {
+ case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix | tp2.prefix)
+ case tp2: TypeBounds if tp2 contains tp1 => tp2
+ case _ => mergeConflict(tp1, tp2)
+ }
+ case tp1 @ MethodType(names1, formals1) =>
+ tp2 match {
+ case tp2 @ MethodType(names2, formals2)
+ if ctx.typeComparer.matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
+ tp1.isImplicit == tp2.isImplicit =>
+ tp1.derivedMethodType(
+ mergeNames(names1, names2, nme.syntheticParamName),
+ formals1, tp1.resultType | tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case tp1: PolyType =>
+ tp2 match {
+ case tp2: PolyType if ctx.typeComparer.matchingTypeParams(tp1, tp2) =>
+ tp1.derivedPolyType(
+ mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
+ tp1.paramBounds, tp1.resultType | tp2.resultType.subst(tp2, tp1))
+ case _ =>
+ mergeConflict(tp1, tp2)
+ }
+ case _ =>
+ tp1 | tp2
+ }
+
def unionDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation =
if (denot1.matches(denot2)) {
val sym1 = denot1.symbol
@@ -384,7 +531,8 @@ object Denotations {
}
lubSym(sym1.allOverriddenSymbols, NoSymbol)
}
- new JointRefDenotation(jointSym, info1 | info2, denot1.validFor & denot2.validFor)
+ new JointRefDenotation(
+ jointSym, infoJoin(info1, info2), denot1.validFor & denot2.validFor)
}
}
else NoDenotation
@@ -419,19 +567,21 @@ object Denotations {
final def validFor = denot1.validFor & denot2.validFor
final def isType = false
final def signature(implicit ctx: Context) = Signature.OverloadedSignature
- def atSignature(sig: Signature, site: Type, relaxed: Boolean)(implicit ctx: Context): SingleDenotation =
- denot1.atSignature(sig, site, relaxed) orElse denot2.atSignature(sig, site, relaxed)
+ def atSignature(sig: Signature, site: Type, relaxed: Boolean)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.atSignature(sig, site, relaxed), denot2.atSignature(sig, site, relaxed))
def currentIfExists(implicit ctx: Context): Denotation =
derivedMultiDenotation(denot1.currentIfExists, denot2.currentIfExists)
def current(implicit ctx: Context): Denotation =
derivedMultiDenotation(denot1.current, denot2.current)
def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
denot1.altsWith(p) ++ denot2.altsWith(p)
- def suchThat(p: Symbol => Boolean): SingleDenotation = {
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation = {
val sd1 = denot1.suchThat(p)
val sd2 = denot2.suchThat(p)
if (sd1.exists)
- if (sd2.exists) throw new TypeError(s"failure to disambiguate overloaded reference $this")
+ if (sd2.exists)
+ if (isDoubleDef(denot1.symbol, denot2.symbol)) doubleDefError(denot1, denot2)
+ else throw new TypeError(s"failure to disambiguate overloaded reference $this")
else sd1
else sd2
}
@@ -444,6 +594,8 @@ object Denotations {
else if (!d2.exists) d1
else derivedMultiDenotation(d1, d2)
}
+ def mapInfo(f: Type => Type)(implicit ctx: Context): Denotation =
+ derivedMultiDenotation(denot1.mapInfo(f), denot2.mapInfo(f))
def derivedMultiDenotation(d1: Denotation, d2: Denotation) =
if ((d1 eq denot1) && (d2 eq denot2)) this else MultiDenotation(d1, d2)
override def toString = alternatives.mkString(" <and> ")
@@ -465,7 +617,7 @@ object Denotations {
try info.signature
catch { // !!! DEBUG
case scala.util.control.NonFatal(ex) =>
- ctx.println(s"cannot take signature of ${info.show}")
+ ctx.echo(s"cannot take signature of ${info.show}")
throw ex
}
case _ => Signature.NotAMethod
@@ -476,12 +628,15 @@ object Denotations {
if ((symbol eq this.symbol) && (info eq this.info)) this
else newLikeThis(symbol, info)
+ def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation =
+ derivedSingleDenotation(symbol, f(info))
+
def orElse(that: => SingleDenotation) = if (this.exists) this else that
def altsWith(p: Symbol => Boolean): List[SingleDenotation] =
if (exists && p(symbol)) this :: Nil else Nil
- def suchThat(p: Symbol => Boolean): SingleDenotation =
+ def suchThat(p: Symbol => Boolean)(implicit ctx: Context): SingleDenotation =
if (exists && p(symbol)) this else NoDenotation
def hasAltWith(p: SingleDenotation => Boolean): Boolean =
@@ -581,7 +736,8 @@ object Denotations {
*/
private def bringForward()(implicit ctx: Context): SingleDenotation = this match {
case denot: SymDenotation if ctx.stillValid(denot) =>
- assert(ctx.runId > validFor.runId, s"denotation $denot invalid in run ${ctx.runId}. ValidFor: $validFor")
+ assert(ctx.runId > validFor.runId || ctx.settings.YtestPickler.value, // mixing test pickler with debug printing can travel back in time
+ s"denotation $denot invalid in run ${ctx.runId}. ValidFor: $validFor")
var d: SingleDenotation = denot
do {
d.validFor = Period(ctx.period.runId, d.validFor.firstPhaseId, d.validFor.lastPhaseId)
@@ -646,7 +802,13 @@ object Denotations {
var startPid = nextTransformerId + 1
val transformer = ctx.denotTransformers(nextTransformerId)
//println(s"transforming $this with $transformer")
- next = transformer.transform(cur)(ctx.withPhase(transformer)).syncWithParents
+ try {
+ next = transformer.transform(cur)(ctx.withPhase(transformer)).syncWithParents
+ } catch {
+ case ex: CyclicReference =>
+ println(s"error while transforming $this") // DEBUG
+ throw ex
+ }
if (next eq cur)
startPid = cur.validFor.firstPhaseId
else {
@@ -656,8 +818,7 @@ object Denotations {
next.resetFlag(Frozen)
case _ =>
}
- next.nextInRun = cur.nextInRun
- cur.nextInRun = next
+ next.insertAfter(cur)
cur = next
}
cur.validFor = Period(currentPeriod.runId, startPid, transformer.lastPhaseId)
@@ -673,6 +834,10 @@ object Denotations {
while (!(cur.validFor contains currentPeriod)) {
//println(s"searching: $cur at $currentPeriod, valid for ${cur.validFor}")
cur = cur.nextInRun
+ // Note: One might be tempted to add a `prev` field to get to the new denotation
+ // more directly here. I tried that, but it degrades rather than improves
+ // performance: Test setup: Compile everything in dotc and immediate subdirectories
+ // 10 times. Best out of 10: 18154ms with `prev` field, 17777ms without.
cnt += 1
if (cnt > MaxPossiblePhaseId) return NotDefinedHereDenotation
}
@@ -709,12 +874,10 @@ object Denotations {
// printPeriods(current)
this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId)
if (current.validFor.firstPhaseId >= targetId)
- replaceDenotation(current)
+ insertInsteadOf(current)
else {
- // insert this denotation after current
current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1)
- this.nextInRun = current.nextInRun
- current.nextInRun = this
+ insertAfter(current)
}
// printPeriods(this)
}
@@ -732,19 +895,35 @@ object Denotations {
val current1: SingleDenotation = f(current.asSymDenotation)
if (current1 ne current) {
current1.validFor = current.validFor
- current1.replaceDenotation(current)
+ current1.insertInsteadOf(current)
}
hasNext = current1.nextInRun.validFor.code > current1.validFor.code
current = current1.nextInRun
}
}
- private def replaceDenotation(current: SingleDenotation): Unit = {
- var prev = current
- while (prev.nextInRun ne current) prev = prev.nextInRun
+ /** Insert this denotation so that it follows `prev`. */
+ private def insertAfter(prev: SingleDenotation) = {
+ this.nextInRun = prev.nextInRun
+ prev.nextInRun = this
+ }
+
+ /** Insert this denotation instead of `old`.
+ * Also ensure that `old` refers with `nextInRun` to this denotation
+ * and set its `validFor` field to `NoWhere`. This is necessary so that
+ * references to the old denotation can be brought forward via `current`
+ * to a valid denotation.
+ *
+ * The code to achieve this is subtle in that it works correctly
+ * whether the replaced denotation is the only one in its cycle or not.
+ */
+ private def insertInsteadOf(old: SingleDenotation): Unit = {
+ var prev = old
+ while (prev.nextInRun ne old) prev = prev.nextInRun
+ // order of next two assignments is important!
prev.nextInRun = this
- this.nextInRun = current.nextInRun
- current.validFor = Nowhere
+ this.nextInRun = old.nextInRun
+ old.validFor = Nowhere
}
def staleSymbolError(implicit ctx: Context) = {
@@ -884,6 +1063,27 @@ object Denotations {
*/
case class NoQualifyingRef(alts: List[SingleDenotation])(implicit ctx: Context) extends ErrorDenotation
+ /** A double definition
+ */
+ def isDoubleDef(sym1: Symbol, sym2: Symbol)(implicit ctx: Context): Boolean =
+ (sym1.exists && sym2.exists &&
+ (sym1 ne sym2) && (sym1.owner eq sym2.owner) &&
+ !sym1.is(Bridge) && !sym2.is(Bridge))
+
+ def doubleDefError(denot1: Denotation, denot2: Denotation, pre: Type = NoPrefix)(implicit ctx: Context): Nothing = {
+ val sym1 = denot1.symbol
+ val sym2 = denot2.symbol
+ def fromWhere = if (pre == NoPrefix) "" else i"\nwhen seen as members of $pre"
+ throw new MergeError(
+ i"""cannot merge
+ | $sym1: ${sym1.info} and
+ | $sym2: ${sym2.info};
+ |they are both defined in ${sym1.owner} but have matching signatures
+ | ${denot1.info} and
+ | ${denot2.info}$fromWhere""",
+ denot1.info, denot2.info)
+ }
+
// --------------- PreDenotations -------------------------------------------------
/** A PreDenotation represents a group of single denotations
@@ -1038,5 +1238,4 @@ object Denotations {
util.Stats.record("not defined here")
override def getMessage() = msg
}
-}
-
+}
\ No newline at end of file
diff --git a/src/dotty/tools/dotc/core/Flags.scala b/src/dotty/tools/dotc/core/Flags.scala
index f866621f2..3f4433708 100644
--- a/src/dotty/tools/dotc/core/Flags.scala
+++ b/src/dotty/tools/dotc/core/Flags.scala
@@ -431,12 +431,13 @@ object Flags {
/** Flags representing source modifiers */
final val SourceModifierFlags =
- commonFlags(Private, Protected, Abstract, Final,
+ commonFlags(Private, Protected, Abstract, Final, Inline,
Sealed, Case, Implicit, Override, AbsOverride, Lazy, JavaStatic)
/** Flags representing modifiers that can appear in trees */
final val ModifierFlags =
- SourceModifierFlags | Module | Param | Synthetic | Package | Local | commonFlags(Mutable)
+ SourceModifierFlags | Module | Param | Synthetic | Package | Local |
+ commonFlags(Mutable)
// | Trait is subsumed by commonFlags(Lazy) from SourceModifierFlags
assert(ModifierFlags.isTermFlags && ModifierFlags.isTypeFlags)
@@ -447,9 +448,9 @@ object Flags {
/** Flags guaranteed to be set upon symbol creation */
final val FromStartFlags =
AccessFlags | Module | Package | Deferred | Final | MethodOrHKCommon | Param | ParamAccessor | Scala2ExistentialCommon |
- InSuperCall | Touched | JavaStatic | CovariantOrOuter | ContravariantOrLabel | ExpandedName | AccessorOrSealed |
+ Mutable.toCommonFlags | InSuperCall | Touched | JavaStatic | CovariantOrOuter | ContravariantOrLabel | ExpandedName | AccessorOrSealed |
CaseAccessorOrBaseTypeArg | Fresh | Frozen | Erroneous | ImplicitCommon | Permanent | Synthetic |
- LazyOrTrait | SuperAccessorOrScala2x | SelfNameOrImplClass
+ Inline | LazyOrTrait | SuperAccessorOrScala2x | SelfNameOrImplClass
assert(FromStartFlags.isTermFlags && FromStartFlags.isTypeFlags)
// TODO: Should check that FromStartFlags do not change in completion
@@ -525,8 +526,11 @@ object Flags {
/** Either method or lazy */
final val MethodOrLazy = Method | Lazy
- /** Labeled `private` or `final` */
- final val PrivateOrFinal = Private | Final
+ /** Either method or lazy or deferred */
+ final val MethodOrLazyOrDeferred = Method | Lazy | Deferred
+
+ /** Labeled `private`, `final`, or `inline` */
+ final val PrivateOrFinalOrInline = Private | Final | Inline
/** A private method */
final val PrivateMethod = allOf(Private, Method)
@@ -537,6 +541,9 @@ object Flags {
/** A type parameter with synthesized name */
final val ExpandedTypeParam = allOf(ExpandedName, TypeParam)
+ /** An inline method */
+ final val InlineMethod = allOf(Inline, Method)
+
/** A parameter or parameter accessor */
final val ParamOrAccessor = Param | ParamAccessor
@@ -549,6 +556,12 @@ object Flags {
/** A type parameter or type parameter accessor */
final val TypeParamOrAccessor = TypeParam | TypeParamAccessor
+ /** A deferred member or a parameter accessor (these don't have right hand sides) */
+ final val DeferredOrParamAccessor = Deferred | ParamAccessor
+
+ /** value that's final or inline */
+ final val FinalOrInline = Final | Inline
+
/** If symbol of a type alias has these flags, prefer the alias */
final val AliasPreferred = TypeParam | BaseTypeArg | ExpandedName
diff --git a/src/dotty/tools/dotc/typer/Mode.scala b/src/dotty/tools/dotc/core/Mode.scala
index 55d44ad7a..7a9bb0572 100644
--- a/src/dotty/tools/dotc/typer/Mode.scala
+++ b/src/dotty/tools/dotc/core/Mode.scala
@@ -1,7 +1,6 @@
-package dotty.tools.dotc.typer
-
-import collection.mutable
+package dotty.tools.dotc.core
+/** A collection of mode bits that are part of a context */
case class Mode(val bits: Int) extends AnyVal {
import Mode._
def | (that: Mode) = Mode(bits | that.bits)
@@ -82,5 +81,16 @@ object Mode {
/** We are currently unpickling Scala2 info */
val Scala2Unpickling = newMode(13, "Scala2Unpickling")
+ /** Use Scala2 scheme for overloading and implicit resolution */
+ val OldOverloadingResolution = newMode(14, "OldOverloadingResolution")
+
+ /** Allow hk applications of type lambdas to wildcard arguments;
+ * used for checking that such applications do not normally arise
+ */
+ val AllowLambdaWildcardApply = newMode(15, "AllowHKApplyToWildcards")
+
+ /** Read original positions when unpickling from TASTY */
+ val ReadPositions = newMode(16, "ReadPositions")
+
val PatternOrType = Pattern | Type
}
diff --git a/src/dotty/tools/dotc/core/NameOps.scala b/src/dotty/tools/dotc/core/NameOps.scala
index 81240a9fc..48e823e81 100644
--- a/src/dotty/tools/dotc/core/NameOps.scala
+++ b/src/dotty/tools/dotc/core/NameOps.scala
@@ -63,6 +63,7 @@ object NameOps {
(if (name.isTermName) n.toTermName else n.toTypeName).asInstanceOf[N]
def isConstructorName = name == CONSTRUCTOR || name == TRAIT_CONSTRUCTOR
+ def isStaticConstructorName = name == STATIC_CONSTRUCTOR
def isExceptionResultName = name startsWith EXCEPTION_RESULT_PREFIX
def isImplClassName = name endsWith IMPL_CLASS_SUFFIX
def isLocalDummyName = name startsWith LOCALDUMMY_PREFIX
@@ -83,6 +84,8 @@ object NameOps {
name.stripAnonNumberSuffix endsWith MODULE_VAR_SUFFIX
def isSelectorName = name.startsWith(" ") && name.tail.forall(_.isDigit)
def isLazyLocal = name.endsWith(nme.LAZY_LOCAL)
+ def isOuterSelect = name.endsWith(nme.OUTER_SELECT)
+ def isInlineAccessor = name.startsWith(nme.INLINE_ACCESSOR_PREFIX)
/** Is name a variable name? */
def isVariableName: Boolean = name.length > 0 && {
@@ -100,24 +103,6 @@ object NameOps {
name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.head)
}
- /** Is this the name of a higher-kinded type parameter of a Lambda? */
- def isHkArgName =
- name.length > 0 &&
- name.head == tpnme.hkArgPrefixHead &&
- name.startsWith(tpnme.hkArgPrefix) && {
- val digits = name.drop(tpnme.hkArgPrefixLength)
- digits.length <= 4 && digits.forall(_.isDigit)
- }
-
- /** The index of the higher-kinded type parameter with this name.
- * Pre: isLambdaArgName.
- */
- def hkArgIndex: Int =
- name.drop(tpnme.hkArgPrefixLength).toString.toInt
-
- def isLambdaTraitName(implicit ctx: Context): Boolean =
- name.startsWith(tpnme.hkLambdaPrefix)
-
/** If the name ends with $nn where nn are
* all digits, strip the $ and the digits.
* Otherwise return the argument.
@@ -179,7 +164,13 @@ object NameOps {
* an encoded name, e.g. super$$plus$eq. See #765.
*/
def unexpandedName: N = {
- val idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
+ var idx = name.lastIndexOfSlice(nme.EXPAND_SEPARATOR)
+
+ // Hack to make super accessors from traits work. They would otherwise fail because of #765
+ // TODO: drop this once we have more robust name handling
+ if (idx > FalseSuperLength && name.slice(idx - FalseSuperLength, idx) == FalseSuper)
+ idx -= FalseSuper.length
+
if (idx < 0) name else (name drop (idx + nme.EXPAND_SEPARATOR.length)).asInstanceOf[N]
}
@@ -430,5 +421,10 @@ object NameOps {
assert(name.isLazyLocal)
name.dropRight(nme.LAZY_LOCAL.length)
}
+
+ def inlineAccessorName = nme.INLINE_ACCESSOR_PREFIX ++ name ++ "$"
}
+
+ private final val FalseSuper = "$$super".toTermName
+ private val FalseSuperLength = FalseSuper.length
}
diff --git a/src/dotty/tools/dotc/core/Names.scala b/src/dotty/tools/dotc/core/Names.scala
index f1e6f7606..11f0b55a8 100644
--- a/src/dotty/tools/dotc/core/Names.scala
+++ b/src/dotty/tools/dotc/core/Names.scala
@@ -26,6 +26,8 @@ object Names {
def toTermName: TermName
}
+ implicit def eqName: Eq[Name, Name] = Eq
+
/** A name is essentially a string, with three differences
* 1. Names belong in one of two name spaces: they are type names or term names.
* Term names have a sub-category of "local" field names.
@@ -37,7 +39,7 @@ object Names {
*/
abstract class Name extends DotClass
with PreName
- with Seq[Char]
+ with collection.immutable.Seq[Char]
with IndexedSeqOptimized[Char, Name] {
/** A type for names of the same kind as this name */
@@ -333,6 +335,7 @@ object Names {
// can't move CONSTRUCTOR/EMPTY_PACKAGE to `nme` because of bootstrap failures in `encode`.
val CONSTRUCTOR = termName("<init>")
+ val STATIC_CONSTRUCTOR = termName("<clinit>")
val EMPTY_PACKAGE = termName("<empty>")
val dontEncode = Set(CONSTRUCTOR, EMPTY_PACKAGE)
diff --git a/src/dotty/tools/dotc/core/OrderingConstraint.scala b/src/dotty/tools/dotc/core/OrderingConstraint.scala
index e818862cb..72c7a8e51 100644
--- a/src/dotty/tools/dotc/core/OrderingConstraint.scala
+++ b/src/dotty/tools/dotc/core/OrderingConstraint.scala
@@ -8,17 +8,19 @@ import collection.mutable
import printing.{Printer, Showable}
import printing.Texts._
import config.Config
-import config.Printers._
import collection.immutable.BitSet
import reflect.ClassTag
+import annotation.tailrec
object OrderingConstraint {
+ type ArrayValuedMap[T] = SimpleMap[PolyType, Array[T]]
+
/** The type of `OrderingConstraint#boundsMap` */
- type ParamBounds = SimpleMap[PolyType, Array[Type]]
+ type ParamBounds = ArrayValuedMap[Type]
/** The type of `OrderingConstraint#lowerMap`, `OrderingConstraint#upperMap` */
- type ParamOrdering = SimpleMap[PolyType, Array[List[PolyParam]]]
+ type ParamOrdering = ArrayValuedMap[List[PolyParam]]
/** A new constraint with given maps */
private def newConstraint(boundsMap: ParamBounds, lowerMap: ParamOrdering, upperMap: ParamOrdering)(implicit ctx: Context) : OrderingConstraint = {
@@ -151,7 +153,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def contains(param: PolyParam): Boolean = {
val entries = boundsMap(param.binder)
- entries != null && entries(param.paramNum).isInstanceOf[TypeBounds]
+ entries != null && isBounds(entries(param.paramNum))
}
def contains(tvar: TypeVar): Boolean = {
@@ -391,7 +393,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
val replacement = tp.dealias.stripTypeVar
if (param == replacement) this
else {
- assert(replacement.isValueType)
+ assert(replacement.isValueTypeOrLambda)
val poly = param.binder
val idx = param.paramNum
@@ -419,7 +421,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
def replaceIn(tp: Type, isUpper: Boolean): Type = tp match {
case `param` => normalize(replacement, isUpper)
case tp: AndOrType if isUpper == tp.isAnd => recombine(tp, replaceIn, isUpper)
- case _ => tp
+ case _ => tp.substParam(param, replacement)
}
bounds.derivedTypeBounds(replaceIn(lo, isUpper = false), replaceIn(hi, isUpper = true))
@@ -428,7 +430,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
var current =
- if (isRemovable(poly, idx)) remove(poly) else updateEntry(param, replacement)
+ if (isRemovable(poly)) remove(poly) else updateEntry(param, replacement)
current.foreachParam {(p, i) =>
current = boundsLens.map(this, current, p, i, replaceParam(_, p, i))
current = lowerLens.map(this, current, p, i, removeParam)
@@ -449,20 +451,15 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap))
}
- def isRemovable(pt: PolyType, removedParam: Int = -1): Boolean = {
+ def isRemovable(pt: PolyType): Boolean = {
val entries = boundsMap(pt)
- var noneLeft = true
- var i = paramCount(entries)
- while (noneLeft && i > 0) {
- i -= 1
- if (i != removedParam && isBounds(entries(i))) noneLeft = false
- else typeVar(entries, i) match {
- case tv: TypeVar =>
- if (!tv.inst.exists) noneLeft = false // need to keep line around to compute instType
- case _ =>
+ @tailrec def allRemovable(last: Int): Boolean =
+ if (last < 0) true
+ else typeVar(entries, last) match {
+ case tv: TypeVar => tv.inst.exists && allRemovable(last - 1)
+ case _ => false
}
- }
- noneLeft
+ allRemovable(paramCount(entries) - 1)
}
// ---------- Exploration --------------------------------------------------------
@@ -473,7 +470,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
for {
(poly, entries) <- boundsMap.toList
n <- 0 until paramCount(entries)
- if isBounds(entries(n))
+ if entries(n).exists
} yield PolyParam(poly, n)
def forallParams(p: PolyParam => Boolean): Boolean = {
@@ -499,9 +496,52 @@ class OrderingConstraint(private val boundsMap: ParamBounds,
}
}
+ def & (other: Constraint)(implicit ctx: Context) = {
+ def merge[T](m1: ArrayValuedMap[T], m2: ArrayValuedMap[T], join: (T, T) => T): ArrayValuedMap[T] = {
+ var merged = m1
+ def mergeArrays(xs1: Array[T], xs2: Array[T]) = {
+ val xs = xs1.clone
+ for (i <- xs.indices) xs(i) = join(xs1(i), xs2(i))
+ xs
+ }
+ m2.foreachBinding { (poly, xs2) =>
+ merged = merged.updated(poly,
+ if (m1.contains(poly)) mergeArrays(m1(poly), xs2) else xs2)
+ }
+ merged
+ }
+
+ def mergeParams(ps1: List[PolyParam], ps2: List[PolyParam]) =
+ (ps1 /: ps2)((ps1, p2) => if (ps1.contains(p2)) ps1 else p2 :: ps1)
+
+ def mergeEntries(e1: Type, e2: Type): Type = e1 match {
+ case e1: TypeBounds =>
+ e2 match {
+ case e2: TypeBounds => e1 & e2
+ case _ if e1 contains e2 => e2
+ case _ => mergeError
+ }
+ case tv1: TypeVar =>
+ e2 match {
+ case tv2: TypeVar if tv1.instanceOpt eq tv2.instanceOpt => e1
+ case _ => mergeError
+ }
+ case _ if e1 eq e2 => e1
+ case _ => mergeError
+ }
+
+ def mergeError = throw new AssertionError(i"cannot merge $this with $other")
+
+ val that = other.asInstanceOf[OrderingConstraint]
+ new OrderingConstraint(
+ merge(this.boundsMap, that.boundsMap, mergeEntries),
+ merge(this.lowerMap, that.lowerMap, mergeParams),
+ merge(this.upperMap, that.upperMap, mergeParams))
+ }
+
override def checkClosed()(implicit ctx: Context): Unit = {
def isFreePolyParam(tp: Type) = tp match {
- case PolyParam(binder, _) => !contains(binder)
+ case PolyParam(binder: PolyType, _) => !contains(binder)
case _ => false
}
def checkClosedType(tp: Type, where: String) =
diff --git a/src/dotty/tools/dotc/core/Phases.scala b/src/dotty/tools/dotc/core/Phases.scala
index ce87506ae..222e2235d 100644
--- a/src/dotty/tools/dotc/core/Phases.scala
+++ b/src/dotty/tools/dotc/core/Phases.scala
@@ -9,7 +9,7 @@ import util.DotClass
import DenotTransformers._
import Denotations._
import Decorators._
-import config.Printers._
+import config.Printers.config
import scala.collection.mutable.{ListBuffer, ArrayBuffer}
import dotty.tools.dotc.transform.TreeTransforms.{TreeTransformer, MiniPhase, TreeTransform}
import dotty.tools.dotc.transform._
@@ -291,7 +291,11 @@ object Phases {
*/
def relaxedTyping: Boolean = false
- /** Overridden by FrontEnd */
+ /** Is this phase the standard typerphase? True for FrontEnd, but
+ * not for other first phases (such as FromTasty). The predicate
+ * is tested in some places that perform checks and corrections. It's
+ * different from isAfterTyper (and cheaper to test).
+ */
def isTyper = false
def exists: Boolean = true
diff --git a/src/dotty/tools/dotc/core/Signature.scala b/src/dotty/tools/dotc/core/Signature.scala
index 54771bae5..b2e627cbe 100644
--- a/src/dotty/tools/dotc/core/Signature.scala
+++ b/src/dotty/tools/dotc/core/Signature.scala
@@ -22,22 +22,40 @@ import TypeErasure.sigName
* "scala.String".toTypeName)
*
* The signatures of non-method types are always `NotAMethod`.
+ *
+ * There are three kinds of "missing" parts of signatures:
+ *
+ * - tpnme.EMPTY Result type marker for NotAMethod and OverloadedSignature
+ * - tpnme.WILDCARD Arises from a Wildcard or error type
+ * - tpnme.Uninstantiated Arises from an uninstantiated type variable
*/
case class Signature(paramsSig: List[TypeName], resSig: TypeName) {
import Signature._
- /** Does this signature coincide with that signature on their parameter parts? */
- final def sameParams(that: Signature): Boolean = this.paramsSig == that.paramsSig
+ /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */
+ private def consistent(name1: TypeName, name2: TypeName) =
+ name1 == name2 || name1 == tpnme.Uninstantiated || name2 == tpnme.Uninstantiated
+
+ /** Does this signature coincide with that signature on their parameter parts?
+ * This is the case if all parameter names are _consistent_, i.e. they are either
+ * equal or on of them is tpnme.Uninstantiated.
+ */
+ final def consistentParams(that: Signature): Boolean = {
+ def loop(names1: List[TypeName], names2: List[TypeName]): Boolean =
+ if (names1.isEmpty) names2.isEmpty
+ else names2.nonEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail)
+ loop(this.paramsSig, that.paramsSig)
+ }
/** The degree to which this signature matches `that`.
- * If both parameter and result type names match (i.e. they are the same
+ * If parameter names are consistent and result types names match (i.e. they are the same
* or one is a wildcard), the result is `FullMatch`.
- * If only the parameter names match, the result is `ParamMatch` before erasure and
+ * If only the parameter names are consistent, the result is `ParamMatch` before erasure and
* `NoMatch` otherwise.
- * If the parameters do not match, the result is always `NoMatch`.
+ * If the parameters are inconsistent, the result is always `NoMatch`.
*/
final def matchDegree(that: Signature)(implicit ctx: Context): MatchDegree =
- if (sameParams(that))
+ if (consistentParams(that))
if (resSig == that.resSig || isWildcard(resSig) || isWildcard(that.resSig)) FullMatch
else if (!ctx.erasedTypes) ParamMatch
else NoMatch
@@ -52,6 +70,13 @@ case class Signature(paramsSig: List[TypeName], resSig: TypeName) {
def prepend(params: List[Type], isJava: Boolean)(implicit ctx: Context) =
Signature((params.map(sigName(_, isJava))) ++ paramsSig, resSig)
+ /** A signature is under-defined if its paramsSig part contains at least one
+ * `tpnme.Uninstantiated`. Under-defined signatures arise when taking a signature
+ * of a type that still contains uninstantiated type variables. They are eliminated
+ * by `fixSignature` in `PostTyper`.
+ */
+ def isUnderDefined(implicit ctx: Context) =
+ paramsSig.contains(tpnme.Uninstantiated) || resSig == tpnme.Uninstantiated
}
object Signature {
diff --git a/src/dotty/tools/dotc/core/StdNames.scala b/src/dotty/tools/dotc/core/StdNames.scala
index e2add1a52..920c9635e 100644
--- a/src/dotty/tools/dotc/core/StdNames.scala
+++ b/src/dotty/tools/dotc/core/StdNames.scala
@@ -46,6 +46,7 @@ object StdNames {
final val IFkw: N = kw("if")
final val IMPLICITkw: N = kw("implicit")
final val IMPORTkw: N = kw("import")
+ final val INLINEkw: N = kw("inline")
final val LAZYkw: N = kw("lazy")
final val MACROkw: N = kw("macro")
final val MATCHkw: N = kw("match")
@@ -100,6 +101,7 @@ object StdNames {
val EXPAND_SEPARATOR: N = "$$"
val IMPL_CLASS_SUFFIX: N = "$class"
val IMPORT: N = "<import>"
+ val INLINE_ACCESSOR_PREFIX = "$inlineAccessor$"
val INTERPRETER_IMPORT_WRAPPER: N = "$iw"
val INTERPRETER_LINE_PREFIX: N = "line"
val INTERPRETER_VAR_PREFIX: N = "res"
@@ -252,7 +254,7 @@ object StdNames {
val MODULE_INSTANCE_FIELD: N = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
val OUTER: N = "$outer"
val OUTER_LOCAL: N = "$outer "
- val OUTER_SYNTH: N = "<outer>" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter
+ val OUTER_SELECT: N = "_<outer>" // emitted by inliner, replaced by outer path in explicitouter
val REFINE_CLASS: N = "<refinement>"
val ROOTPKG: N = "_root_"
val SELECTOR_DUMMY: N = "<unapply-selector>"
@@ -383,6 +385,7 @@ object StdNames {
val delayedInit: N = "delayedInit"
val delayedInitArg: N = "delayedInit$body"
val drop: N = "drop"
+ val dynamics: N = "dynamics"
val dummyApply: N = "<dummy-apply>"
val elem: N = "elem"
val emptyValDef: N = "emptyValDef"
@@ -394,6 +397,7 @@ object StdNames {
val equals_ : N = "equals"
val error: N = "error"
val eval: N = "eval"
+ val eqAny: N = "eqAny"
val ex: N = "ex"
val experimental: N = "experimental"
val f: N = "f"
@@ -425,7 +429,6 @@ object StdNames {
val isEmpty: N = "isEmpty"
val isInstanceOf_ : N = "isInstanceOf"
val java: N = "java"
- val keepUnions: N = "keepUnions"
val key: N = "key"
val lang: N = "lang"
val length: N = "length"
@@ -528,12 +531,6 @@ object StdNames {
val synthSwitch: N = "$synthSwitch"
- val hkApply: N = "$Apply"
- val hkArgPrefix: N = "$hk"
- val hkLambdaPrefix: N = "Lambda$"
- val hkArgPrefixHead: Char = hkArgPrefix.head
- val hkArgPrefixLength: Int = hkArgPrefix.length
-
// unencoded operators
object raw {
final val AMP : N = "&"
@@ -738,18 +735,18 @@ object StdNames {
class ScalaTypeNames extends ScalaNames[TypeName] {
protected implicit def fromString(s: String): TypeName = typeName(s)
- @switch def syntheticTypeParamName(i: Int): TypeName = "T" + i
+ def syntheticTypeParamName(i: Int): TypeName = "T" + i
+ def syntheticLambdaParamName(i: Int): TypeName = "X" + i
def syntheticTypeParamNames(num: Int): List[TypeName] =
(0 until num).map(syntheticTypeParamName)(breakOut)
- def hkLambda(vcs: List[Int]): TypeName = hkLambdaPrefix ++ vcs.map(varianceSuffix).mkString
- def hkArg(n: Int): TypeName = hkArgPrefix ++ n.toString
-
- def varianceSuffix(v: Int): Char = varianceSuffixes.charAt(v + 1)
- val varianceSuffixes = "NIP"
+ def syntheticLambdaParamNames(num: Int): List[TypeName] =
+ (0 until num).map(syntheticLambdaParamName)(breakOut)
final val Conforms = encode("<:<")
+
+ final val Uninstantiated: TypeName = "?$"
}
abstract class JavaNames[N <: Name] extends DefinedNames[N] {
diff --git a/src/dotty/tools/dotc/core/Substituters.scala b/src/dotty/tools/dotc/core/Substituters.scala
index 0083ac626..23683608a 100644
--- a/src/dotty/tools/dotc/core/Substituters.scala
+++ b/src/dotty/tools/dotc/core/Substituters.scala
@@ -102,14 +102,13 @@ trait Substituters { this: Context =>
}
if (sym.isStatic && !existsStatic(from)) tp
else {
- val prefix1 = substDealias(tp.prefix, from, to, theMap)
- if (prefix1 ne tp.prefix) tp.derivedSelect(prefix1)
- else if (sym.isAliasType) {
- val hi = sym.info.bounds.hi
- val hi1 = substDealias(hi, from, to, theMap)
- if (hi1 eq hi) tp else hi1
+ tp.info match {
+ case TypeAlias(alias) =>
+ val alias1 = substDealias(alias, from, to, theMap)
+ if (alias1 ne alias) return alias1
+ case _ =>
}
- else tp
+ tp.derivedSelect(substDealias(tp.prefix, from, to, theMap))
}
case _: ThisType | _: BoundType | NoPrefix =>
tp
@@ -179,21 +178,21 @@ trait Substituters { this: Context =>
.mapOver(tp)
}
- final def substRefinedThis(tp: Type, from: Type, to: Type, theMap: SubstRefinedThisMap): Type =
+ final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap): Type =
tp match {
- case tp @ RefinedThis(binder) =>
+ case tp @ RecThis(binder) =>
if (binder eq from) to else tp
case tp: NamedType =>
if (tp.currentSymbol.isStatic) tp
- else tp.derivedSelect(substRefinedThis(tp.prefix, from, to, theMap))
+ else tp.derivedSelect(substRecThis(tp.prefix, from, to, theMap))
case _: ThisType | _: BoundType | NoPrefix =>
tp
case tp: RefinedType =>
- tp.derivedRefinedType(substRefinedThis(tp.parent, from, to, theMap), tp.refinedName, substRefinedThis(tp.refinedInfo, from, to, theMap))
+ tp.derivedRefinedType(substRecThis(tp.parent, from, to, theMap), tp.refinedName, substRecThis(tp.refinedInfo, from, to, theMap))
case tp: TypeAlias =>
- tp.derivedTypeAlias(substRefinedThis(tp.alias, from, to, theMap))
+ tp.derivedTypeAlias(substRecThis(tp.alias, from, to, theMap))
case _ =>
- (if (theMap != null) theMap else new SubstRefinedThisMap(from, to))
+ (if (theMap != null) theMap else new SubstRecThisMap(from, to))
.mapOver(tp)
}
@@ -266,8 +265,8 @@ trait Substituters { this: Context =>
def apply(tp: Type): Type = substThis(tp, from, to, this)
}
- final class SubstRefinedThisMap(from: Type, to: Type) extends DeepTypeMap {
- def apply(tp: Type): Type = substRefinedThis(tp, from, to, this)
+ final class SubstRecThisMap(from: Type, to: Type) extends DeepTypeMap {
+ def apply(tp: Type): Type = substRecThis(tp, from, to, this)
}
final class SubstParamMap(from: ParamType, to: Type) extends DeepTypeMap {
diff --git a/src/dotty/tools/dotc/core/SymDenotations.scala b/src/dotty/tools/dotc/core/SymDenotations.scala
index a83e7726a..a98d6732a 100644
--- a/src/dotty/tools/dotc/core/SymDenotations.scala
+++ b/src/dotty/tools/dotc/core/SymDenotations.scala
@@ -3,7 +3,7 @@ package dotc
package core
import Periods._, Contexts._, Symbols._, Denotations._, Names._, NameOps._, Annotations._
-import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._
+import Types._, Flags._, Decorators._, DenotTransformers._, StdNames._, Scopes._, Comments._
import NameOps._
import Scopes.Scope
import collection.mutable
@@ -13,11 +13,10 @@ import Decorators.SymbolIteratorDecorator
import ast._
import annotation.tailrec
import CheckRealizable._
-import typer.Mode
import util.SimpleMap
import util.Stats
import config.Config
-import config.Printers._
+import config.Printers.{completions, incremental, noPrinter}
trait SymDenotations { this: Context =>
import SymDenotations._
@@ -42,12 +41,12 @@ trait SymDenotations { this: Context =>
}
def stillValid(denot: SymDenotation): Boolean =
- if (denot.is(ValidForever) || denot.isRefinementClass) true
+ if (denot.is(ValidForever) || denot.isRefinementClass || denot.isImport) true
else {
val initial = denot.initial
val firstPhaseId = initial.validFor.firstPhaseId.max(ctx.typerPhase.id)
if ((initial ne denot) || ctx.phaseId != firstPhaseId)
- ctx.withPhase(firstPhaseId).stillValidInOwner(initial.asSymDenotation)
+ ctx.withPhase(firstPhaseId).stillValidInOwner(initial)
else
stillValidInOwner(denot)
}
@@ -57,6 +56,7 @@ trait SymDenotations { this: Context =>
stillValid(owner) && (
!owner.isClass
|| owner.isRefinementClass
+ || owner.is(Scala2x)
|| (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol)
|| denot.isSelfSym)
} catch {
@@ -78,7 +78,7 @@ trait SymDenotations { this: Context =>
implicit val ctx: Context = this
val initial = denot.initial
if ((initial ne denot) || ctx.phaseId != initial.validFor.firstPhaseId) {
- ctx.withPhase(initial.validFor.firstPhaseId).traceInvalid(initial.asSymDenotation)
+ ctx.withPhase(initial.validFor.firstPhaseId).traceInvalid(initial)
} else try {
val owner = denot.owner.denot
if (!traceInvalid(owner)) explainSym("owner is invalid")
@@ -281,6 +281,15 @@ object SymDenotations {
case nil => None
}
+ /** The same as getAnnotation, but without ensuring
+ * that the symbol carrying the annotation is completed
+ */
+ final def unforcedAnnotation(cls: Symbol)(implicit ctx: Context): Option[Annotation] =
+ dropOtherAnnotations(myAnnotations, cls) match {
+ case annot :: _ => Some(annot)
+ case nil => None
+ }
+
/** Add given annotation to the annotations of this denotation */
final def addAnnotation(annot: Annotation): Unit =
annotations = annot :: myAnnotations
@@ -289,6 +298,12 @@ object SymDenotations {
final def removeAnnotation(cls: Symbol)(implicit ctx: Context): Unit =
annotations = myAnnotations.filterNot(_ matches cls)
+ /** Remove any annotations with same class as `annot`, and add `annot` */
+ final def updateAnnotation(annot: Annotation)(implicit ctx: Context): Unit = {
+ removeAnnotation(annot.symbol)
+ addAnnotation(annot)
+ }
+
/** Add all given annotations to this symbol */
final def addAnnotations(annots: TraversableOnce[Annotation])(implicit ctx: Context): Unit =
annots.foreach(addAnnotation)
@@ -347,14 +362,14 @@ object SymDenotations {
else {
def legalize(name: Name): Name = // JVM method names may not contain `<' or `>' characters
if (is(Method)) name.replace('<', '(').replace('>', ')') else name
- legalize(name.expandedName(initial.asSymDenotation.owner))
+ legalize(name.expandedName(initial.owner))
}
// need to use initial owner to disambiguate, as multiple private symbols with the same name
// might have been moved from different origins into the same class
/** The name with which the denoting symbol was created */
final def originalName(implicit ctx: Context) = {
- val d = initial.asSymDenotation
+ val d = initial
if (d is ExpandedName) d.name.unexpandedName else d.name // !!!DEBUG, was: effectiveName
}
@@ -436,13 +451,13 @@ object SymDenotations {
/** Is this symbol an anonymous class? */
final def isAnonymousClass(implicit ctx: Context): Boolean =
- isClass && (initial.asSymDenotation.name startsWith tpnme.ANON_CLASS)
+ isClass && (initial.name startsWith tpnme.ANON_CLASS)
final def isAnonymousFunction(implicit ctx: Context) =
- this.symbol.is(Method) && (initial.asSymDenotation.name startsWith nme.ANON_FUN)
+ this.symbol.is(Method) && (initial.name startsWith nme.ANON_FUN)
final def isAnonymousModuleVal(implicit ctx: Context) =
- this.symbol.is(ModuleVal) && (initial.asSymDenotation.name startsWith nme.ANON_CLASS)
+ this.symbol.is(ModuleVal) && (initial.name startsWith nme.ANON_CLASS)
/** Is this a companion class method or companion object method?
* These methods are generated by Symbols#synthesizeCompanionMethod
@@ -479,10 +494,6 @@ object SymDenotations {
final def isRefinementClass(implicit ctx: Context): Boolean =
name.decode == tpnme.REFINE_CLASS
- /** is this symbol a trait representing a type lambda? */
- final def isLambdaTrait(implicit ctx: Context): Boolean =
- isClass && name.startsWith(tpnme.hkLambdaPrefix) && owner == defn.ScalaPackageClass
-
/** Is this symbol a package object or its module class? */
def isPackageObject(implicit ctx: Context): Boolean = {
val poName = if (isType) nme.PACKAGE_CLS else nme.PACKAGE
@@ -579,6 +590,9 @@ object SymDenotations {
originalName.isSetterName &&
(!isCompleted || info.firstParamTypes.nonEmpty) // to avoid being fooled by var x_= : Unit = ...
+ /** is this a symbol representing an import? */
+ final def isImport = name == nme.IMPORT
+
/** is this the constructor of a class? */
final def isClassConstructor = name == nme.CONSTRUCTOR
@@ -595,6 +609,10 @@ object SymDenotations {
final def isPrimaryConstructor(implicit ctx: Context) =
isConstructor && owner.primaryConstructor == symbol
+ /** Does this symbol denote the static constructor of its enclosing class? */
+ final def isStaticConstructor(implicit ctx: Context) =
+ name.isStaticConstructorName
+
/** Is this a subclass of the given class `base`? */
def isSubClass(base: Symbol)(implicit ctx: Context) = false
@@ -607,7 +625,7 @@ object SymDenotations {
/** Is this symbol a class that extends `AnyVal`? */
final def isValueClass(implicit ctx: Context): Boolean = {
- val di = this.initial.asSymDenotation
+ val di = initial
di.isClass &&
di.derivesFrom(defn.AnyValClass)(ctx.withPhase(di.validFor.firstPhaseId))
// We call derivesFrom at the initial phase both because AnyVal does not exist
@@ -670,9 +688,9 @@ object SymDenotations {
val cls = owner.enclosingSubClass
if (!cls.exists)
fail(
- s""" Access to protected $this not permitted because
- | enclosing ${ctx.owner.enclosingClass.showLocated} is not a subclass of
- | ${owner.showLocated} where target is defined""".stripMargin)
+ i"""
+ | Access to protected $this not permitted because enclosing ${ctx.owner.enclosingClass.showLocated}
+ | is not a subclass of ${owner.showLocated} where target is defined""")
else if (
!( isType // allow accesses to types from arbitrary subclasses fixes #4737
|| pre.baseTypeRef(cls).exists // ??? why not use derivesFrom ???
@@ -680,9 +698,9 @@ object SymDenotations {
|| (owner is ModuleClass) // don't perform this check for static members
))
fail(
- s""" Access to protected ${symbol.show} not permitted because
- | prefix type ${pre.widen.show} does not conform to
- | ${cls.showLocated} where the access takes place""".stripMargin)
+ i"""
+ | Access to protected ${symbol.show} not permitted because prefix type ${pre.widen.show}
+ | does not conform to ${cls.showLocated} where the access takes place""")
else true
}
@@ -744,6 +762,11 @@ object SymDenotations {
// def isOverridable: Boolean = !!! need to enforce that classes cannot be redefined
def isSkolem: Boolean = name == nme.SKOLEM
+ def isInlineMethod(implicit ctx: Context): Boolean =
+ is(Method, butNot = Accessor) &&
+ !isCompleting && // don't force method type; recursive inlines are ignored anyway.
+ hasAnnotation(defn.InlineAnnot)
+
// ------ access to related symbols ---------------------------------
/* Modules and module classes are represented as follows:
@@ -845,9 +868,13 @@ object SymDenotations {
enclClass(symbol, false)
}
+ /** A class that in source code would be lexically enclosing */
+ final def lexicallyEnclosingClass(implicit ctx: Context): Symbol =
+ if (!exists || isClass) symbol else owner.lexicallyEnclosingClass
+
/** A symbol is effectively final if it cannot be overridden in a subclass */
final def isEffectivelyFinal(implicit ctx: Context): Boolean =
- is(PrivateOrFinal) || !owner.isClass || owner.is(ModuleOrFinal) || owner.isAnonymousClass
+ is(PrivateOrFinalOrInline) || !owner.isClass || owner.is(ModuleOrFinal) || owner.isAnonymousClass
/** The class containing this denotation which has the given effective name. */
final def enclosingClassNamed(name: Name)(implicit ctx: Context): Symbol = {
@@ -869,7 +896,7 @@ object SymDenotations {
*/
final def topLevelClass(implicit ctx: Context): Symbol = {
def topLevel(d: SymDenotation): Symbol = {
- if ((d is PackageClass) || (d.owner is PackageClass)) d.symbol
+ if (d.isEffectiveRoot || (d is PackageClass) || (d.owner is PackageClass)) d.symbol
else topLevel(d.owner)
}
val sym = topLevel(this)
@@ -1002,7 +1029,7 @@ object SymDenotations {
if (!canMatchInheritedSymbols) Iterator.empty
else overriddenFromType(owner.info)
- /** Returns all all matching symbols defined in parents of the selftype. */
+ /** Returns all matching symbols defined in parents of the selftype. */
final def extendedOverriddenSymbols(implicit ctx: Context): Iterator[Symbol] =
if (!canMatchInheritedSymbols) Iterator.empty
else overriddenFromType(owner.asClass.classInfo.selfType)
@@ -1069,6 +1096,9 @@ object SymDenotations {
/** The type parameters of a class symbol, Nil for all other symbols */
def typeParams(implicit ctx: Context): List[TypeSymbol] = Nil
+ /** The named type parameters declared or inherited by this symbol */
+ def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = Set()
+
/** The type This(cls), where cls is this class, NoPrefix for all other symbols */
def thisType(implicit ctx: Context): Type = NoPrefix
@@ -1115,13 +1145,15 @@ object SymDenotations {
def debugString = toString + "#" + symbol.id // !!! DEBUG
- def hasSkolems(tp: Type): Boolean = tp match {
+ def hasSkolems(tp: Type): Boolean = tp match {
case tp: SkolemType => true
case tp: NamedType => hasSkolems(tp.prefix)
case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo)
+ case tp: RecType => hasSkolems(tp.parent)
case tp: PolyType => tp.paramBounds.exists(hasSkolems) || hasSkolems(tp.resType)
case tp: MethodType => tp.paramTypes.exists(hasSkolems) || hasSkolems(tp.resType)
case tp: ExprType => hasSkolems(tp.resType)
+ case tp: HKApply => hasSkolems(tp.tycon) || tp.args.exists(hasSkolems)
case tp: AndOrType => hasSkolems(tp.tp1) || hasSkolems(tp.tp2)
case tp: TypeBounds => hasSkolems(tp.lo) || hasSkolems(tp.hi)
case tp: AnnotatedType => hasSkolems(tp.tpe)
@@ -1156,6 +1188,8 @@ object SymDenotations {
d
}
+ override def initial: SymDenotation = super.initial.asSymDenotation
+
/** Install this denotation as the result of the given denotation transformer. */
override def installAfter(phase: DenotTransformer)(implicit ctx: Context): Unit =
super.installAfter(phase)
@@ -1202,18 +1236,43 @@ object SymDenotations {
/** TODO: Document why caches are supposedly safe to use */
private[this] var myTypeParams: List[TypeSymbol] = _
+ private[this] var myNamedTypeParams: Set[TypeSymbol] = _
+
+ /** The type parameters in this class, in the order they appear in the current
+ * scope `decls`. This might temporarily be in the incorrect order when
+ * reading Scala2 pickled info. The problem is fixed by `updateTypeParams`
+ * which is called once an unpickled symbol has been completed.
+ */
+ private def typeParamsFromDecls(implicit ctx: Context) =
+ unforcedDecls.filter(sym =>
+ (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
+
/** The type parameters of this class */
override final def typeParams(implicit ctx: Context): List[TypeSymbol] = {
- def computeTypeParams = {
- if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls
- else if (this ne initial) initial.asSymDenotation.typeParams
- else unforcedDecls.filter(sym =>
- (sym is TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]]
- }
- if (myTypeParams == null) myTypeParams = computeTypeParams
+ if (myTypeParams == null)
+ myTypeParams =
+ if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls
+ else {
+ val di = initial
+ if (this ne di) di.typeParams
+ else infoOrCompleter match {
+ case info: TypeParamsCompleter => info.completerTypeParams(symbol)
+ case _ => typeParamsFromDecls
+ }
+ }
myTypeParams
}
+ /** The named type parameters declared or inherited by this class */
+ override final def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = {
+ def computeNamedTypeParams: Set[TypeSymbol] =
+ if (ctx.erasedTypes || is(Module)) Set() // fast return for modules to avoid scanning package decls
+ else memberNames(abstractTypeNameFilter).map(name =>
+ info.member(name).symbol.asType).filter(_.is(TypeParam, butNot = ExpandedName)).toSet
+ if (myNamedTypeParams == null) myNamedTypeParams = computeNamedTypeParams
+ myNamedTypeParams
+ }
+
override protected[dotc] final def info_=(tp: Type) = {
super.info_=(tp)
myTypeParams = null // changing the info might change decls, and with it typeParams
@@ -1485,7 +1544,15 @@ object SymDenotations {
/** Enter a symbol in given `scope` without potentially replacing the old copy. */
def enterNoReplace(sym: Symbol, scope: MutableScope)(implicit ctx: Context): Unit = {
- require((sym.denot.flagsUNSAFE is Private) || !(this is Frozen) || (scope ne this.unforcedDecls))
+ def isUsecase = ctx.docCtx.isDefined && sym.name.show.takeRight(4) == "$doc"
+ require(
+ (sym.denot.flagsUNSAFE is Private) ||
+ !(this is Frozen) ||
+ (scope ne this.unforcedDecls) ||
+ sym.hasAnnotation(defn.ScalaStaticAnnot) ||
+ sym.name.isInlineAccessor ||
+ isUsecase)
+
scope.enter(sym)
if (myMemberFingerPrint != FingerPrint.unknown)
@@ -1516,19 +1583,20 @@ object SymDenotations {
if (myMemberCache != null) myMemberCache invalidate sym.name
}
- /** Make sure the type parameters of this class are `tparams`, reorder definitions
- * in scope if necessary.
- * @pre All type parameters in `tparams` are entered in class scope `info.decls`.
+ /** Make sure the type parameters of this class appear in the order given
+ * by `typeParams` in the scope of the class. Reorder definitions in scope if necessary.
*/
- def updateTypeParams(tparams: List[Symbol])(implicit ctx: Context): Unit =
- if (!typeParams.corresponds(tparams)(_.name == _.name)) {
+ def ensureTypeParamsInCorrectOrder()(implicit ctx: Context): Unit = {
+ val tparams = typeParams
+ if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) {
val decls = info.decls
val decls1 = newScope
- for (tparam <- tparams) decls1.enter(decls.lookup(tparam.name))
- for (sym <- decls) if (!typeParams.contains(sym)) decls1.enter(sym)
+ for (tparam <- typeParams) decls1.enter(decls.lookup(tparam.name))
+ for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym)
info = classInfo.derivedClassInfo(decls = decls1)
myTypeParams = null
}
+ }
/** All members of this class that have the given name.
* The elements of the returned pre-denotation all
@@ -1615,6 +1683,7 @@ object SymDenotations {
*/
def isCachable(tp: Type): Boolean = tp match {
case _: TypeErasure.ErasedValueType => false
+ case tp: TypeRef if tp.symbol.isClass => true
case tp: TypeVar => tp.inst.exists && inCache(tp.inst)
case tp: TypeProxy => inCache(tp.underlying)
case tp: AndOrType => inCache(tp.tp1) && inCache(tp.tp2)
@@ -1635,10 +1704,10 @@ object SymDenotations {
if (cdenot.superClassBits contains symbol.superId) foldGlb(NoType, tp.parents)
else NoType
case _ =>
- baseTypeRefOf(tp.underlying)
+ baseTypeRefOf(tp.superType)
}
case tp: TypeProxy =>
- baseTypeRefOf(tp.underlying)
+ baseTypeRefOf(tp.superType)
case AndType(tp1, tp2) =>
baseTypeRefOf(tp1) & baseTypeRefOf(tp2)
case OrType(tp1, tp2) =>
@@ -1798,6 +1867,7 @@ object SymDenotations {
override def isType = false
override def owner: Symbol = throw new AssertionError("NoDenotation.owner")
override def computeAsSeenFrom(pre: Type)(implicit ctx: Context): SingleDenotation = this
+ override def mapInfo(f: Type => Type)(implicit ctx: Context): SingleDenotation = this
validFor = Period.allInRun(NoRunId) // will be brought forward automatically
}
@@ -1845,9 +1915,9 @@ object SymDenotations {
/** A subclass of LazyTypes where type parameters can be completed independently of
* the info.
*/
- abstract class TypeParamsCompleter extends LazyType {
+ trait TypeParamsCompleter extends LazyType {
/** The type parameters computed by the completer before completion has finished */
- def completerTypeParams(sym: Symbol): List[TypeSymbol]
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol]
}
val NoSymbolFn = (ctx: Context) => NoSymbol
@@ -1900,10 +1970,10 @@ object SymDenotations {
else ("", "the signature")
val name = ctx.fresh.setSetting(ctx.settings.debugNames, true).nameString(denot.name)
ctx.error(
- s"""|bad symbolic reference. A signature$location
- |refers to $name in ${denot.owner.showKind} ${denot.owner.showFullName} which is not available.
- |It may be completely missing from the current classpath, or the version on
- |the classpath might be incompatible with the version used when compiling $src.""".stripMargin)
+ i"""bad symbolic reference. A signature$location
+ |refers to $name in ${denot.owner.showKind} ${denot.owner.showFullName} which is not available.
+ |It may be completely missing from the current classpath, or the version on
+ |the classpath might be incompatible with the version used when compiling $src.""")
if (ctx.debug) throw new Error()
initializeToDefaults(denot)
}
diff --git a/src/dotty/tools/dotc/core/SymbolLoaders.scala b/src/dotty/tools/dotc/core/SymbolLoaders.scala
index a62a88dfb..4ae28c10b 100644
--- a/src/dotty/tools/dotc/core/SymbolLoaders.scala
+++ b/src/dotty/tools/dotc/core/SymbolLoaders.scala
@@ -70,8 +70,8 @@ class SymbolLoaders {
// require yjp.jar at runtime. See SI-2089.
if (ctx.settings.termConflict.isDefault)
throw new TypeError(
- sm"""$owner contains object and package with same name: $pname
- |one of them needs to be removed from classpath""")
+ i"""$owner contains object and package with same name: $pname
+ |one of them needs to be removed from classpath""")
else if (ctx.settings.termConflict.value == "package") {
ctx.warning(
s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. The object will be inaccessible.")
@@ -198,7 +198,7 @@ abstract class SymbolLoader extends LazyType {
try {
val start = currentTime
if (ctx.settings.debugTrace.value)
- ctx.traceIndented(s">>>> loading ${root.debugString}", _ => s"<<<< loaded ${root.debugString}") {
+ ctx.doTraceIndented(s">>>> loading ${root.debugString}", _ => s"<<<< loaded ${root.debugString}") {
doComplete(root)
}
else
diff --git a/src/dotty/tools/dotc/core/Symbols.scala b/src/dotty/tools/dotc/core/Symbols.scala
index 2a76f18d8..38b2c8bd6 100644
--- a/src/dotty/tools/dotc/core/Symbols.scala
+++ b/src/dotty/tools/dotc/core/Symbols.scala
@@ -21,6 +21,7 @@ import StdNames._
import NameOps._
import ast.tpd.Tree
import ast.TreeTypeMap
+import Constants.Constant
import Denotations.{ Denotation, SingleDenotation, MultiDenotation }
import collection.mutable
import io.AbstractFile
@@ -360,11 +361,13 @@ trait Symbols { this: Context =>
object Symbols {
+ implicit def eqSymbol: Eq[Symbol, Symbol] = Eq
+
/** A Symbol represents a Scala definition/declaration or a package.
* @param coord The coordinates of the symbol (a position or an index)
* @param id A unique identifier of the symbol (unique per ContextBase)
*/
- class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with printing.Showable {
+ class Symbol private[Symbols] (val coord: Coord, val id: Int) extends DotClass with TypeParamInfo with printing.Showable {
type ThisName <: Name
@@ -397,10 +400,10 @@ object Symbols {
/** Subclass tests and casts */
final def isTerm(implicit ctx: Context): Boolean =
- (if(isDefinedInCurrentRun) lastDenot else denot).isTerm
+ (if (defRunId == ctx.runId) lastDenot else denot).isTerm
final def isType(implicit ctx: Context): Boolean =
- (if(isDefinedInCurrentRun) lastDenot else denot).isType
+ (if (defRunId == ctx.runId) lastDenot else denot).isType
final def isClass: Boolean = isInstanceOf[ClassSymbol]
@@ -463,26 +466,38 @@ object Symbols {
denot.topLevelClass.symbol.associatedFile
/** The class file from which this class was generated, null if not applicable. */
- final def binaryFile(implicit ctx: Context): AbstractFile =
- pickFile(associatedFile, classFile = true)
+ final def binaryFile(implicit ctx: Context): AbstractFile = {
+ val file = associatedFile
+ if (file != null && file.path.endsWith("class")) file else null
+ }
/** The source file from which this class was generated, null if not applicable. */
- final def sourceFile(implicit ctx: Context): AbstractFile =
- pickFile(associatedFile, classFile = false)
-
- /** Desire to re-use the field in ClassSymbol which stores the source
- * file to also store the classfile, but without changing the behavior
- * of sourceFile (which is expected at least in the IDE only to
- * return actual source code.) So sourceFile has classfiles filtered out.
- */
- private def pickFile(file: AbstractFile, classFile: Boolean): AbstractFile =
- if ((file eq null) || classFile != (file.path endsWith ".class")) null else file
+ final def sourceFile(implicit ctx: Context): AbstractFile = {
+ val file = associatedFile
+ if (file != null && !file.path.endsWith("class")) file
+ else denot.topLevelClass.getAnnotation(defn.SourceFileAnnot) match {
+ case Some(sourceAnnot) => sourceAnnot.argumentConstant(0) match {
+ case Some(Constant(path: String)) => AbstractFile.getFile(path)
+ case none => null
+ }
+ case none => null
+ }
+ }
/** The position of this symbol, or NoPosition is symbol was not loaded
* from source.
*/
def pos: Position = if (coord.isPosition) coord.toPosition else NoPosition
+ // TypeParamInfo methods
+ def isTypeParam(implicit ctx: Context) = denot.is(TypeParam)
+ def paramName(implicit ctx: Context) = name.asTypeName
+ def paramBounds(implicit ctx: Context) = denot.info.bounds
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context) = pre.memberInfo(this).bounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = denot.infoOrCompleter
+ def paramVariance(implicit ctx: Context) = denot.variance
+ def paramRef(implicit ctx: Context) = denot.typeRef
+
// -------- Printing --------------------------------------------------------
/** The prefix string to be used when displaying this symbol without denotation */
@@ -495,6 +510,7 @@ object Symbols {
def toText(printer: Printer): Text = printer.toText(this)
def showLocated(implicit ctx: Context): String = ctx.locatedText(this).show
+ def showExtendedLocation(implicit ctx: Context): String = ctx.extendedLocationText(this).show
def showDcl(implicit ctx: Context): String = ctx.dclText(this).show
def showKind(implicit ctx: Context): String = ctx.kindString(this)
def showName(implicit ctx: Context): String = ctx.nameString(this)
@@ -513,7 +529,7 @@ object Symbols {
/** The source or class file from which this class was generated, null if not applicable. */
override def associatedFile(implicit ctx: Context): AbstractFile =
- if (assocFile != null || (this.owner is PackageClass)) assocFile
+ if (assocFile != null || (this.owner is PackageClass) || this.isEffectiveRoot) assocFile
else super.associatedFile
final def classDenot(implicit ctx: Context): ClassDenotation =
diff --git a/src/dotty/tools/dotc/core/TypeApplications.scala b/src/dotty/tools/dotc/core/TypeApplications.scala
index 8f8a7dbdd..8aaf77032 100644
--- a/src/dotty/tools/dotc/core/TypeApplications.scala
+++ b/src/dotty/tools/dotc/core/TypeApplications.scala
@@ -4,7 +4,7 @@ package core
import Types._
import Contexts._
import Symbols._
-import SymDenotations.TypeParamsCompleter
+import SymDenotations.{LazyType, TypeParamsCompleter}
import Decorators._
import util.Stats._
import util.common._
@@ -12,10 +12,10 @@ import Names._
import NameOps._
import Flags._
import StdNames.tpnme
-import typer.Mode
import util.Positions.Position
-import config.Printers._
+import config.Printers.core
import collection.mutable
+import dotty.tools.dotc.config.Config
import java.util.NoSuchElementException
object TypeApplications {
@@ -38,56 +38,23 @@ object TypeApplications {
case _ => tp
}
- /** Does the variance of `sym1` conform to the variance of `sym2`?
+ /** Does variance `v1` conform to variance `v2`?
* This is the case if the variances are the same or `sym` is nonvariant.
*/
- def varianceConforms(sym1: TypeSymbol, sym2: TypeSymbol)(implicit ctx: Context) =
- sym1.variance == sym2.variance || sym2.variance == 0
+ def varianceConforms(v1: Int, v2: Int): Boolean =
+ v1 == v2 || v2 == 0
- def variancesConform(syms1: List[TypeSymbol], syms2: List[TypeSymbol])(implicit ctx: Context) =
- syms1.corresponds(syms2)(varianceConforms)
-
- /** Extractor for
- *
- * [v1 X1: B1, ..., vn Xn: Bn] -> T
- * ==>
- * Lambda$_v1...vn { type $hk_i: B_i, type $Apply = [X_i := this.$Arg_i] T }
+ /** Does the variance of type parameter `tparam1` conform to the variance of type parameter `tparam2`?
*/
- object TypeLambda {
- def apply(variances: List[Int],
- argBoundsFns: List[RefinedType => TypeBounds],
- bodyFn: RefinedType => Type)(implicit ctx: Context): Type = {
- def argRefinements(parent: Type, i: Int, bs: List[RefinedType => TypeBounds]): Type = bs match {
- case b :: bs1 =>
- argRefinements(RefinedType(parent, tpnme.hkArg(i), b), i + 1, bs1)
- case nil =>
- parent
- }
- assert(variances.nonEmpty)
- assert(argBoundsFns.length == variances.length)
- RefinedType(
- argRefinements(defn.LambdaTrait(variances).typeRef, 0, argBoundsFns),
- tpnme.hkApply, bodyFn(_).bounds.withVariance(1))
- }
+ def varianceConforms(tparam1: TypeParamInfo, tparam2: TypeParamInfo)(implicit ctx: Context): Boolean =
+ varianceConforms(tparam1.paramVariance, tparam2.paramVariance)
- def unapply(tp: Type)(implicit ctx: Context): Option[(List[Int], List[TypeBounds], Type)] = tp match {
- case app @ RefinedType(parent, tpnme.hkApply) =>
- val cls = parent.typeSymbol
- val variances = cls.typeParams.map(_.variance)
- def collectBounds(t: Type, acc: List[TypeBounds]): List[TypeBounds] = t match {
- case t @ RefinedType(p, rname) =>
- assert(rname.isHkArgName)
- collectBounds(p, t.refinedInfo.bounds :: acc)
- case TypeRef(_, lname) =>
- assert(lname.isLambdaTraitName)
- acc
- }
- val argBounds = collectBounds(parent, Nil)
- Some((variances, argBounds, app.refinedInfo.argInfo))
- case _ =>
- None
- }
- }
+ /** Do the variances of type parameters `tparams1` conform to the variances
+ * of corresponding type parameters `tparams2`?
+ * This is only the case if `tparams1` and `tparams2` have the same length.
+ */
+ def variancesConform(tparams1: List[TypeParamInfo], tparams2: List[TypeParamInfo])(implicit ctx: Context): Boolean =
+ tparams1.corresponds(tparams2)(varianceConforms)
/** Extractor for
*
@@ -99,25 +66,14 @@ object TypeApplications {
* @param tycon C
*/
object EtaExpansion {
- def apply(tycon: TypeRef)(implicit ctx: Context) = {
- assert(tycon.isEtaExpandable)
- tycon.EtaExpand(tycon.typeParams)
+ def apply(tycon: Type)(implicit ctx: Context) = {
+ assert(tycon.typeParams.nonEmpty, tycon)
+ tycon.EtaExpand(tycon.typeParamSymbols)
}
- def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = {
- def argsAreForwarders(args: List[Type], n: Int): Boolean = args match {
- case Nil =>
- n == 0
- case TypeRef(RefinedThis(rt), sel) :: args1 =>
- rt.eq(tp) && sel == tpnme.hkArg(n - 1) && argsAreForwarders(args1, n - 1)
- case _ =>
- false
- }
- tp match {
- case TypeLambda(_, argBounds, AppliedType(fn: TypeRef, args))
- if argsAreForwarders(args, tp.typeParams.length) => Some(fn)
- case _ => None
- }
+ def unapply(tp: Type)(implicit ctx: Context): Option[TypeRef] = tp match {
+ case tp @ PolyType(tparams, AppliedType(fn: TypeRef, args)) if (args == tparams.map(_.toArg)) => Some(fn)
+ case _ => None
}
}
@@ -125,94 +81,112 @@ object TypeApplications {
*
* T { type p_1 v_1= U_1; ...; type p_n v_n= U_n }
*
- * where v_i, p_i are the variances and names of the type parameters of T,
- * If `T`'s class symbol is a lambda trait, follow the refined type with a
- * projection
- *
- * T { ... } # $Apply
+ * where v_i, p_i are the variances and names of the type parameters of T.
*/
object AppliedType {
def apply(tp: Type, args: List[Type])(implicit ctx: Context): Type = tp.appliedTo(args)
def unapply(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match {
- case TypeRef(prefix, tpnme.hkApply) => unapp(prefix)
- case _ =>
- unapp(tp) match {
- case Some((tycon: TypeRef, _)) if tycon.symbol.isLambdaTrait =>
- // We are seeing part of a lambda abstraction, not an applied type
- None
- case x => x
+ case tp: RefinedType =>
+ var refinements: List[RefinedType] = Nil
+ var tycon = tp.stripTypeVar
+ while (tycon.isInstanceOf[RefinedType]) {
+ val rt = tycon.asInstanceOf[RefinedType]
+ refinements = rt :: refinements
+ tycon = rt.parent.stripTypeVar
}
- }
-
- private def unapp(tp: Type)(implicit ctx: Context): Option[(Type, List[Type])] = tp match {
- case _: RefinedType =>
- val tparams = tp.classSymbol.typeParams
- if (tparams.isEmpty) None
- else {
- val argBuf = new mutable.ListBuffer[Type]
- def stripArgs(tp: Type, n: Int): Type =
- if (n == 0) tp
- else tp match {
- case tp @ RefinedType(parent, pname) if pname == tparams(n - 1).name =>
- val res = stripArgs(parent, n - 1)
- if (res.exists) argBuf += tp.refinedInfo.argInfo
- res
- case _ =>
- NoType
- }
- val res = stripArgs(tp, tparams.length)
- if (res.exists) Some((res, argBuf.toList)) else None
+ def collectArgs(tparams: List[TypeParamInfo],
+ refinements: List[RefinedType],
+ argBuf: mutable.ListBuffer[Type]): Option[(Type, List[Type])] = refinements match {
+ case Nil if tparams.isEmpty && argBuf.nonEmpty =>
+ Some((tycon, argBuf.toList))
+ case RefinedType(_, rname, rinfo) :: refinements1
+ if tparams.nonEmpty && rname == tparams.head.paramName =>
+ collectArgs(tparams.tail, refinements1, argBuf += rinfo.argInfo)
+ case _ =>
+ None
}
- case _ => None
+ collectArgs(tycon.typeParams, refinements, new mutable.ListBuffer[Type])
+ case HKApply(tycon, args) =>
+ Some((tycon, args))
+ case _ =>
+ None
}
}
/** Adapt all arguments to possible higher-kinded type parameters using etaExpandIfHK
*/
- def etaExpandIfHK(tparams: List[Symbol], args: List[Type])(implicit ctx: Context): List[Type] =
+ def EtaExpandIfHK(tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): List[Type] =
if (tparams.isEmpty) args
- else args.zipWithConserve(tparams)((arg, tparam) => arg.etaExpandIfHK(tparam.infoOrCompleter))
-
- /** The references `<rt>.this.$hk0, ..., <rt>.this.$hk<n-1>`. */
- def argRefs(rt: RefinedType, n: Int)(implicit ctx: Context) =
- List.range(0, n).map(i => RefinedThis(rt).select(tpnme.hkArg(i)))
+ else args.zipWithConserve(tparams)((arg, tparam) => arg.EtaExpandIfHK(tparam.paramBoundsOrCompleter))
- /** Merge `tp1` and `tp2` under a common lambda, combining them with `op`.
- * @param tparams1 The type parameters of `tp1`
- * @param tparams2 The type parameters of `tp2`
- * @pre tparams1.length == tparams2.length
- * Produces the type lambda
+ /** A type map that tries to reduce (part of) the result type of the type lambda `tycon`
+ * with the given `args` (some of which are wildcard arguments represented by type bounds).
+ * Non-wildcard arguments are substituted everywhere as usual. A wildcard argument
+ * `>: L <: U` is substituted for a type lambda parameter `X` only under certain conditions.
*
- * [v1 X1 B1, ..., vn Xn Bn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn])
+ * 1. If Mode.AllowLambdaWildcardApply is set:
+ * The wildcard argument is substituted only if `X` appears in a toplevel refinement of the form
*
- * where
+ * { type A = X }
*
- * - variances `vi` are the variances of corresponding type parameters for `tp1`
- * or `tp2`, or are 0 of the latter disagree.
- * - bounds `Bi` are the intersection of the corresponding type parameter bounds
- * of `tp1` and `tp2`.
+ * and there are no other occurrences of `X` in the reduced type. In that case
+ * the refinement above is replaced by
+ *
+ * { type A >: L <: U }
+ *
+ * The `allReplaced` field indicates whether all occurrences of type lambda parameters
+ * in the reduced type have been replaced with arguments.
+ *
+ * 2. If Mode.AllowLambdaWildcardApply is not set:
+ * All refinements of the form
+ *
+ * { type A = X }
+ *
+ * are replaced by:
+ *
+ * { type A >: L <: U }
+ *
+ * Any other occurrence of `X` in `tycon` is replaced by `U`, if the
+ * occurrence of `X` in `tycon` is covariant, or nonvariant, or by `L`,
+ * if the occurrence is contravariant.
+ *
+ * The idea is that the `AllowLambdaWildcardApply` mode is used to check whether
+ * a type can be soundly reduced, and to give an error or warning if that
+ * is not the case. By contrast, the default mode, with `AllowLambdaWildcardApply`
+ * not set, reduces all applications even if this yields a different type, so
+ * its postcondition is that no type parameters of `tycon` appear in the
+ * result type. Using this mode, we can guarantee that `appliedTo` will never
+ * produce a higher-kinded application with a type lambda as type constructor.
*/
- def hkCombine(tp1: Type, tp2: Type,
- tparams1: List[TypeSymbol], tparams2: List[TypeSymbol], op: (Type, Type) => Type)
- (implicit ctx: Context): Type = {
- val variances = (tparams1, tparams2).zipped.map { (tparam1, tparam2) =>
- val v1 = tparam1.variance
- val v2 = tparam2.variance
- if (v1 == v2) v1 else 0
+ class Reducer(tycon: PolyType, args: List[Type])(implicit ctx: Context) extends TypeMap {
+ private var available = (0 until args.length).toSet
+ var allReplaced = true
+ def hasWildcardArg(p: PolyParam) =
+ p.binder == tycon && args(p.paramNum).isInstanceOf[TypeBounds]
+ def canReduceWildcard(p: PolyParam) =
+ !ctx.mode.is(Mode.AllowLambdaWildcardApply) || available.contains(p.paramNum)
+ def apply(t: Type) = t match {
+ case t @ TypeAlias(p: PolyParam) if hasWildcardArg(p) && canReduceWildcard(p) =>
+ available -= p.paramNum
+ args(p.paramNum)
+ case p: PolyParam if p.binder == tycon =>
+ args(p.paramNum) match {
+ case TypeBounds(lo, hi) =>
+ if (ctx.mode.is(Mode.AllowLambdaWildcardApply)) { allReplaced = false; p }
+ else if (variance < 0) lo
+ else hi
+ case arg =>
+ arg
+ }
+ case _: TypeBounds | _: HKApply =>
+ val saved = available
+ available = Set()
+ try mapOver(t)
+ finally available = saved
+ case _ =>
+ mapOver(t)
}
- val bounds: List[RefinedType => TypeBounds] =
- (tparams1, tparams2).zipped.map { (tparam1, tparam2) =>
- val b1: RefinedType => TypeBounds =
- tp1.memberInfo(tparam1).bounds.internalizeFrom(tparams1)
- val b2: RefinedType => TypeBounds =
- tp2.memberInfo(tparam2).bounds.internalizeFrom(tparams2)
- (rt: RefinedType) => b1(rt) & b2(rt)
- }
- val app1: RefinedType => Type = rt => tp1.appliedTo(argRefs(rt, tparams1.length))
- val app2: RefinedType => Type = rt => tp2.appliedTo(argRefs(rt, tparams2.length))
- val body: RefinedType => Type = rt => op(app1(rt), app2(rt))
- TypeLambda(variances, bounds, body)
}
}
@@ -234,136 +208,152 @@ class TypeApplications(val self: Type) extends AnyVal {
* with the bounds on its hk args. See `LambdaAbstract`, where these
* types get introduced, and see `isBoundedLambda` below for the test.
*/
- final def typeParams(implicit ctx: Context): List[TypeSymbol] = /*>|>*/ track("typeParams") /*<|<*/ {
+ final def typeParams(implicit ctx: Context): List[TypeParamInfo] = /*>|>*/ track("typeParams") /*<|<*/ {
self match {
case self: ClassInfo =>
self.cls.typeParams
+ case self: PolyType =>
+ self.typeParams
case self: TypeRef =>
val tsym = self.symbol
if (tsym.isClass) tsym.typeParams
- else tsym.infoOrCompleter match {
- case completer: TypeParamsCompleter =>
- val tparams = completer.completerTypeParams(tsym)
- defn.LambdaTrait(tparams.map(_.variance)).typeParams
- case _ =>
- if (!tsym.isCompleting || tsym.isAliasType) tsym.info.typeParams
- else
- // We are facing a problem when computing the type parameters of an uncompleted
- // abstract type. We can't access the bounds of the symbol yet because that
- // would cause a cause a cyclic reference. So we return `Nil` instead
- // and try to make up for it later. The acrobatics in Scala2Unpicker#readType
- // for reading a TypeRef show what's needed.
- Nil
- }
+ else if (!tsym.isCompleting) tsym.info.typeParams
+ else Nil
case self: RefinedType =>
- // inlined and optimized version of
- // val sym = self.LambdaTrait
- // if (sym.exists) return sym.typeParams
- if (self.refinedName == tpnme.hkApply) {
- val sym = self.parent.classSymbol
- if (sym.isLambdaTrait) return sym.typeParams
- }
- self.parent.typeParams.filterNot(_.name == self.refinedName)
- case self: SingletonType =>
+ self.parent.typeParams.filterNot(_.paramName == self.refinedName)
+ case self: RecType =>
+ self.parent.typeParams
+ case _: SingletonType =>
Nil
+ case self: WildcardType =>
+ self.optBounds.typeParams
case self: TypeProxy =>
- self.underlying.typeParams
+ self.superType.typeParams
case _ =>
Nil
}
}
- /** The Lambda trait underlying a type lambda */
- def LambdaTrait(implicit ctx: Context): Symbol = self.stripTypeVar match {
- case RefinedType(parent, tpnme.hkApply) =>
- val sym = self.classSymbol
- if (sym.isLambdaTrait) sym else NoSymbol
- case TypeBounds(lo, hi) => hi.LambdaTrait
- case _ => NoSymbol
- }
+ /** If `self` is a higher-kinded type, its type parameters, otherwise Nil */
+ final def hkTypeParams(implicit ctx: Context): List[TypeParamInfo] =
+ if (isHK) typeParams else Nil
- /** Is receiver type higher-kinded (i.e. of kind != "*")? */
- def isHK(implicit ctx: Context): Boolean = self.dealias match {
- case self: TypeRef => self.info.isHK
- case RefinedType(_, name) => name == tpnme.hkApply
- case TypeBounds(_, hi) => hi.isHK
- case _ => false
- }
-
- /** is receiver of the form T#$Apply? */
- def isHKApply: Boolean = self match {
- case TypeRef(_, name) => name == tpnme.hkApply
- case _ => false
+ /** If `self` is a generic class, its type parameter symbols, otherwise Nil */
+ final def typeParamSymbols(implicit ctx: Context): List[TypeSymbol] = typeParams match {
+ case (_: Symbol) :: _ =>
+ assert(typeParams.forall(_.isInstanceOf[Symbol]))
+ typeParams.asInstanceOf[List[TypeSymbol]]
+ case _ => Nil
}
- /** True if it can be determined without forcing that the class symbol
- * of this application exists and is not a lambda trait.
- * Equivalent to
- *
- * self.classSymbol.exists && !self.classSymbol.isLambdaTrait
- *
- * but without forcing anything.
+ /** The named type parameters declared or inherited by this type.
+ * These are all uninstantiated named type parameters of this type or one
+ * of its base types.
*/
- def classNotLambda(implicit ctx: Context): Boolean = self.stripTypeVar match {
+ final def namedTypeParams(implicit ctx: Context): Set[TypeSymbol] = self match {
+ case self: ClassInfo =>
+ self.cls.namedTypeParams
case self: RefinedType =>
- self.parent.classNotLambda
- case self: TypeRef =>
- self.denot.exists && {
- val sym = self.symbol
- if (sym.isClass) !sym.isLambdaTrait
- else sym.isCompleted && self.info.isAlias && self.info.bounds.hi.classNotLambda
- }
+ self.parent.namedTypeParams.filterNot(_.name == self.refinedName)
+ case self: SingletonType =>
+ Set()
+ case self: TypeProxy =>
+ self.underlying.namedTypeParams
case _ =>
- false
+ Set()
}
- /** Replace references to type parameters with references to hk arguments `this.$hk_i`
- * Care is needed not to cause cyclic reference errors, hence `SafeSubstMap`.
+ /** The smallest supertype of this type that instantiated none of the named type parameters
+ * in `params`. That is, for each named type parameter `p` in `params`, either there is
+ * no type field named `p` in this type, or `p` is a named type parameter of this type.
+ * The first case is important for the recursive case of AndTypes, because some of their operands might
+ * be missing the named parameter altogether, but the AndType as a whole can still
+ * contain it.
*/
- private[TypeApplications] def internalizeFrom[T <: Type](tparams: List[Symbol])(implicit ctx: Context): RefinedType => T =
- (rt: RefinedType) =>
- new ctx.SafeSubstMap(tparams , argRefs(rt, tparams.length))
- .apply(self).asInstanceOf[T]
+ final def widenToNamedTypeParams(params: Set[TypeSymbol])(implicit ctx: Context): Type = {
+
+ /** Is widening not needed for `tp`? */
+ def isOK(tp: Type) = {
+ val ownParams = tp.namedTypeParams
+ def isMissingOrOpen(param: TypeSymbol) = {
+ val ownParam = tp.nonPrivateMember(param.name).symbol
+ !ownParam.exists || ownParams.contains(ownParam.asType)
+ }
+ params.forall(isMissingOrOpen)
+ }
+
+ /** Widen type by forming the intersection of its widened parents */
+ def widenToParents(tp: Type) = {
+ val parents = tp.parents.map(p =>
+ tp.baseTypeWithArgs(p.symbol).widenToNamedTypeParams(params))
+ parents.reduceLeft(ctx.typeComparer.andType(_, _))
+ }
+
+ if (isOK(self)) self
+ else self match {
+ case self @ AppliedType(tycon, args) if !isOK(tycon) =>
+ widenToParents(self)
+ case self: TypeRef if self.symbol.isClass =>
+ widenToParents(self)
+ case self: RefinedType =>
+ val parent1 = self.parent.widenToNamedTypeParams(params)
+ if (params.exists(_.name == self.refinedName)) parent1
+ else self.derivedRefinedType(parent1, self.refinedName, self.refinedInfo)
+ case self: TypeProxy =>
+ self.superType.widenToNamedTypeParams(params)
+ case self: AndOrType =>
+ self.derivedAndOrType(
+ self.tp1.widenToNamedTypeParams(params), self.tp2.widenToNamedTypeParams(params))
+ }
+ }
+
+ /** Is self type higher-kinded (i.e. of kind != "*")? */
+ def isHK(implicit ctx: Context): Boolean = self.dealias match {
+ case self: TypeRef => self.info.isHK
+ case self: RefinedType => false
+ case self: PolyType => true
+ case self: SingletonType => false
+ case self: TypeVar =>
+ // Using `origin` instead of `underlying`, as is done for typeParams,
+ // avoids having to set ephemeral in some cases.
+ self.origin.isHK
+ case self: WildcardType => self.optBounds.isHK
+ case self: TypeProxy => self.superType.isHK
+ case _ => false
+ }
+
+ /** Dealias type if it can be done without forcing the TypeRef's info */
+ def safeDealias(implicit ctx: Context): Type = self match {
+ case self: TypeRef if self.denot.exists && self.symbol.isAliasType =>
+ self.superType.stripTypeVar.safeDealias
+ case _ =>
+ self
+ }
/** Lambda abstract `self` with given type parameters. Examples:
*
* type T[X] = U becomes type T = [X] -> U
- * type T[X] >: L <: U becomes type T >: L <: ([X] -> _ <: U)
+ * type T[X] >: L <: U becomes type T >: L <: ([X] -> U)
+ *
+ * TODO: Handle parameterized lower bounds
*/
- def LambdaAbstract(tparams: List[Symbol])(implicit ctx: Context): Type = {
-
- /** Replace references to type parameters with references to hk arguments `this.$hk_i`
- * Care is needed not to cause cycles, hence `SafeSubstMap`.
- */
- def internalize[T <: Type](tp: T) =
- (rt: RefinedType) =>
- new ctx.SafeSubstMap(tparams, argRefs(rt, tparams.length))
- .apply(tp).asInstanceOf[T]
-
- def expand(tp: Type) = {
- TypeLambda(
- tparams.map(_.variance),
- tparams.map(tparam => internalize(self.memberInfo(tparam).bounds)),
- internalize(tp))
- }
+ def LambdaAbstract(tparams: List[TypeParamInfo])(implicit ctx: Context): Type = {
+ def expand(tp: Type) =
+ PolyType(
+ tpnme.syntheticLambdaParamNames(tparams.length), tparams.map(_.paramVariance))(
+ tl => tparams.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tl.lifted(tparams, tp))
self match {
case self: TypeAlias =>
self.derivedTypeAlias(expand(self.alias))
case self @ TypeBounds(lo, hi) =>
- self.derivedTypeBounds(lo, expand(TypeBounds.upper(hi)))
+ self.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else expand(lo),
+ expand(hi))
case _ => expand(self)
}
}
- /** A type ref is eta expandable if it refers to a non-lambda class.
- * In that case we can look for parameterized base types of the type
- * to eta expand them.
- */
- def isEtaExpandable(implicit ctx: Context) = self match {
- case self: TypeRef => self.symbol.isClass && !self.name.isLambdaTraitName
- case _ => false
- }
-
/** Convert a type constructor `TC` which has type parameters `T1, ..., Tn`
* in a context where type parameters `U1,...,Un` are expected to
*
@@ -375,23 +365,18 @@ class TypeApplications(val self: Type) extends AnyVal {
* v1 is compatible with v2, if v1 = v2 or v2 is non-variant.
*/
def EtaExpand(tparams: List[TypeSymbol])(implicit ctx: Context): Type = {
- val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParams
+ val tparamsToUse = if (variancesConform(typeParams, tparams)) tparams else typeParamSymbols
self.appliedTo(tparams map (_.typeRef)).LambdaAbstract(tparamsToUse)
//.ensuring(res => res.EtaReduce =:= self, s"res = $res, core = ${res.EtaReduce}, self = $self, hc = ${res.hashCode}")
}
- /** Eta expand the prefix in front of any refinements. */
- def EtaExpandCore(implicit ctx: Context): Type = self.stripTypeVar match {
- case self: RefinedType =>
- self.derivedRefinedType(self.parent.EtaExpandCore, self.refinedName, self.refinedInfo)
- case _ =>
- self.EtaExpand(self.typeParams)
- }
+ /** If self is not higher-kinded, eta expand it. */
+ def ensureHK(implicit ctx: Context): Type =
+ if (isHK) self else EtaExpansion(self)
/** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */
- def etaExpandIfHK(bound: Type)(implicit ctx: Context): Type = {
- val boundLambda = bound.LambdaTrait
- val hkParams = boundLambda.typeParams
+ def EtaExpandIfHK(bound: Type)(implicit ctx: Context): Type = {
+ val hkParams = bound.hkTypeParams
if (hkParams.isEmpty) self
else self match {
case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length =>
@@ -432,21 +417,21 @@ class TypeApplications(val self: Type) extends AnyVal {
* is covariant is irrelevant, so can be ignored.
*/
def adaptHkVariances(bound: Type)(implicit ctx: Context): Type = {
- val boundLambda = bound.LambdaTrait
- val hkParams = boundLambda.typeParams
+ val hkParams = bound.hkTypeParams
if (hkParams.isEmpty) self
else {
def adaptArg(arg: Type): Type = arg match {
- case arg: TypeRef if arg.symbol.isLambdaTrait &&
- !arg.symbol.typeParams.corresponds(hkParams)(_.variance == _.variance) &&
- arg.symbol.typeParams.corresponds(hkParams)(varianceConforms) =>
- arg.prefix.select(boundLambda)
- case arg: RefinedType =>
- arg.derivedRefinedType(adaptArg(arg.parent), arg.refinedName, arg.refinedInfo)
+ case arg @ PolyType(tparams, body) if
+ !tparams.corresponds(hkParams)(_.paramVariance == _.paramVariance) &&
+ tparams.corresponds(hkParams)(varianceConforms) =>
+ PolyType(tparams.map(_.paramName), hkParams.map(_.paramVariance))(
+ tl => arg.paramBounds.map(_.subst(arg, tl).bounds),
+ tl => arg.resultType.subst(arg, tl)
+ )
case arg @ TypeAlias(alias) =>
arg.derivedTypeAlias(adaptArg(alias))
case arg @ TypeBounds(lo, hi) =>
- arg.derivedTypeBounds(lo, adaptArg(hi))
+ arg.derivedTypeBounds(adaptArg(lo), adaptArg(hi))
case _ =>
arg
}
@@ -454,99 +439,106 @@ class TypeApplications(val self: Type) extends AnyVal {
}
}
- /** Encode
+ /** The type representing
*
* T[U1, ..., Un]
*
* where
* @param self = `T`
* @param args = `U1,...,Un`
- * performing the following simplifications
- *
- * 1. If `T` is an eta expansion `[X1,..,Xn] -> C[X1,...,Xn]` of class `C` compute
- * `C[U1, ..., Un]` instead.
- * 2. If `T` is some other type lambda `[X1,...,Xn] -> S` none of the arguments
- * `U1,...,Un` is a wildcard, compute `[X1:=U1, ..., Xn:=Un]S` instead.
- * 3. If `T` is a polytype, instantiate it to `U1,...,Un`.
*/
final def appliedTo(args: List[Type])(implicit ctx: Context): Type = /*>|>*/ track("appliedTo") /*<|<*/ {
- def substHkArgs = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case TypeRef(RefinedThis(rt), name) if rt.eq(self) && name.isHkArgName =>
- args(name.hkArgIndex)
- case _ =>
- mapOver(tp)
- }
- }
- if (args.isEmpty || ctx.erasedTypes) self
- else self.stripTypeVar match {
- case EtaExpansion(self1) =>
- self1.appliedTo(args)
- case TypeLambda(_, _, body) if !args.exists(_.isInstanceOf[TypeBounds]) =>
- substHkArgs(body)
- case self: PolyType =>
- self.instantiate(args)
- case _ =>
- appliedTo(args, typeParams)
- }
- }
-
- /** Encode application `T[U1, ..., Un]` without simplifications, where
- * @param self = `T`
- * @param args = `U1, ..., Un`
- * @param tparams are assumed to be the type parameters of `T`.
- */
- final def appliedTo(args: List[Type], typParams: List[TypeSymbol])(implicit ctx: Context): Type = {
- def matchParams(t: Type, tparams: List[TypeSymbol], args: List[Type])(implicit ctx: Context): Type = args match {
+ val typParams = self.typeParams
+ def matchParams(t: Type, tparams: List[TypeParamInfo], args: List[Type])(implicit ctx: Context): Type = args match {
case arg :: args1 =>
try {
val tparam :: tparams1 = tparams
- matchParams(RefinedType(t, tparam.name, arg.toBounds(tparam)), tparams1, args1)
+ matchParams(RefinedType(t, tparam.paramName, arg.toBounds(tparam)), tparams1, args1)
} catch {
case ex: MatchError =>
- println(s"applied type mismatch: $self $args, typeParams = $typParams") // !!! DEBUG
+ println(s"applied type mismatch: $self with underlying ${self.underlyingIfProxy}, args = $args, typeParams = $typParams") // !!! DEBUG
//println(s"precomplete decls = ${self.typeSymbol.unforcedDecls.toList.map(_.denot).mkString("\n ")}")
throw ex
}
case nil => t
}
- assert(args.nonEmpty)
- matchParams(self, typParams, args) match {
- case refined @ RefinedType(_, pname) if pname.isHkArgName =>
- TypeRef(refined, tpnme.hkApply)
- case refined =>
- refined
+ val stripped = self.stripTypeVar
+ val dealiased = stripped.safeDealias
+ if (args.isEmpty || ctx.erasedTypes) self
+ else dealiased match {
+ case dealiased: PolyType =>
+ def tryReduce =
+ if (!args.exists(_.isInstanceOf[TypeBounds])) {
+ val followAlias = Config.simplifyApplications && {
+ dealiased.resType match {
+ case AppliedType(tyconBody, _) =>
+ variancesConform(typParams, tyconBody.typeParams)
+ // Reducing is safe for type inference, as kind of type constructor does not change
+ case _ => false
+ }
+ }
+ if ((dealiased eq stripped) || followAlias) dealiased.instantiate(args)
+ else HKApply(self, args)
+ }
+ else dealiased.resType match {
+ case AppliedType(tycon, args1) if tycon.safeDealias ne tycon =>
+ // In this case we should always dealias since we cannot handle
+ // higher-kinded applications to wildcard arguments.
+ dealiased
+ .derivedPolyType(resType = tycon.safeDealias.appliedTo(args1))
+ .appliedTo(args)
+ case _ =>
+ val reducer = new Reducer(dealiased, args)
+ val reduced = reducer(dealiased.resType)
+ if (reducer.allReplaced) reduced
+ else HKApply(dealiased, args)
+ }
+ tryReduce
+ case dealiased: AndOrType =>
+ dealiased.derivedAndOrType(dealiased.tp1.appliedTo(args), dealiased.tp2.appliedTo(args))
+ case dealiased: TypeAlias =>
+ dealiased.derivedTypeAlias(dealiased.alias.appliedTo(args))
+ case dealiased: TypeBounds =>
+ dealiased.derivedTypeBounds(dealiased.lo.appliedTo(args), dealiased.hi.appliedTo(args))
+ case dealiased: LazyRef =>
+ LazyRef(() => dealiased.ref.appliedTo(args))
+ case dealiased: WildcardType =>
+ dealiased
+ case dealiased: TypeRef if dealiased.symbol == defn.NothingClass =>
+ dealiased
+ case _ if typParams.isEmpty || typParams.head.isInstanceOf[LambdaParam] =>
+ HKApply(self, args)
+ case dealiased =>
+ matchParams(dealiased, typParams, args)
}
}
final def appliedTo(arg: Type)(implicit ctx: Context): Type = appliedTo(arg :: Nil)
final def appliedTo(arg1: Type, arg2: Type)(implicit ctx: Context): Type = appliedTo(arg1 :: arg2 :: Nil)
+ final def applyIfParameterized(args: List[Type])(implicit ctx: Context): Type =
+ if (typeParams.nonEmpty) appliedTo(args) else self
+
/** A cycle-safe version of `appliedTo` where computing type parameters do not force
* the typeconstructor. Instead, if the type constructor is completing, we make
* up hk type parameters matching the arguments. This is needed when unpickling
* Scala2 files such as `scala.collection.generic.Mapfactory`.
*/
- final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = {
- val safeTypeParams = self match {
- case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting =>
- // This happens when unpickling e.g. scala$collection$generic$GenMapFactory$$CC
- ctx.warning(i"encountered F-bounded higher-kinded type parameters for ${self.symbol}; assuming they are invariant")
- defn.LambdaTrait(args map alwaysZero).typeParams
- case _ =>
- typeParams
- }
- appliedTo(args, safeTypeParams)
+ final def safeAppliedTo(args: List[Type])(implicit ctx: Context) = self match {
+ case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting =>
+ HKApply(self, args)
+ case _ =>
+ appliedTo(args)
}
/** Turn this type, which is used as an argument for
* type parameter `tparam`, into a TypeBounds RHS
*/
- final def toBounds(tparam: Symbol)(implicit ctx: Context): TypeBounds = self match {
+ final def toBounds(tparam: TypeParamInfo)(implicit ctx: Context): TypeBounds = self match {
case self: TypeBounds => // this can happen for wildcard args
self
case _ =>
- val v = tparam.variance
+ val v = tparam.paramVariance
/* Not neeeded.
if (v > 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.upper(self)
else if (v < 0 && !(tparam is Local) && !(tparam is ExpandedTypeParam)) TypeBounds.lower(self)
@@ -560,7 +552,11 @@ class TypeApplications(val self: Type) extends AnyVal {
*/
final def baseArgInfos(base: Symbol)(implicit ctx: Context): List[Type] =
if (self derivesFrom base)
- base.typeParams map (param => self.member(param.name).info.argInfo)
+ self.dealias match {
+ case self: TypeRef if !self.symbol.isClass => self.superType.baseArgInfos(base)
+ case self: HKApply => self.superType.baseArgInfos(base)
+ case _ => base.typeParams.map(param => self.member(param.name).info.argInfo)
+ }
else
Nil
@@ -582,14 +578,6 @@ class TypeApplications(val self: Type) extends AnyVal {
final def baseArgTypesHi(base: Symbol)(implicit ctx: Context): List[Type] =
baseArgInfos(base) mapConserve boundsToHi
- /** The first type argument of the base type instance wrt `base` of this type */
- final def firstBaseArgInfo(base: Symbol)(implicit ctx: Context): Type = base.typeParams match {
- case param :: _ if self derivesFrom base =>
- self.member(param.name).info.argInfo
- case _ =>
- NoType
- }
-
/** The base type including all type arguments and applicable refinements
* of this type. Refinements are applicable if they refine a member of
* the parent type which furthermore is not a name-mangled type parameter.
@@ -603,10 +591,12 @@ class TypeApplications(val self: Type) extends AnyVal {
case TypeBounds(_, hi) => hi.baseTypeWithArgs(base)
case _ => default
}
- case tp @ RefinedType(parent, name) if !tp.member(name).symbol.is(ExpandedTypeParam) =>
+ case tp @ RefinedType(parent, name, _) if !tp.member(name).symbol.is(ExpandedTypeParam) =>
tp.wrapIfMember(parent.baseTypeWithArgs(base))
case tp: TermRef =>
tp.underlying.baseTypeWithArgs(base)
+ case tp: HKApply =>
+ tp.superType.baseTypeWithArgs(base)
case AndType(tp1, tp2) =>
tp1.baseTypeWithArgs(base) & tp2.baseTypeWithArgs(base)
case OrType(tp1, tp2) =>
@@ -661,17 +651,16 @@ class TypeApplications(val self: Type) extends AnyVal {
/** The core type without any type arguments.
* @param `typeArgs` must be the type arguments of this type.
*/
- final def withoutArgs(typeArgs: List[Type]): Type = typeArgs match {
- case _ :: typeArgs1 =>
- val RefinedType(tycon, _) = self
- tycon.withoutArgs(typeArgs1)
- case nil =>
- self
- }
-
- final def typeConstructor(implicit ctx: Context): Type = self.stripTypeVar match {
- case AppliedType(tycon, _) => tycon
- case self => self
+ final def withoutArgs(typeArgs: List[Type]): Type = self match {
+ case HKApply(tycon, args) => tycon
+ case _ =>
+ typeArgs match {
+ case _ :: typeArgs1 =>
+ val RefinedType(tycon, _, _) = self
+ tycon.withoutArgs(typeArgs1)
+ case nil =>
+ self
+ }
}
/** If this is the image of a type argument; recover the type argument,
@@ -693,33 +682,6 @@ class TypeApplications(val self: Type) extends AnyVal {
def elemType(implicit ctx: Context): Type = self match {
case defn.ArrayOf(elemtp) => elemtp
case JavaArrayType(elemtp) => elemtp
- case _ => firstBaseArgInfo(defn.SeqClass)
- }
-
- /** Does this type contain RefinedThis type with `target` as its underling
- * refinement type?
- */
- def containsRefinedThis(target: Type)(implicit ctx: Context): Boolean = {
- def recur(tp: Type): Boolean = tp.stripTypeVar match {
- case RefinedThis(tp) =>
- tp eq target
- case tp: NamedType =>
- if (tp.symbol.isClass) !tp.symbol.isStatic && recur(tp.prefix)
- else tp.info match {
- case TypeAlias(alias) => recur(alias)
- case _ => recur(tp.prefix)
- }
- case tp: RefinedType =>
- recur(tp.refinedInfo) || recur(tp.parent)
- case tp: TypeBounds =>
- recur(tp.lo) || recur(tp.hi)
- case tp: AnnotatedType =>
- recur(tp.underlying)
- case tp: AndOrType =>
- recur(tp.tp1) || recur(tp.tp2)
- case _ =>
- false
- }
- recur(self)
+ case _ => baseArgInfos(defn.SeqClass).headOption.getOrElse(NoType)
}
}
diff --git a/src/dotty/tools/dotc/core/TypeComparer.scala b/src/dotty/tools/dotc/core/TypeComparer.scala
index 4e7a4a75d..b495f00d0 100644
--- a/src/dotty/tools/dotc/core/TypeComparer.scala
+++ b/src/dotty/tools/dotc/core/TypeComparer.scala
@@ -3,14 +3,12 @@ package dotc
package core
import Types._, Contexts._, Symbols._, Flags._, Names._, NameOps._, Denotations._
-import typer.Mode
import Decorators._
import StdNames.{nme, tpnme}
import collection.mutable
-import printing.Disambiguation.disambiguated
import util.{Stats, DotClass, SimpleMap}
import config.Config
-import config.Printers._
+import config.Printers.{typr, constr, subtyping, noPrinter}
import TypeErasure.{erasedLub, erasedGlb}
import TypeApplications._
import scala.util.control.NonFatal
@@ -78,6 +76,19 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
myNothingType
}
+ /** Indicates whether a previous subtype check used GADT bounds */
+ var GADTused = false
+
+ /** Record that GADT bounds of `sym` were used in a subtype check.
+ * But exclude constructor type parameters, as these are aliased
+ * to the corresponding class parameters, which does not constitute
+ * a true usage of a GADT symbol.
+ */
+ private def GADTusage(sym: Symbol) = {
+ if (!sym.owner.isConstructor) GADTused = true
+ true
+ }
+
// Subtype testing `<:<`
def topLevelSubType(tp1: Type, tp2: Type): Boolean = {
@@ -89,7 +100,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
assert(isSatisfiable, constraint.show)
}
- protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) /*<|<*/ {
+ protected def isSubType(tp1: Type, tp2: Type): Boolean = ctx.traceIndented(s"isSubType ${traceInfo(tp1, tp2)}", subtyping) {
if (tp2 eq NoType) false
else if (tp1 eq tp2) true
else {
@@ -166,7 +177,13 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
// However the original judgment should be true.
case _ =>
}
- val sym1 = tp1.symbol
+ val sym1 =
+ if (tp1.symbol.is(ModuleClass) && tp2.symbol.is(ModuleVal))
+ // For convenience we want X$ <:< X.type
+ // This is safe because X$ self-type is X.type
+ tp1.symbol.companionModule
+ else
+ tp1.symbol
if ((sym1 ne NoSymbol) && (sym1 eq tp2.symbol))
ctx.erasedTypes ||
sym1.isStaticOwner ||
@@ -179,11 +196,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
&& !tp1.isInstanceOf[WithFixedSym]
&& !tp2.isInstanceOf[WithFixedSym]
) ||
- compareHkApply(tp1, tp2, inOrder = true) ||
- compareHkApply(tp2, tp1, inOrder = false) ||
thirdTryNamed(tp1, tp2)
case _ =>
- compareHkApply(tp2, tp1, inOrder = false) ||
secondTry(tp1, tp2)
}
}
@@ -238,10 +252,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case OrType(tp21, tp22) =>
if (tp21.stripTypeVar eq tp22.stripTypeVar) isSubType(tp1, tp21)
else secondTry(tp1, tp2)
- case TypeErasure.ErasedValueType(cls2, underlying2) =>
+ case TypeErasure.ErasedValueType(tycon1, underlying2) =>
def compareErasedValueType = tp1 match {
- case TypeErasure.ErasedValueType(cls1, underlying1) =>
- (cls1 eq cls2) && isSameType(underlying1, underlying2)
+ case TypeErasure.ErasedValueType(tycon2, underlying1) =>
+ (tycon1.symbol eq tycon2.symbol) && isSameType(underlying1, underlying2)
case _ =>
secondTry(tp1, tp2)
}
@@ -260,7 +274,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
if (tp1.prefix.isStable) return false
case _ =>
}
- compareHkApply(tp1, tp2, inOrder = true) ||
thirdTry(tp1, tp2)
case tp1: PolyParam =>
def flagNothingBound = {
@@ -311,8 +324,18 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case AndType(tp11, tp12) =>
if (tp11.stripTypeVar eq tp12.stripTypeVar) isSubType(tp11, tp2)
else thirdTry(tp1, tp2)
- case OrType(tp11, tp12) =>
- isSubType(tp11, tp2) && isSubType(tp12, tp2)
+ case tp1 @ OrType(tp11, tp12) =>
+ def joinOK = tp2.dealias match {
+ case tp12: HKApply =>
+ // If we apply the default algorithm for `A[X] | B[Y] <: C[Z]` where `C` is a
+ // type parameter, we will instantiate `C` to `A` and then fail when comparing
+ // with `B[Y]`. To do the right thing, we need to instantiate `C` to the
+ // common superclass of `A` and `B`.
+ isSubType(tp1.join, tp2)
+ case _ =>
+ false
+ }
+ joinOK || isSubType(tp11, tp2) && isSubType(tp12, tp2)
case ErrorType =>
true
case _ =>
@@ -325,7 +348,8 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val gbounds2 = ctx.gadt.bounds(tp2.symbol)
(gbounds2 != null) &&
(isSubTypeWhenFrozen(tp1, gbounds2.lo) ||
- narrowGADTBounds(tp2, tp1, isUpper = false))
+ narrowGADTBounds(tp2, tp1, isUpper = false)) &&
+ GADTusage(tp2.symbol)
}
((frozenConstraint || !isCappable(tp1)) && isSubType(tp1, lo2) ||
compareGADT ||
@@ -369,16 +393,64 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
// This twist is needed to make collection/generic/ParFactory.scala compile
fourthTry(tp1, tp2) || compareRefinedSlow
case _ =>
- compareRefinedSlow ||
- fourthTry(tp1, tp2) ||
- compareHkLambda(tp2, tp1, inOrder = false) ||
- compareAliasedRefined(tp2, tp1, inOrder = false)
+ compareRefinedSlow || fourthTry(tp1, tp2)
}
else // fast path, in particular for refinements resulting from parameterization.
- isSubType(tp1, skipped2) &&
- isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2)
+ isSubRefinements(tp1w.asInstanceOf[RefinedType], tp2, skipped2) &&
+ isSubType(tp1, skipped2)
}
compareRefined
+ case tp2: RecType =>
+ def compareRec = tp1.safeDealias match {
+ case tp1: RecType =>
+ val rthis1 = RecThis(tp1)
+ isSubType(tp1.parent, tp2.parent.substRecThis(tp2, rthis1))
+ case _ =>
+ val tp1stable = ensureStableSingleton(tp1)
+ isSubType(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable))
+ }
+ compareRec
+ case tp2 @ HKApply(tycon2, args2) =>
+ compareHkApply2(tp1, tp2, tycon2, args2)
+ case tp2 @ PolyType(tparams2, body2) =>
+ def compareHkLambda: Boolean = tp1.stripTypeVar match {
+ case tp1 @ PolyType(tparams1, body1) =>
+ /* Don't compare bounds of lambdas under language:Scala2, or t2994 will fail
+ * The issue is that, logically, bounds should compare contravariantly,
+ * but that would invalidate a pattern exploited in t2994:
+ *
+ * [X0 <: Number] -> Number <:< [X0] -> Any
+ *
+ * Under the new scheme, `[X0] -> Any` is NOT a kind that subsumes
+ * all other bounds. You'd have to write `[X0 >: Any <: Nothing] -> Any` instead.
+ * This might look weird, but is the only logically correct way to do it.
+ *
+ * Note: it would be nice if this could trigger a migration warning, but I
+ * am not sure how, since the code is buried so deep in subtyping logic.
+ */
+ def boundsOK =
+ ctx.scala2Mode ||
+ tparams1.corresponds(tparams2)((tparam1, tparam2) =>
+ isSubType(tparam2.paramBounds.subst(tp2, tp1), tparam1.paramBounds))
+ val saved = comparedPolyTypes
+ comparedPolyTypes += tp1
+ comparedPolyTypes += tp2
+ try
+ variancesConform(tparams1, tparams2) &&
+ boundsOK &&
+ isSubType(body1, body2.subst(tp2, tp1))
+ finally comparedPolyTypes = saved
+ case _ =>
+ if (!tp1.isHK) {
+ tp2 match {
+ case EtaExpansion(tycon2) if tycon2.symbol.isClass =>
+ return isSubType(tp1, tycon2)
+ case _ =>
+ }
+ }
+ fourthTry(tp1, tp2)
+ }
+ compareHkLambda
case OrType(tp21, tp22) =>
// Rewrite T1 <: (T211 & T212) | T22 to T1 <: (T211 | T22) and T1 <: (T212 | T22)
// and analogously for T1 <: T21 | (T221 & T222)
@@ -395,11 +467,11 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
return isSubType(tp1, OrType(tp21, tp221)) && isSubType(tp1, OrType(tp21, tp222))
case _ =>
}
- eitherIsSubType(tp1, tp21, tp1, tp22) || fourthTry(tp1, tp2)
+ either(isSubType(tp1, tp21), isSubType(tp1, tp22)) || fourthTry(tp1, tp2)
case tp2 @ MethodType(_, formals2) =>
def compareMethod = tp1 match {
case tp1 @ MethodType(_, formals1) =>
- (tp1.signature sameParams tp2.signature) &&
+ (tp1.signature consistentParams tp2.signature) &&
matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
tp1.isImplicit == tp2.isImplicit && // needed?
isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
@@ -407,16 +479,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
false
}
compareMethod
- case tp2: PolyType =>
- def comparePoly = tp1 match {
- case tp1: PolyType =>
- (tp1.signature sameParams tp2.signature) &&
- matchingTypeParams(tp1, tp2) &&
- isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1))
- case _ =>
- false
- }
- comparePoly
case tp2 @ ExprType(restpe2) =>
def compareExpr = tp1 match {
// We allow ()T to be a subtype of => T.
@@ -460,41 +522,42 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val gbounds1 = ctx.gadt.bounds(tp1.symbol)
(gbounds1 != null) &&
(isSubTypeWhenFrozen(gbounds1.hi, tp2) ||
- narrowGADTBounds(tp1, tp2, isUpper = true))
+ narrowGADTBounds(tp1, tp2, isUpper = true)) &&
+ GADTusage(tp1.symbol)
}
isSubType(hi1, tp2) || compareGADT
case _ =>
def isNullable(tp: Type): Boolean = tp.dealias match {
case tp: TypeRef => tp.symbol.isNullableClass
- case RefinedType(parent, _) => isNullable(parent)
+ case tp: RefinedOrRecType => isNullable(tp.parent)
case AndType(tp1, tp2) => isNullable(tp1) && isNullable(tp2)
case OrType(tp1, tp2) => isNullable(tp1) || isNullable(tp2)
case _ => false
}
- (tp1.symbol eq NothingClass) && tp2.isInstanceOf[ValueType] ||
+ (tp1.symbol eq NothingClass) && tp2.isValueTypeOrLambda ||
(tp1.symbol eq NullClass) && isNullable(tp2)
}
case tp1: SingletonType =>
/** if `tp2 == p.type` and `p: q.type` then try `tp1 <:< q.type` as a last effort.*/
def comparePaths = tp2 match {
case tp2: TermRef =>
- tp2.info match {
- case tp2i: TermRef =>
- isSubType(tp1, tp2i)
- case ExprType(tp2i: TermRef) if (ctx.phase.id > ctx.gettersPhase.id) =>
- // After getters, val x: T becomes def x: T
- isSubType(tp1, tp2i)
- case _ =>
- false
+ tp2.info.widenExpr match {
+ case tp2i: SingletonType =>
+ isSubType(tp1, tp2i) // see z1720.scala for a case where this can arise even in typer.
+ case _ => false
}
case _ =>
false
}
isNewSubType(tp1.underlying.widenExpr, tp2) || comparePaths
case tp1: RefinedType =>
- isNewSubType(tp1.parent, tp2) ||
- compareHkLambda(tp1, tp2, inOrder = true) ||
- compareAliasedRefined(tp1, tp2, inOrder = true)
+ isNewSubType(tp1.parent, tp2)
+ case tp1: RecType =>
+ isNewSubType(tp1.parent, tp2)
+ case tp1 @ HKApply(tycon1, args1) =>
+ compareHkApply1(tp1, tycon1, args1, tp2)
+ case EtaExpansion(tycon1) =>
+ isSubType(tycon1, tp2)
case AndType(tp11, tp12) =>
// Rewrite (T111 | T112) & T12 <: T2 to (T111 & T12) <: T2 and (T112 | T12) <: T2
// and analogously for T11 & (T121 | T122) & T12 <: T2
@@ -511,7 +574,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
return isSubType(AndType(tp11, tp121), tp2) && isSubType(AndType(tp11, tp122), tp2)
case _ =>
}
- eitherIsSubType(tp11, tp2, tp12, tp2)
+ either(isSubType(tp11, tp2), isSubType(tp12, tp2))
case JavaArrayType(elem1) =>
def compareJavaArray = tp2 match {
case JavaArrayType(elem2) => isSubType(elem1, elem2)
@@ -525,13 +588,179 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
false
}
+ /** Subtype test for the hk application `tp2 = tycon2[args2]`.
+ */
+ def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ val tparams = tycon2.typeParams
+ if (tparams.isEmpty) return false // can happen for ill-typed programs, e.g. neg/tcpoly_overloaded.scala
+
+ /** True if `tp1` and `tp2` have compatible type constructors and their
+ * corresponding arguments are subtypes relative to their variance (see `isSubArgs`).
+ */
+ def isMatchingApply(tp1: Type): Boolean = tp1 match {
+ case HKApply(tycon1, args1) =>
+ tycon1.dealias match {
+ case tycon1: PolyParam =>
+ (tycon1 == tycon2 ||
+ canConstrain(tycon1) && tryInstantiate(tycon1, tycon2)) &&
+ isSubArgs(args1, args2, tparams)
+ case tycon1: TypeRef =>
+ tycon2.dealias match {
+ case tycon2: TypeRef if tycon1.symbol == tycon2.symbol =>
+ isSubType(tycon1.prefix, tycon2.prefix) &&
+ isSubArgs(args1, args2, tparams)
+ case _ =>
+ false
+ }
+ case tycon1: TypeVar =>
+ isMatchingApply(tycon1.underlying)
+ case tycon1: AnnotatedType =>
+ isMatchingApply(tycon1.underlying)
+ case _ =>
+ false
+ }
+ case _ =>
+ false
+ }
+
+ /** `param2` can be instantiated to a type application prefix of the LHS
+ * or to a type application prefix of one of the LHS base class instances
+ * and the resulting type application is a supertype of `tp1`,
+ * or fallback to fourthTry.
+ */
+ def canInstantiate(tycon2: PolyParam): Boolean = {
+
+ /** Let
+ *
+ * `tparams_1, ..., tparams_k-1` be the type parameters of the rhs
+ * `tparams1_1, ..., tparams1_n-1` be the type parameters of the constructor of the lhs
+ * `args1_1, ..., args1_n-1` be the type arguments of the lhs
+ * `d = n - k`
+ *
+ * Returns `true` iff `d >= 0` and `tycon2` can be instantiated to
+ *
+ * [tparams1_d, ... tparams1_n-1] -> tycon1a[args_1, ..., args_d-1, tparams_d, ... tparams_n-1]
+ *
+ * such that the resulting type application is a supertype of `tp1`.
+ */
+ def tyconOK(tycon1a: Type, args1: List[Type]) = {
+ var tycon1b = tycon1a
+ val tparams1a = tycon1a.typeParams
+ val lengthDiff = tparams1a.length - tparams.length
+ lengthDiff >= 0 && {
+ val tparams1 = tparams1a.drop(lengthDiff)
+ variancesConform(tparams1, tparams) && {
+ if (lengthDiff > 0)
+ tycon1b = PolyType(tparams1.map(_.paramName), tparams1.map(_.paramVariance))(
+ tl => tparams1.map(tparam => tl.lifted(tparams, tparam.paramBounds).bounds),
+ tl => tycon1a.appliedTo(args1.take(lengthDiff) ++
+ tparams1.indices.toList.map(PolyParam(tl, _))))
+ (ctx.mode.is(Mode.TypevarsMissContext) ||
+ tryInstantiate(tycon2, tycon1b.ensureHK)) &&
+ isSubType(tp1, tycon1b.appliedTo(args2))
+ }
+ }
+ }
+
+ tp1.widen match {
+ case tp1w @ HKApply(tycon1, args1) =>
+ tyconOK(tycon1, args1)
+ case tp1w =>
+ tp1w.typeSymbol.isClass && {
+ val classBounds = tycon2.classSymbols
+ def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match {
+ case bc :: bcs1 =>
+ classBounds.exists(bc.derivesFrom) &&
+ tyconOK(tp1w.baseTypeRef(bc), tp1w.baseArgInfos(bc)) ||
+ liftToBase(bcs1)
+ case _ =>
+ false
+ }
+ liftToBase(tp1w.baseClasses)
+ } ||
+ fourthTry(tp1, tp2)
+ }
+ }
+
+ /** Fall back to comparing either with `fourthTry` or against the lower
+ * approximation of the rhs.
+ * @param tyconLo The type constructor's lower approximation.
+ */
+ def fallback(tyconLo: Type) =
+ either(fourthTry(tp1, tp2), isSubType(tp1, tyconLo.applyIfParameterized(args2)))
+
+ /** Let `tycon2bounds` be the bounds of the RHS type constructor `tycon2`.
+ * Let `app2 = tp2` where the type constructor of `tp2` is replaced by
+ * `tycon2bounds.lo`.
+ * If both bounds are the same, continue with `tp1 <:< app2`.
+ * otherwise continue with either
+ *
+ * tp1 <:< tp2 using fourthTry (this might instantiate params in tp1)
+ * tp1 <:< app2 using isSubType (this might instantiate params in tp2)
+ */
+ def compareLower(tycon2bounds: TypeBounds, tyconIsTypeRef: Boolean): Boolean =
+ if (tycon2bounds.lo eq tycon2bounds.hi)
+ isSubType(tp1,
+ if (tyconIsTypeRef) tp2.superType
+ else tycon2bounds.lo.applyIfParameterized(args2))
+ else
+ fallback(tycon2bounds.lo)
+
+ tycon2 match {
+ case param2: PolyParam =>
+ isMatchingApply(tp1) || {
+ if (canConstrain(param2)) canInstantiate(param2)
+ else compareLower(bounds(param2), tyconIsTypeRef = false)
+ }
+ case tycon2: TypeRef =>
+ isMatchingApply(tp1) ||
+ compareLower(tycon2.info.bounds, tyconIsTypeRef = true)
+ case _: TypeVar | _: AnnotatedType =>
+ isSubType(tp1, tp2.superType)
+ case tycon2: HKApply =>
+ fallback(tycon2.lowerBound)
+ case _ =>
+ false
+ }
+ }
+
+ /** Subtype test for the hk application `tp1 = tycon1[args1]`.
+ */
+ def compareHkApply1(tp1: HKApply, tycon1: Type, args1: List[Type], tp2: Type): Boolean =
+ tycon1 match {
+ case param1: PolyParam =>
+ def canInstantiate = tp2 match {
+ case AppliedType(tycon2, args2) =>
+ tryInstantiate(param1, tycon2.ensureHK) && isSubArgs(args1, args2, tycon2.typeParams)
+ case _ =>
+ false
+ }
+ canConstrain(param1) && canInstantiate ||
+ isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2)
+ case tycon1: TypeProxy =>
+ isSubType(tp1.superType, tp2)
+ case _ =>
+ false
+ }
+
+ /** Subtype test for corresponding arguments in `args1`, `args2` according to
+ * variances in type parameters `tparams`.
+ */
+ def isSubArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo]): Boolean =
+ if (args1.isEmpty) args2.isEmpty
+ else args2.nonEmpty && {
+ val v = tparams.head.paramVariance
+ (v > 0 || isSubType(args2.head, args1.head)) &&
+ (v < 0 || isSubType(args1.head, args2.head))
+ } && isSubArgs(args1.tail, args2.tail, tparams)
+
/** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` where
* - `B` derives from one of the class symbols of `tp2`,
* - the type parameters of `B` match one-by-one the variances of `tparams`,
* - `B` satisfies predicate `p`.
*/
- private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeSymbol], p: Type => Boolean): Boolean = {
- val classBounds = tp2.member(tpnme.hkApply).info.classSymbols
+ private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = {
+ val classBounds = tp2.classSymbols
def recur(bcs: List[ClassSymbol]): Boolean = bcs match {
case bc :: bcs1 =>
val baseRef = tp1.baseTypeRef(bc)
@@ -546,108 +775,28 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
recur(tp1.baseClasses)
}
- /** If `projection` is a hk projection T#$apply with a constrainable poly param
- * as type constructor and `other` is not a hk projection, then perform the following
- * steps:
- *
- * (1) If not `inOrder` then perform the next steps until they all succeed
- * for each base type of other which
- * - derives from a class bound of `projection`,
- * - has the same number of type parameters than `projection`
- * - has type parameter variances which conform to those of `projection`.
- * If `inOrder` then perform the same steps on the original `other` type.
- *
- * (2) Try to eta expand the constructor of `other`.
- *
- * (3a) In mode `TypevarsMissConetxt` replace the projection's hk constructor parameter
- * by the eta expansion of step (2) reapplied to the projection's arguments.
- * (3b) In normal mode, try to unify the projection's hk constructor parameter with
- * the eta expansion of step(2)
- *
- * (4) If `inOrder`, test `projection <: other` else test `other <: projection`.
+ /** Replace any top-level recursive type `{ z => T }` in `tp` with
+ * `[z := anchor]T`.
*/
- def compareHkApply(projection: NamedType, other: Type, inOrder: Boolean): Boolean = {
- def tryInfer(tp: Type): Boolean = ctx.traceIndented(i"compareHK($projection, $other, inOrder = $inOrder, constr = $tp)", subtyping) {
- tp match {
- case tp: TypeVar => tryInfer(tp.underlying)
- case param: PolyParam if canConstrain(param) =>
-
- def unifyWith(liftedOther: Type): Boolean = {
- subtyping.println(i"unify with $liftedOther")
- liftedOther.typeConstructor.widen match {
- case tycon: TypeRef if tycon.isEtaExpandable && tycon.typeParams.nonEmpty =>
- val (ok, projection1) =
- if (ctx.mode.is(Mode.TypevarsMissContext))
- (true, EtaExpansion(tycon).appliedTo(projection.argInfos))
- else
- (tryInstantiate(param, EtaExpansion(tycon)), projection)
- ok &&
- (if (inOrder) isSubType(projection1, other) else isSubType(other, projection1))
- case _ =>
- false
- }
- }
- val hkTypeParams = param.typeParams
- subtyping.println(i"classBounds = ${projection.prefix.member(tpnme.hkApply).info.classSymbols}")
- subtyping.println(i"base classes = ${other.baseClasses}")
- subtyping.println(i"type params = $hkTypeParams")
- if (inOrder) unifyWith(other)
- else testLifted(other, projection.prefix, hkTypeParams, unifyWith)
- case _ =>
- false
- }
+ private def fixRecs(anchor: SingletonType, tp: Type): Type = {
+ def fix(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType => fix(tp.parent).substRecThis(tp, anchor)
+ case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo)
+ case tp: PolyParam => fixOrElse(bounds(tp).hi, tp)
+ case tp: TypeProxy => fixOrElse(tp.underlying, tp)
+ case tp: AndOrType => tp.derivedAndOrType(fix(tp.tp1), fix(tp.tp2))
+ case tp => tp
}
- projection.name == tpnme.hkApply && !other.isHKApply &&
- tryInfer(projection.prefix.typeConstructor.dealias)
- }
-
- /** Compare type lambda with non-lambda type. */
- def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) = rt match {
- case TypeLambda(vs, args, body) =>
- other.isInstanceOf[TypeRef] &&
- args.length == other.typeParams.length && {
- val applied = other.appliedTo(argRefs(rt, args.length))
- if (inOrder) isSubType(body, applied)
- else body match {
- case body: TypeBounds => body.contains(applied)
- case _ => isSubType(applied, body)
- }
- }
- case _ =>
- false
- }
-
- /** Say we are comparing a refined type `P{type M = U}` or `P{type M >: L <: U}`.
- * If P#M refers to a BaseTypeArg aliased to some other typeref P#N,
- * do the same comparison with `P{type N = U}` or `P{type N >: L <: U}`, respectively.
- * This allows to handle situations involving named type params like this one:
- *
- * trait Lambda[type Elem]
- * trait Lst[T] extends Lambda[T]
- *
- * compareAliasedRefined is necessary so we establish that
- *
- * Lst[Int] = Lst[Elem = Int]
- */
- private def compareAliasedRefined(rt: RefinedType, other: Type, inOrder: Boolean) = {
- val mbr = refinedSymbol(rt)
- mbr.is(BaseTypeArg) && {
- mbr.info match {
- case TypeAlias(TypeRef(_, aliasName)) =>
- val rt1 = rt.derivedRefinedType(rt.parent, aliasName, rt.refinedInfo)
- subtyping.println(i"rewiring $rt to $rt1 in comparison with $other")
- if (inOrder) isSubType(rt1, other) else isSubType(other, rt1)
- case _ =>
- false
- }
+ def fixOrElse(tp: Type, fallback: Type) = {
+ val tp1 = fix(tp)
+ if (tp1 ne tp) tp1 else fallback
}
+ fix(tp)
}
- /** The symbol referred to in the refinement of `rt` */
- private def refinedSymbol(rt: RefinedType) = rt.parent.member(rt.refinedName).symbol
-
- /** Returns true iff either `tp11 <:< tp21` or `tp12 <:< tp22`, trying at the same time
- * to keep the constraint as wide as possible. Specifically, if
+ /** Returns true iff the result of evaluating either `op1` or `op2` is true,
+ * trying at the same time to keep the constraint as wide as possible.
+ * E.g., if
*
* tp11 <:< tp12 = true with post-constraint c1
* tp12 <:< tp22 = true with post-constraint c2
@@ -674,15 +823,18 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* Here, each precondition leads to a different constraint, and neither of
* the two post-constraints subsumes the other.
*/
- private def eitherIsSubType(tp11: Type, tp21: Type, tp12: Type, tp22: Type) = {
+ private def either(op1: => Boolean, op2: => Boolean): Boolean = {
val preConstraint = constraint
- isSubType(tp11, tp21) && {
+ op1 && {
val leftConstraint = constraint
constraint = preConstraint
- if (isSubType(tp12, tp22) && !subsumes(leftConstraint, constraint, preConstraint))
+ if (!(op2 && subsumes(leftConstraint, constraint, preConstraint))) {
+ if (constr != noPrinter && !subsumes(constraint, leftConstraint, preConstraint))
+ constr.println(i"CUT - prefer $leftConstraint over $constraint")
constraint = leftConstraint
+ }
true
- } || isSubType(tp12, tp22)
+ } || op2
}
/** Like tp1 <:< tp2, but returns false immediately if we know that
@@ -700,26 +852,32 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* rebase both itself and the member info of `tp` on a freshly created skolem type.
*/
protected def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = {
- val rebindNeeded = tp2.refinementRefersToThis
- val base = if (rebindNeeded) ensureStableSingleton(tp1) else tp1
- val rinfo2 = if (rebindNeeded) tp2.refinedInfo.substRefinedThis(tp2, base) else tp2.refinedInfo
+ val rinfo2 = tp2.refinedInfo
+ val mbr = tp1.member(name)
+
def qualifies(m: SingleDenotation) = isSubType(m.info, rinfo2)
- def memberMatches(mbr: Denotation): Boolean = mbr match { // inlined hasAltWith for performance
+
+ def memberMatches: Boolean = mbr match { // inlined hasAltWith for performance
case mbr: SingleDenotation => qualifies(mbr)
case _ => mbr hasAltWith qualifies
}
- /*>|>*/ ctx.traceIndented(i"hasMatchingMember($base . $name :? ${tp2.refinedInfo}) ${base.member(name).info.show} $rinfo2", subtyping) /*<|<*/ {
- memberMatches(base member name) ||
- tp1.isInstanceOf[SingletonType] &&
- { // special case for situations like:
- // class C { type T }
- // val foo: C
- // foo.type <: C { type T = foo.T }
- rinfo2 match {
- case rinfo2: TypeAlias => (base select name) =:= rinfo2.alias
- case _ => false
- }
- }
+
+ // special case for situations like:
+ // class C { type T }
+ // val foo: C
+ // foo.type <: C { type T {= , <: , >:} foo.T }
+ def selfReferentialMatch = tp1.isInstanceOf[SingletonType] && {
+ rinfo2 match {
+ case rinfo2: TypeBounds =>
+ val mbr1 = tp1.select(name)
+ !defn.isBottomType(tp1.widen) &&
+ (mbr1 =:= rinfo2.hi || (rinfo2.hi ne rinfo2.lo) && mbr1 =:= rinfo2.lo)
+ case _ => false
+ }
+ }
+
+ /*>|>*/ ctx.traceIndented(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}) ${mbr.info.show} $rinfo2", subtyping) /*<|<*/ {
+ memberMatches || selfReferentialMatch
}
}
@@ -738,11 +896,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* @return The parent type of `tp2` after skipping the matching refinements.
*/
private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match {
- case tp1 @ RefinedType(parent1, name1)
- if name1 == tp2.refinedName &&
- tp1.refinedInfo.isInstanceOf[TypeAlias] &&
- !tp2.refinementRefersToThis &&
- !tp1.refinementRefersToThis =>
+ case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName =>
tp2.parent match {
case parent2: RefinedType => skipMatching(parent1, parent2)
case parent2 => parent2
@@ -773,7 +927,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
/** A type has been covered previously in subtype checking if it
* is some combination of TypeRefs that point to classes, where the
- * combiners are RefinedTypes, AndTypes or AnnotatedTypes.
+ * combiners are RefinedTypes, RecTypes, AndTypes or AnnotatedTypes.
* One exception: Refinements referring to basetype args are never considered
* to be already covered. This is necessary because such refined types might
* still need to be compared with a compareAliasRefined.
@@ -781,7 +935,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
private def isCovered(tp: Type): Boolean = tp.dealias.stripTypeVar match {
case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass
case tp: ProtoType => false
- case tp: RefinedType => isCovered(tp.parent) && !refinedSymbol(tp).is(BaseTypeArg)
+ case tp: RefinedOrRecType => isCovered(tp.parent)
case tp: AnnotatedType => isCovered(tp.underlying)
case AndType(tp1, tp2) => isCovered(tp1) && isCovered(tp2)
case _ => false
@@ -813,14 +967,24 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
private def narrowGADTBounds(tr: NamedType, bound: Type, isUpper: Boolean): Boolean =
ctx.mode.is(Mode.GADTflexible) && {
val tparam = tr.symbol
- typr.println(s"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
- !bound.isRef(tparam) && {
- val oldBounds = ctx.gadt.bounds(tparam)
- val newBounds =
- if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
- else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
- isSubType(newBounds.lo, newBounds.hi) &&
- { ctx.gadt.setBounds(tparam, newBounds); true }
+ typr.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.isRef(tparam)}")
+ if (bound.isRef(tparam)) false
+ else bound match {
+ case bound: TypeRef
+ if bound.symbol.is(BindDefinedType) && ctx.gadt.bounds.contains(bound.symbol) &&
+ !tr.symbol.is(BindDefinedType) =>
+ // Avoid having pattern-bound types in gadt bounds,
+ // as these might be eliminated once the pattern is typechecked.
+ // Pattern-bound type symbols should be narrowed first; only if that fails
+ // should symbols in the environment be constrained.
+ narrowGADTBounds(bound, tr, !isUpper)
+ case _ =>
+ val oldBounds = ctx.gadt.bounds(tparam)
+ val newBounds =
+ if (isUpper) TypeBounds(oldBounds.lo, oldBounds.hi & bound)
+ else TypeBounds(oldBounds.lo | bound, oldBounds.hi)
+ isSubType(newBounds.lo, newBounds.hi) &&
+ { ctx.gadt.setBounds(tparam, newBounds); true }
}
}
@@ -859,7 +1023,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
}
/** Are `syms1` and `syms2` parameter lists with pairwise equivalent types? */
- private def matchingParams(formals1: List[Type], formals2: List[Type], isJava1: Boolean, isJava2: Boolean): Boolean = formals1 match {
+ def matchingParams(formals1: List[Type], formals2: List[Type], isJava1: Boolean, isJava2: Boolean): Boolean = formals1 match {
case formal1 :: rest1 =>
formals2 match {
case formal2 :: rest2 =>
@@ -874,10 +1038,10 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
formals2.isEmpty
}
- /** Do poly types `poly1` and `poly2` have type parameters that
+ /** Do generic types `poly1` and `poly2` have type parameters that
* have the same bounds (after renaming one set to the other)?
*/
- private def matchingTypeParams(poly1: PolyType, poly2: PolyType): Boolean =
+ def matchingTypeParams(poly1: PolyType, poly2: PolyType): Boolean =
(poly1.paramBounds corresponds poly2.paramBounds)((b1, b2) =>
isSameType(b1, b2.subst(poly2, poly1)))
@@ -1047,7 +1211,7 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val t2 = distributeAnd(tp2, tp1)
if (t2.exists) t2
else if (erased) erasedGlb(tp1, tp2, isJava = false)
- else liftIfHK(tp1, tp2, AndType(_, _))
+ else liftIfHK(tp1, tp2, AndType(_, _), _ & _)
}
}
@@ -1071,21 +1235,46 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
val t2 = distributeOr(tp2, tp1)
if (t2.exists) t2
else if (erased) erasedLub(tp1, tp2)
- else liftIfHK(tp1, tp2, OrType(_, _))
+ else liftIfHK(tp1, tp2, OrType(_, _), _ | _)
}
}
- /** `op(tp1, tp2)` unless `tp1` and `tp2` are type-constructors.
+ /** `op(tp1, tp2)` unless `tp1` and `tp2` are type-constructors with at least
+ * some unnamed type parameters.
* In the latter case, combine `tp1` and `tp2` under a type lambda like this:
*
* [X1, ..., Xn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn])
+ *
+ * Note: There is a tension between named and positional parameters here, which
+ * is impossible to resolve completely. Say you have
+ *
+ * C[type T], D[type U]
+ *
+ * Then do you expand `C & D` to `[T] -> C[T] & D[T]` or not? Under the named
+ * type parameter interpretation, this would be wrong whereas under the traditional
+ * higher-kinded interpretation this would be required. The problem arises from
+ * allowing both interpretations. A possible remedy is to be somehow stricter
+ * in where we allow which interpretation.
*/
- private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type) = {
+ private def liftIfHK(tp1: Type, tp2: Type, op: (Type, Type) => Type, original: (Type, Type) => Type) = {
val tparams1 = tp1.typeParams
val tparams2 = tp2.typeParams
- if (tparams1.isEmpty || tparams2.isEmpty) op(tp1, tp2)
- else if (tparams1.length != tparams2.length) mergeConflict(tp1, tp2)
- else hkCombine(tp1, tp2, tparams1, tparams2, op)
+ if (tparams1.isEmpty)
+ if (tparams2.isEmpty) op(tp1, tp2)
+ else original(tp1, tp2.appliedTo(tp2.typeParams.map(_.paramBoundsAsSeenFrom(tp2))))
+ else if (tparams2.isEmpty)
+ original(tp1.appliedTo(tp1.typeParams.map(_.paramBoundsAsSeenFrom(tp1))), tp2)
+ else
+ PolyType(
+ paramNames = tpnme.syntheticLambdaParamNames(tparams1.length),
+ variances = (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ (tparam1.paramVariance + tparam2.paramVariance) / 2))(
+ paramBoundsExp = tl => (tparams1, tparams2).zipped.map((tparam1, tparam2) =>
+ tl.lifted(tparams1, tparam1.paramBoundsAsSeenFrom(tp1)).bounds &
+ tl.lifted(tparams2, tparam2.paramBoundsAsSeenFrom(tp2)).bounds),
+ resultTypeExp = tl =>
+ original(tl.lifted(tparams1, tp1).appliedTo(tl.paramRefs),
+ tl.lifted(tparams2, tp2).appliedTo(tl.paramRefs)))
}
/** Try to distribute `&` inside type, detect and handle conflicts
@@ -1098,45 +1287,28 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
case tp1: RefinedType =>
tp2 match {
case tp2: RefinedType if tp1.refinedName == tp2.refinedName =>
- tp1.derivedRefinedType(
- tp1.parent & tp2.parent,
- tp1.refinedName,
- tp1.refinedInfo & tp2.refinedInfo.substRefinedThis(tp2, RefinedThis(tp1)))
+ // Given two refinements `T1 { X = S1 }` and `T2 { X = S2 }`, if `S1 =:= S2`
+ // (possibly by instantiating type parameters), rewrite to `T1 & T2 { X = S1 }`.
+ // Otherwise rewrite to `T1 & T2 { X B }` where `B` is the conjunction of
+ // the bounds of `X` in `T1` and `T2`.
+ // The first rule above is contentious because it cuts the constraint set.
+ // But without it we would replace the two aliases by
+ // `T { X >: S1 | S2 <: S1 & S2 }`, which looks weird and is probably
+ // not what's intended.
+ val rinfo1 = tp1.refinedInfo
+ val rinfo2 = tp2.refinedInfo
+ val parent = tp1.parent & tp2.parent
+ val rinfo =
+ if (rinfo1.isAlias && rinfo2.isAlias && isSameType(rinfo1, rinfo2))
+ rinfo1
+ else
+ rinfo1 & rinfo2
+ tp1.derivedRefinedType(parent, tp1.refinedName, rinfo)
case _ =>
NoType
}
- case tp1: TypeBounds =>
- tp2 match {
- case tp2: TypeBounds => tp1 & tp2
- case tp2: ClassInfo if tp1 contains tp2 => tp2
- case _ => mergeConflict(tp1, tp2)
- }
- case tp1: ClassInfo =>
- tp2 match {
- case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix & tp2.prefix)
- case tp2: TypeBounds if tp2 contains tp1 => tp1
- case _ => mergeConflict(tp1, tp2)
- }
- case tp1 @ MethodType(names1, formals1) =>
- tp2 match {
- case tp2 @ MethodType(names2, formals2)
- if matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
- tp1.isImplicit == tp2.isImplicit =>
- tp1.derivedMethodType(
- mergeNames(names1, names2, nme.syntheticParamName),
- formals1, tp1.resultType & tp2.resultType.subst(tp2, tp1))
- case _ =>
- mergeConflict(tp1, tp2)
- }
- case tp1: PolyType =>
- tp2 match {
- case tp2: PolyType if matchingTypeParams(tp1, tp2) =>
- tp1.derivedPolyType(
- mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
- tp1.paramBounds, tp1.resultType & tp2.resultType.subst(tp2, tp1))
- case _ =>
- mergeConflict(tp1, tp2)
- }
+ case tp1: RecType =>
+ tp1.rebind(distributeAnd(tp1.parent, tp2))
case ExprType(rt1) =>
tp2 match {
case ExprType(rt2) =>
@@ -1161,38 +1333,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
* The rhs is a proper supertype of the lhs.
*/
private def distributeOr(tp1: Type, tp2: Type): Type = tp1 match {
- case tp1: TypeBounds =>
- tp2 match {
- case tp2: TypeBounds => tp1 | tp2
- case tp2: ClassInfo if tp1 contains tp2 => tp1
- case _ => mergeConflict(tp1, tp2)
- }
- case tp1: ClassInfo =>
- tp2 match {
- case tp2: ClassInfo if tp1.cls eq tp2.cls => tp1.derivedClassInfo(tp1.prefix | tp2.prefix)
- case tp2: TypeBounds if tp2 contains tp1 => tp2
- case _ => mergeConflict(tp1, tp2)
- }
- case tp1 @ MethodType(names1, formals1) =>
- tp2 match {
- case tp2 @ MethodType(names2, formals2)
- if matchingParams(formals1, formals2, tp1.isJava, tp2.isJava) &&
- tp1.isImplicit == tp2.isImplicit =>
- tp1.derivedMethodType(
- mergeNames(names1, names2, nme.syntheticParamName),
- formals1, tp1.resultType | tp2.resultType.subst(tp2, tp1))
- case _ =>
- mergeConflict(tp1, tp2)
- }
- case tp1: PolyType =>
- tp2 match {
- case tp2: PolyType if matchingTypeParams(tp1, tp2) =>
- tp1.derivedPolyType(
- mergeNames(tp1.paramNames, tp2.paramNames, tpnme.syntheticTypeParamName),
- tp1.paramBounds, tp1.resultType | tp2.resultType.subst(tp2, tp1))
- case _ =>
- mergeConflict(tp1, tp2)
- }
case ExprType(rt1) =>
ExprType(rt1 | tp2.widenExpr)
case tp1: TypeVar if tp1.isInstantiated =>
@@ -1203,24 +1343,6 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
NoType
}
- /** Handle merge conflict by throwing a `MergeError` exception */
- private def mergeConflict(tp1: Type, tp2: Type): Type = {
- def showType(tp: Type) = tp match {
- case ClassInfo(_, cls, _, _, _) => cls.showLocated
- case bounds: TypeBounds => i"type bounds $bounds"
- case _ => tp.show
- }
- throw new MergeError(s"cannot merge ${showType(tp1)} with ${showType(tp2)}", tp1, tp2)
- }
-
- /** Merge two lists of names. If names in corresponding positions match, keep them,
- * otherwise generate new synthetic names.
- */
- private def mergeNames[N <: Name](names1: List[N], names2: List[N], syntheticName: Int => N): List[N] = {
- for ((name1, name2, idx) <- (names1, names2, 0 until names1.length).zipped)
- yield if (name1 == name2) name1 else syntheticName(idx)
- }.toList
-
/** Show type, handling type types better than the default */
private def showType(tp: Type)(implicit ctx: Context) = tp match {
case ClassInfo(_, cls, _, _, _) => cls.showLocated
@@ -1280,12 +1402,12 @@ class TypeComparer(initctx: Context) extends DotClass with ConstraintHandling {
/** Show subtype goal that led to an assertion failure */
def showGoal(tp1: Type, tp2: Type)(implicit ctx: Context) = {
- println(disambiguated(implicit ctx => s"assertion failure for ${tp1.show} <:< ${tp2.show}, frozen = $frozenConstraint"))
+ println(ex"assertion failure for $tp1 <:< $tp2, frozen = $frozenConstraint")
def explainPoly(tp: Type) = tp match {
- case tp: PolyParam => ctx.println(s"polyparam ${tp.show} found in ${tp.binder.show}")
- case tp: TypeRef if tp.symbol.exists => ctx.println(s"typeref ${tp.show} found in ${tp.symbol.owner.show}")
- case tp: TypeVar => ctx.println(s"typevar ${tp.show}, origin = ${tp.origin}")
- case _ => ctx.println(s"${tp.show} is a ${tp.getClass}")
+ case tp: PolyParam => ctx.echo(s"polyparam ${tp.show} found in ${tp.binder.show}")
+ case tp: TypeRef if tp.symbol.exists => ctx.echo(s"typeref ${tp.show} found in ${tp.symbol.owner.show}")
+ case tp: TypeVar => ctx.echo(s"typevar ${tp.show}, origin = ${tp.origin}")
+ case _ => ctx.echo(s"${tp.show} is a ${tp.getClass}")
}
explainPoly(tp1)
explainPoly(tp2)
@@ -1362,25 +1484,18 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) {
}
override def addConstraint(param: PolyParam, bound: Type, fromBelow: Boolean): Boolean =
- traceIndented(s"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint") {
+ traceIndented(i"add constraint $param ${if (fromBelow) ">:" else "<:"} $bound $frozenConstraint, constraint = ${ctx.typerState.constraint}") {
super.addConstraint(param, bound, fromBelow)
}
override def copyIn(ctx: Context) = new ExplainingTypeComparer(ctx)
- override def compareHkApply(projection: NamedType, other: Type, inOrder: Boolean) =
- if (projection.name == tpnme.hkApply)
- traceIndented(i"compareHkApply $projection, $other, $inOrder") {
- super.compareHkApply(projection, other, inOrder)
- }
- else super.compareHkApply(projection, other, inOrder)
-
- override def compareHkLambda(rt: RefinedType, other: Type, inOrder: Boolean) =
- if (rt.refinedName == tpnme.hkApply)
- traceIndented(i"compareHkLambda $rt, $other, $inOrder") {
- super.compareHkLambda(rt, other, inOrder)
- }
- else super.compareHkLambda(rt, other, inOrder)
+ override def compareHkApply2(tp1: Type, tp2: HKApply, tycon2: Type, args2: List[Type]): Boolean = {
+ def addendum = ""
+ traceIndented(i"compareHkApply $tp1, $tp2$addendum") {
+ super.compareHkApply2(tp1, tp2, tycon2, args2)
+ }
+ }
override def toString = "Subtype trace:" + { try b.toString finally b.clear() }
}
diff --git a/src/dotty/tools/dotc/core/TypeErasure.scala b/src/dotty/tools/dotc/core/TypeErasure.scala
index 26cac4f72..254ea3277 100644
--- a/src/dotty/tools/dotc/core/TypeErasure.scala
+++ b/src/dotty/tools/dotc/core/TypeErasure.scala
@@ -6,7 +6,6 @@ import Symbols._, Types._, Contexts._, Flags._, Names._, StdNames._, Decorators.
import Uniques.unique
import dotc.transform.ExplicitOuter._
import dotc.transform.ValueClasses._
-import typer.Mode
import util.DotClass
/** Erased types are:
@@ -67,20 +66,20 @@ object TypeErasure {
* Nothing. This is because this type is only useful for type adaptation (see
* [[Erasure.Boxing#adaptToType]]).
*
- * @param cls The value class symbol
+ * @param tycon A TypeRef referring to the value class symbol
* @param erasedUnderlying The erased type of the single field of the value class
*/
- abstract case class ErasedValueType(cls: ClassSymbol, erasedUnderlying: Type)
+ abstract case class ErasedValueType(tycon: TypeRef, erasedUnderlying: Type)
extends CachedGroundType with ValueType {
- override def computeHash = doHash(cls, erasedUnderlying)
+ override def computeHash = doHash(tycon, erasedUnderlying)
}
- final class CachedErasedValueType(cls: ClassSymbol, erasedUnderlying: Type)
- extends ErasedValueType(cls, erasedUnderlying)
+ final class CachedErasedValueType(tycon: TypeRef, erasedUnderlying: Type)
+ extends ErasedValueType(tycon, erasedUnderlying)
object ErasedValueType {
- def apply(cls: ClassSymbol, erasedUnderlying: Type)(implicit ctx: Context) = {
- unique(new CachedErasedValueType(cls, erasedUnderlying))
+ def apply(tycon: TypeRef, erasedUnderlying: Type)(implicit ctx: Context) = {
+ unique(new CachedErasedValueType(tycon, erasedUnderlying))
}
}
@@ -278,6 +277,22 @@ object TypeErasure {
else tp1
}
}
+
+ /** Does the (possibly generic) type `tp` have the same erasure in all its
+ * possible instantiations?
+ */
+ def hasStableErasure(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp: TypeRef =>
+ tp.info match {
+ case TypeAlias(alias) => hasStableErasure(alias)
+ case _: ClassInfo => true
+ case _ => false
+ }
+ case tp: PolyParam => false
+ case tp: TypeProxy => hasStableErasure(tp.superType)
+ case tp: AndOrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2)
+ case _ => false
+ }
}
import TypeErasure._
@@ -341,8 +356,6 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
SuperType(this(thistpe), this(supertpe))
case ExprType(rt) =>
defn.FunctionClass(0).typeRef
- case tp: TypeProxy =>
- this(tp.underlying)
case AndType(tp1, tp2) =>
erasedGlb(this(tp1), this(tp2), isJava)
case OrType(tp1, tp2) =>
@@ -357,11 +370,6 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
case rt =>
tp.derivedMethodType(tp.paramNames, formals, rt)
}
- case tp: PolyType =>
- this(tp.resultType) match {
- case rt: MethodType => rt
- case rt => MethodType(Nil, Nil, rt)
- }
case tp @ ClassInfo(pre, cls, classParents, decls, _) =>
if (cls is Package) tp
else {
@@ -375,7 +383,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
tr1 :: trs1.filterNot(_ isRef defn.ObjectClass)
case nil => nil
}
- val erasedDecls = decls.filteredScope(d => !d.isType || d.isClass)
+ val erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass)
tp.derivedClassInfo(NoPrefix, parents, erasedDecls, erasedRef(tp.selfType))
// can't replace selftype by NoType because this would lose the sourceModule link
}
@@ -383,6 +391,8 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
tp
case tp: WildcardType if wildcardOK =>
tp
+ case tp: TypeProxy =>
+ this(tp.underlying)
}
private def eraseArray(tp: RefinedType)(implicit ctx: Context) = {
@@ -390,13 +400,13 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
def arrayErasure(tpToErase: Type) =
erasureFn(isJava, semiEraseVCs = false, isConstructor, wildcardOK)(tpToErase)
if (elemtp derivesFrom defn.NullClass) JavaArrayType(defn.ObjectType)
- else if (isUnboundedGeneric(elemtp)) defn.ObjectType
+ else if (isUnboundedGeneric(elemtp) && !isJava) defn.ObjectType
else JavaArrayType(arrayErasure(elemtp))
}
- /** The erasure of a symbol's info. This is different from `apply` in the way `ExprType`s are
- * treated. `eraseInfo` maps them them to nullary method types, whereas `apply` maps them
- * to `Function0`.
+ /** The erasure of a symbol's info. This is different from `apply` in the way `ExprType`s and
+ * `PolyType`s are treated. `eraseInfo` maps them to method types, whereas `apply` maps them
+ * to the underlying type.
*/
def eraseInfo(tp: Type, sym: Symbol)(implicit ctx: Context) = tp match {
case ExprType(rt) =>
@@ -406,13 +416,18 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
// forwarders to mixin methods.
// See doc comment for ElimByName for speculation how we could improve this.
else MethodType(Nil, Nil, eraseResult(rt))
+ case tp: PolyType =>
+ eraseResult(tp.resultType) match {
+ case rt: MethodType => rt
+ case rt => MethodType(Nil, Nil, rt)
+ }
case tp => this(tp)
}
private def eraseDerivedValueClassRef(tref: TypeRef)(implicit ctx: Context): Type = {
val cls = tref.symbol.asClass
val underlying = underlyingOfValueClass(cls)
- if (underlying.exists) ErasedValueType(cls, valueErasure(underlying))
+ if (underlying.exists) ErasedValueType(tref, valueErasure(underlying))
else NoType
}
@@ -431,7 +446,7 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
// constructor method should not be semi-erased.
else if (isConstructor && isDerivedValueClass(sym)) eraseNormalClassRef(tp)
else this(tp)
- case RefinedType(parent, _) if !(parent isRef defn.ArrayClass) =>
+ case RefinedType(parent, _, _) if !(parent isRef defn.ArrayClass) =>
eraseResult(parent)
case _ =>
this(tp)
@@ -475,6 +490,9 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
sigName(tp.widen)
case ExprType(rt) =>
sigName(defn.FunctionOf(Nil, rt))
+ case tp: TypeVar =>
+ val inst = tp.instanceOpt
+ if (inst.exists) sigName(inst) else tpnme.Uninstantiated
case tp: TypeProxy =>
sigName(tp.underlying)
case ErrorType | WildcardType =>
@@ -491,4 +509,6 @@ class TypeErasure(isJava: Boolean, semiEraseVCs: Boolean, isConstructor: Boolean
println(s"no sig for $tp")
throw ex
}
+
+
}
diff --git a/src/dotty/tools/dotc/core/TypeOps.scala b/src/dotty/tools/dotc/core/TypeOps.scala
index 371be1586..92e5f9d57 100644
--- a/src/dotty/tools/dotc/core/TypeOps.scala
+++ b/src/dotty/tools/dotc/core/TypeOps.scala
@@ -4,8 +4,9 @@ package core
import Contexts._, Types._, Symbols._, Names._, Flags._, Scopes._
import SymDenotations._, Denotations.SingleDenotation
-import config.Printers._
+import config.Printers.typr
import util.Positions._
+import NameOps._
import Decorators._
import StdNames._
import Annotations._
@@ -35,7 +36,7 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
* Instead we produce an annotated type that marks the prefix as unsafe:
*
* (x: (C @ UnsafeNonvariant)#T)C#T
-
+ *
* We also set a global state flag `unsafeNonvariant` to the current run.
* When typing a Select node, typer will check that flag, and if it
* points to the current run will scan the result type of the select for
@@ -122,100 +123,20 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
def currentVariance = variance
}
- /** Approximate a type `tp` with a type that does not contain skolem types.
- */
- final def deskolemize(tp: Type): Type = deskolemize(tp, 1, Set())
-
- private def deskolemize(tp: Type, variance: Int, seen: Set[SkolemType]): Type = {
- def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType, newSeen: Set[SkolemType] = seen) =
- if (variance == 0) NoType
- else deskolemize(if (variance < 0) lo else hi, variance, newSeen)
- tp match {
+ /** Approximate a type `tp` with a type that does not contain skolem types. */
+ object deskolemize extends ApproximatingTypeMap {
+ private var seen: Set[SkolemType] = Set()
+ def apply(tp: Type) = tp match {
case tp: SkolemType =>
if (seen contains tp) NoType
- else approx(hi = tp.info, newSeen = seen + tp)
- case tp: NamedType =>
- val sym = tp.symbol
- if (sym.isStatic) tp
else {
- val pre1 = deskolemize(tp.prefix, variance, seen)
- if (pre1 eq tp.prefix) tp
- else {
- val d = tp.prefix.member(tp.name)
- d.info match {
- case TypeAlias(alias) => deskolemize(alias, variance, seen)
- case _ =>
- if (pre1.exists && !pre1.isRef(defn.NothingClass)) tp.derivedSelect(pre1)
- else {
- ctx.log(s"deskolem: $tp: ${tp.info}")
- tp.info match {
- case TypeBounds(lo, hi) => approx(lo, hi)
- case info => approx(defn.NothingType, info)
- }
- }
- }
- }
+ val saved = seen
+ seen += tp
+ try approx(hi = tp.info)
+ finally seen = saved
}
- case _: ThisType | _: BoundType | _: SuperType | NoType | NoPrefix =>
- tp
- case tp: RefinedType =>
- val parent1 = deskolemize(tp.parent, variance, seen)
- if (parent1.exists) {
- val refinedInfo1 = deskolemize(tp.refinedInfo, variance, seen)
- if (refinedInfo1.exists)
- tp.derivedRefinedType(parent1, tp.refinedName, refinedInfo1)
- else
- approx(hi = parent1)
- }
- else approx()
- case tp: TypeAlias =>
- val alias1 = deskolemize(tp.alias, variance * tp.variance, seen)
- if (alias1.exists) tp.derivedTypeAlias(alias1)
- else approx(hi = TypeBounds.empty)
- case tp: TypeBounds =>
- val lo1 = deskolemize(tp.lo, -variance, seen)
- val hi1 = deskolemize(tp.hi, variance, seen)
- if (lo1.exists && hi1.exists) tp.derivedTypeBounds(lo1, hi1)
- else approx(hi =
- if (lo1.exists) TypeBounds.lower(lo1)
- else if (hi1.exists) TypeBounds.upper(hi1)
- else TypeBounds.empty)
- case tp: ClassInfo =>
- val pre1 = deskolemize(tp.prefix, variance, seen)
- if (pre1.exists) tp.derivedClassInfo(pre1)
- else NoType
- case tp: AndOrType =>
- val tp1d = deskolemize(tp.tp1, variance, seen)
- val tp2d = deskolemize(tp.tp2, variance, seen)
- if (tp1d.exists && tp2d.exists)
- tp.derivedAndOrType(tp1d, tp2d)
- else if (tp.isAnd)
- approx(hi = tp1d & tp2d) // if one of tp1d, tp2d exists, it is the result of tp1d & tp2d
- else
- approx(lo = tp1d & tp2d)
- case tp: WildcardType =>
- val bounds1 = deskolemize(tp.optBounds, variance, seen)
- if (bounds1.exists) tp.derivedWildcardType(bounds1)
- else WildcardType
case _ =>
- if (tp.isInstanceOf[MethodicType]) assert(variance != 0, tp)
- deskolemizeMap.mapOver(tp, variance, seen)
- }
- }
-
- object deskolemizeMap extends TypeMap {
- private var seen: Set[SkolemType] = _
- def apply(tp: Type) = deskolemize(tp, variance, seen)
- def mapOver(tp: Type, variance: Int, seen: Set[SkolemType]) = {
- val savedVariance = this.variance
- val savedSeen = this.seen
- this.variance = variance
- this.seen = seen
- try super.mapOver(tp)
- finally {
- this.variance = savedVariance
- this.seen = savedSeen
- }
+ mapOver(tp)
}
}
@@ -251,96 +172,138 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
}
/** Approximate union type by intersection of its dominators.
- * See Type#approximateUnion for an explanation.
- * That is, replace a union type T1 | ... | Tn
+ * by the smallest intersection type of base-class instances of T1,...,Tn.
+ * Example: Given
+ *
+ * trait C[+T]
+ * trait D
+ * class A extends C[A] with D
+ * class B extends C[B] with D with E
+ *
+ * we approximate `A | B` by `C[A | B] with D`
*/
- def approximateUnion(tp: Type): Type = {
+ def orDominator(tp: Type): Type = {
+
/** a faster version of cs1 intersect cs2 */
def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = {
val cs2AsSet = new util.HashSet[ClassSymbol](100)
cs2.foreach(cs2AsSet.addEntry)
cs1.filter(cs2AsSet.contains)
}
+
/** The minimal set of classes in `cs` which derive all other classes in `cs` */
def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match {
case c :: rest =>
val accu1 = if (accu exists (_ derivesFrom c)) accu else c :: accu
if (cs == c.baseClasses) accu1 else dominators(rest, accu1)
}
+
+ def mergeRefined(tp1: Type, tp2: Type): Type = {
+ def fail = throw new AssertionError(i"Failure to join alternatives $tp1 and $tp2")
+ tp1 match {
+ case tp1 @ RefinedType(parent1, name1, rinfo1) =>
+ tp2 match {
+ case RefinedType(parent2, `name1`, rinfo2) =>
+ tp1.derivedRefinedType(
+ mergeRefined(parent1, parent2), name1, rinfo1 | rinfo2)
+ case _ => fail
+ }
+ case tp1 @ TypeRef(pre1, name1) =>
+ tp2 match {
+ case tp2 @ TypeRef(pre2, `name1`) =>
+ tp1.derivedSelect(pre1 | pre2)
+ case _ => fail
+ }
+ case _ => fail
+ }
+ }
+
def approximateOr(tp1: Type, tp2: Type): Type = {
def isClassRef(tp: Type): Boolean = tp match {
case tp: TypeRef => tp.symbol.isClass
case tp: RefinedType => isClassRef(tp.parent)
case _ => false
}
- def next(tp: TypeProxy) = tp.underlying match {
- case TypeBounds(_, hi) => hi
- case nx => nx
- }
- /** If `tp1` and `tp2` are typebounds, try to make one fit into the other
- * or to make them equal, by instantiating uninstantiated type variables.
- */
- def homogenizedUnion(tp1: Type, tp2: Type): Type = {
- tp1 match {
- case tp1: TypeBounds =>
- tp2 match {
- case tp2: TypeBounds =>
- def fitInto(tp1: TypeBounds, tp2: TypeBounds): Unit = {
- val nestedCtx = ctx.fresh.setNewTyperState
- if (tp2.boundsInterval.contains(tp1.boundsInterval)(nestedCtx))
- nestedCtx.typerState.commit()
- }
- fitInto(tp1, tp2)
- fitInto(tp2, tp1)
- case _ =>
- }
- case _ =>
- }
- tp1 | tp2
- }
tp1 match {
- case tp1: RefinedType =>
- tp2 match {
- case tp2: RefinedType if tp1.refinedName == tp2.refinedName =>
- return tp1.derivedRefinedType(
- approximateUnion(OrType(tp1.parent, tp2.parent)),
- tp1.refinedName,
- homogenizedUnion(tp1.refinedInfo, tp2.refinedInfo).substRefinedThis(tp2, RefinedThis(tp1)))
- //.ensuring { x => println(i"approx or $tp1 | $tp2 = $x\n constr = ${ctx.typerState.constraint}"); true } // DEBUG
- case _ =>
- }
- case _ =>
- }
- tp1 match {
+ case tp1: RecType =>
+ tp1.rebind(approximateOr(tp1.parent, tp2))
case tp1: TypeProxy if !isClassRef(tp1) =>
- approximateUnion(next(tp1) | tp2)
+ orDominator(tp1.superType | tp2)
case _ =>
tp2 match {
+ case tp2: RecType =>
+ tp2.rebind(approximateOr(tp1, tp2.parent))
case tp2: TypeProxy if !isClassRef(tp2) =>
- approximateUnion(tp1 | next(tp2))
+ orDominator(tp1 | tp2.superType)
case _ =>
val commonBaseClasses = tp.mapReduceOr(_.baseClasses)(intersect)
val doms = dominators(commonBaseClasses, Nil)
- def baseTp(cls: ClassSymbol): Type =
- if (tp1.typeParams.nonEmpty) tp.baseTypeRef(cls)
- else tp.baseTypeWithArgs(cls)
+ def baseTp(cls: ClassSymbol): Type = {
+ val base =
+ if (tp1.typeParams.nonEmpty) tp.baseTypeRef(cls)
+ else tp.baseTypeWithArgs(cls)
+ base.mapReduceOr(identity)(mergeRefined)
+ }
doms.map(baseTp).reduceLeft(AndType.apply)
}
}
}
- if (ctx.featureEnabled(defn.LanguageModuleClass, nme.keepUnions)) tp
- else tp match {
+
+ tp match {
case tp: OrType =>
approximateOr(tp.tp1, tp.tp2)
- case tp @ AndType(tp1, tp2) =>
- tp derived_& (approximateUnion(tp1), approximateUnion(tp2))
- case tp: RefinedType =>
- tp.derivedRefinedType(approximateUnion(tp.parent), tp.refinedName, tp.refinedInfo)
case _ =>
tp
}
}
+ /** Given a disjunction T1 | ... | Tn of types with potentially embedded
+ * type variables, constrain type variables further if this eliminates
+ * some of the branches of the disjunction. Do this also for disjunctions
+ * embedded in intersections, as parents in refinements, and in recursive types.
+ *
+ * For instance, if `A` is an unconstrained type variable, then
+ *
+ * ArrayBuffer[Int] | ArrayBuffer[A]
+ *
+ * is approximated by constraining `A` to be =:= to `Int` and returning `ArrayBuffer[Int]`
+ * instead of `ArrayBuffer[_ >: Int | A <: Int & A]`
+ */
+ def harmonizeUnion(tp: Type): Type = tp match {
+ case tp: OrType =>
+ joinIfScala2(typeComparer.fluidly(tp.tp1 | tp.tp2))
+ case tp @ AndType(tp1, tp2) =>
+ tp derived_& (harmonizeUnion(tp1), harmonizeUnion(tp2))
+ case tp: RefinedType =>
+ tp.derivedRefinedType(harmonizeUnion(tp.parent), tp.refinedName, tp.refinedInfo)
+ case tp: RecType =>
+ tp.rebind(harmonizeUnion(tp.parent))
+ case _ =>
+ tp
+ }
+
+ /** Under -language:Scala2: Replace or-types with their joins */
+ private def joinIfScala2(tp: Type) = tp match {
+ case tp: OrType if scala2Mode => tp.join
+ case _ => tp
+ }
+
+ /** Not currently needed:
+ *
+ def liftToRec(f: (Type, Type) => Type)(tp1: Type, tp2: Type)(implicit ctx: Context) = {
+ def f2(tp1: Type, tp2: Type): Type = tp2 match {
+ case tp2: RecType => tp2.rebind(f(tp1, tp2.parent))
+ case _ => f(tp1, tp2)
+ }
+ tp1 match {
+ case tp1: RecType => tp1.rebind(f2(tp1.parent, tp2))
+ case _ => f2(tp1, tp2)
+ }
+ }
+ */
+
private def enterArgBinding(formal: Symbol, info: Type, cls: ClassSymbol, decls: Scope) = {
val lazyInfo = new LazyType { // needed so we do not force `formal`.
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
@@ -421,13 +384,23 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
* to the current scope, provided (1) variances of both aliases are the same, and
* (2) X is not yet defined in current scope. This "short-circuiting" prevents
* long chains of aliases which would have to be traversed in type comparers.
+ *
+ * Note: Test i1401.scala shows that `forwardRefs` is also necessary
+ * for typechecking in the case where self types refer to type parameters
+ * that are upper-bounded by subclass instances.
*/
def forwardRefs(from: Symbol, to: Type, prefs: List[TypeRef]) = to match {
case to @ TypeBounds(lo1, hi1) if lo1 eq hi1 =>
- for (pref <- prefs)
- for (argSym <- pref.decls)
- if (argSym is BaseTypeArg)
- forwardRef(argSym, from, to, cls, decls)
+ for (pref <- prefs) {
+ def forward(): Unit =
+ for (argSym <- pref.decls)
+ if (argSym is BaseTypeArg)
+ forwardRef(argSym, from, to, cls, decls)
+ pref.info match {
+ case info: TempClassInfo => info.addSuspension(forward)
+ case _ => forward()
+ }
+ }
case _ =>
}
@@ -440,28 +413,37 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
var formals: SimpleMap[TypeName, Symbol] = SimpleMap.Empty // A map of all formal parent parameter
// Strip all refinements from parent type, populating `refinements` and `formals` maps.
- def normalizeToRef(tp: Type): TypeRef = tp.dealias match {
- case tp: TypeRef =>
- tp
- case tp @ RefinedType(tp1, name: TypeName) =>
- tp.refinedInfo match {
- case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) =>
- // Don't record refinements of the form X = this.X (These can arise using named parameters).
- typr.println(s"dropping refinement $tp")
- case _ =>
- val prevInfo = refinements(name)
- refinements = refinements.updated(name,
- if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo)
- formals = formals.updated(name, tp1.typeParamNamed(name))
- }
- normalizeToRef(tp1)
- case ErrorType =>
- defn.AnyType
- case AnnotatedType(tpe, _) =>
- normalizeToRef(tpe)
- case _ =>
- throw new TypeError(s"unexpected parent type: $tp")
+ def normalizeToRef(tp: Type): TypeRef = {
+ def fail = throw new TypeError(s"unexpected parent type: $tp")
+ tp.dealias match {
+ case tp: TypeRef =>
+ tp
+ case tp @ RefinedType(tp1, name: TypeName, rinfo) =>
+ rinfo match {
+ case TypeAlias(TypeRef(pre, name1)) if name1 == name && (pre =:= cls.thisType) =>
+ // Don't record refinements of the form X = this.X (These can arise using named parameters).
+ typr.println(s"dropping refinement $tp")
+ case _ =>
+ val prevInfo = refinements(name)
+ refinements = refinements.updated(name,
+ if (prevInfo == null) tp.refinedInfo else prevInfo & tp.refinedInfo)
+ formals = formals.updated(name, tp1.typeParamNamed(name))
+ }
+ normalizeToRef(tp1)
+ case ErrorType =>
+ defn.AnyType
+ case AnnotatedType(tpe, _) =>
+ normalizeToRef(tpe)
+ case HKApply(tycon: TypeRef, args) =>
+ tycon.info match {
+ case TypeAlias(alias) => normalizeToRef(alias.appliedTo(args))
+ case _ => fail
+ }
+ case _ =>
+ fail
+ }
}
+
val parentRefs = parents map normalizeToRef
// Enter all refinements into current scope.
@@ -470,9 +452,9 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
s"redefinition of ${decls.lookup(name).debugString} in ${cls.showLocated}")
enterArgBinding(formals(name), refinedInfo, cls, decls)
}
- // Forward definitions in super classes that have one of the refined paramters
+ // Forward definitions in super classes that have one of the refined parameters
// as aliases directly to the refined info.
- // Note that this cannot be fused bwith the previous loop because we now
+ // Note that this cannot be fused with the previous loop because we now
// assume that all arguments have been entered in `decls`.
refinements foreachBinding { (name, refinedInfo) =>
forwardRefs(formals(name), refinedInfo, parentRefs)
@@ -534,17 +516,19 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
*/
def featureEnabled(owner: ClassSymbol, feature: TermName): Boolean = {
def toPrefix(sym: Symbol): String =
- if (sym eq defn.LanguageModuleClass) "" else toPrefix(sym.owner) + sym.name + "."
+ if (!sym.exists || (sym eq defn.LanguageModuleClass)) ""
+ else toPrefix(sym.owner) + sym.name + "."
def featureName = toPrefix(owner) + feature
- def hasImport(implicit ctx: Context): Boolean = (
- ctx.importInfo != null
- && ( (ctx.importInfo.site.widen.typeSymbol eq owner)
- && ctx.importInfo.originals.contains(feature)
- ||
- { var c = ctx.outer
- while (c.importInfo eq ctx.importInfo) c = c.outer
- hasImport(c)
- }))
+ def hasImport(implicit ctx: Context): Boolean = {
+ if (ctx.importInfo == null || (ctx.importInfo.site.widen.typeSymbol ne owner)) false
+ else if (ctx.importInfo.excluded.contains(feature)) false
+ else if (ctx.importInfo.originals.contains(feature)) true
+ else {
+ var c = ctx.outer
+ while (c.importInfo eq ctx.importInfo) c = c.outer
+ hasImport(c)
+ }
+ }
def hasOption = ctx.base.settings.language.value exists (s => s == featureName || s == "_")
hasImport(ctx.withPhase(ctx.typerPhase)) || hasOption
}
@@ -556,6 +540,9 @@ trait TypeOps { this: Context => // TODO: Make standalone object.
def scala2Mode =
featureEnabled(defn.LanguageModuleClass, nme.Scala2)
+ def dynamicsEnabled =
+ featureEnabled(defn.LanguageModuleClass, nme.dynamics)
+
def testScala2Mode(msg: String, pos: Position) = {
if (scala2Mode) migrationWarning(msg, pos)
scala2Mode
diff --git a/src/dotty/tools/dotc/core/TypeParamInfo.scala b/src/dotty/tools/dotc/core/TypeParamInfo.scala
new file mode 100644
index 000000000..647c895db
--- /dev/null
+++ b/src/dotty/tools/dotc/core/TypeParamInfo.scala
@@ -0,0 +1,40 @@
+package dotty.tools.dotc.core
+
+import Names.TypeName
+import Contexts.Context
+import Types.{Type, TypeBounds}
+
+/** A common super trait of Symbol and LambdaParam.
+ * Used to capture the attributes of type parameters which can be implemented as either.
+ */
+trait TypeParamInfo {
+
+ /** Is this the info of a type parameter? Will return `false` for symbols
+ * that are not type parameters.
+ */
+ def isTypeParam(implicit ctx: Context): Boolean
+
+ /** The name of the type parameter */
+ def paramName(implicit ctx: Context): TypeName
+
+ /** The info of the type parameter */
+ def paramBounds(implicit ctx: Context): TypeBounds
+
+ /** The info of the type parameter as seen from a prefix type.
+ * For type parameter symbols, this is the `memberInfo` as seen from `prefix`.
+ * For type lambda parameters, it's the same as `paramBounds` as
+ * `asSeenFrom` has already been applied to the whole type lambda.
+ */
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds
+
+ /** The parameter bounds, or the completer if the type parameter
+ * is an as-yet uncompleted symbol.
+ */
+ def paramBoundsOrCompleter(implicit ctx: Context): Type
+
+ /** The variance of the type parameter */
+ def paramVariance(implicit ctx: Context): Int
+
+ /** A type that refers to the parameter */
+ def paramRef(implicit ctx: Context): Type
+} \ No newline at end of file
diff --git a/src/dotty/tools/dotc/core/TyperState.scala b/src/dotty/tools/dotc/core/TyperState.scala
index 36f026107..5c476c1cb 100644
--- a/src/dotty/tools/dotc/core/TyperState.scala
+++ b/src/dotty/tools/dotc/core/TyperState.scala
@@ -59,6 +59,11 @@ class TyperState(r: Reporter) extends DotClass with Showable {
/** Commit state so that it gets propagated to enclosing context */
def commit()(implicit ctx: Context): Unit = unsupported("commit")
+ /** The closest ancestor of this typer state (including possibly this typer state itself)
+ * which is not yet committed, or which does not have a parent.
+ */
+ def uncommittedAncestor: TyperState = this
+
/** Make type variable instances permanent by assigning to `inst` field if
* type variable instantiation cannot be retracted anymore. Then, remove
* no-longer needed constraint entries.
@@ -83,7 +88,8 @@ extends TyperState(r) {
override def reporter = myReporter
- private var myConstraint: Constraint = previous.constraint
+ private val previousConstraint = previous.constraint
+ private var myConstraint: Constraint = previousConstraint
override def constraint = myConstraint
override def constraint_=(c: Constraint)(implicit ctx: Context) = {
@@ -96,7 +102,6 @@ extends TyperState(r) {
override def ephemeral = myEphemeral
override def ephemeral_=(x: Boolean): Unit = { myEphemeral = x }
-
override def fresh(isCommittable: Boolean): TyperState =
new MutableTyperState(this, new StoreReporter(reporter), isCommittable)
@@ -107,23 +112,43 @@ extends TyperState(r) {
isCommittable &&
(!previous.isInstanceOf[MutableTyperState] || previous.isGlobalCommittable)
+ private var isCommitted = false
+
+ override def uncommittedAncestor: TyperState =
+ if (isCommitted) previous.uncommittedAncestor else this
+
/** Commit typer state so that its information is copied into current typer state
* In addition (1) the owning state of undetermined or temporarily instantiated
* type variables changes from this typer state to the current one. (2) Variables
* that were temporarily instantiated in the current typer state are permanently
* instantiated instead.
+ *
+ * A note on merging: An interesting test case is isApplicableSafe.scala. It turns out that this
+ * requires a context merge using the new `&' operator. Sequence of actions:
+ * 1) Typecheck argument in typerstate 1.
+ * 2) Cache argument.
+ * 3) Evolve same typer state (to typecheck other arguments, say)
+ * leading to a different constraint.
+ * 4) Take typechecked argument in same state.
+ *
+ * It turns out that the merge is needed not just for
+ * isApplicableSafe but also for (e.g. erased-lubs.scala) as well as
+ * many parts of dotty itself.
*/
override def commit()(implicit ctx: Context) = {
val targetState = ctx.typerState
assert(isCommittable)
- targetState.constraint = constraint
+ targetState.constraint =
+ if (targetState.constraint eq previousConstraint) constraint
+ else targetState.constraint & constraint
constraint foreachTypeVar { tvar =>
if (tvar.owningState eq this)
tvar.owningState = targetState
}
- targetState.ephemeral = ephemeral
+ targetState.ephemeral |= ephemeral
targetState.gc()
reporter.flush()
+ isCommitted = true
}
override def gc()(implicit ctx: Context): Unit = {
diff --git a/src/dotty/tools/dotc/core/Types.scala b/src/dotty/tools/dotc/core/Types.scala
index c502162ab..1212cdd81 100644
--- a/src/dotty/tools/dotc/core/Types.scala
+++ b/src/dotty/tools/dotc/core/Types.scala
@@ -28,17 +28,18 @@ import Hashable._
import Uniques._
import collection.{mutable, Seq, breakOut}
import config.Config
-import config.Printers._
import annotation.tailrec
import Flags.FlagSet
-import typer.Mode
import language.implicitConversions
import scala.util.hashing.{ MurmurHash3 => hashing }
+import config.Printers.{core, typr, cyclicErrors}
object Types {
@sharable private var nextId = 0
+ implicit def eqType: Eq[Type, Type] = Eq
+
/** The class of types.
* The principal subclasses and sub-objects are as follows:
*
@@ -50,20 +51,22 @@ object Types {
* | | +--- SuperType
* | | +--- ConstantType
* | | +--- MethodParam
- * | | +----RefinedThis
+ * | | +----RecThis
* | | +--- SkolemType
* | +- PolyParam
- * | +- RefinedType
+ * | +- RefinedOrRecType -+-- RefinedType
+ * | | -+-- RecType
+ * | +- HKApply
* | +- TypeBounds
* | +- ExprType
* | +- AnnotatedType
* | +- TypeVar
+ * | +- PolyType
* |
* +- GroundType -+- AndType
* +- OrType
* +- MethodType -----+- ImplicitMethodType
* | +- JavaMethodType
- * +- PolyType
* +- ClassInfo
* |
* +- NoType
@@ -92,11 +95,14 @@ object Types {
/** Is this type a value type? */
final def isValueType: Boolean = this.isInstanceOf[ValueType]
+ /** Is this a value type or type lambda? */
+ final def isValueTypeOrLambda: Boolean = isValueType || this.isInstanceOf[PolyType]
+
/** Does this type denote a stable reference (i.e. singleton type)? */
final def isStable(implicit ctx: Context): Boolean = stripTypeVar match {
case tp: TermRef => tp.termSymbol.isStable && tp.prefix.isStable
case _: SingletonType | NoPrefix => true
- case tp: RefinedType => tp.parent.isStable
+ case tp: RefinedOrRecType => tp.parent.isStable
case _ => false
}
@@ -106,16 +112,15 @@ object Types {
* It makes no sense for it to be an alias type because isRef would always
* return false in that case.
*/
- def isRef(sym: Symbol)(implicit ctx: Context): Boolean = stripTypeVar match {
+ def isRef(sym: Symbol)(implicit ctx: Context): Boolean = stripAnnots.stripTypeVar match {
case this1: TypeRef =>
this1.info match { // see comment in Namer#typeDefSig
case TypeAlias(tp) => tp.isRef(sym)
case _ => this1.symbol eq sym
}
- case this1: RefinedType =>
- this1.parent.isRef(sym)
- case _ =>
- false
+ case this1: RefinedOrRecType => this1.parent.isRef(sym)
+ case this1: HKApply => this1.superType.isRef(sym)
+ case _ => false
}
/** Is this type a (neither aliased nor applied) reference to class `sym`? */
@@ -145,7 +150,7 @@ object Types {
def loop(tp: Type) = tp match {
case tp: TypeRef =>
val sym = tp.symbol
- if (sym.isClass) sym.derivesFrom(cls) else tp.underlying.derivesFrom(cls)
+ if (sym.isClass) sym.derivesFrom(cls) else tp.superType.derivesFrom(cls)
case tp: TypeProxy =>
tp.underlying.derivesFrom(cls)
case tp: AndType =>
@@ -174,7 +179,7 @@ object Types {
}
/** Is some part of this type produced as a repair for an error? */
- final def isErroneous(implicit ctx: Context): Boolean = existsPart(_.isError)
+ final def isErroneous(implicit ctx: Context): Boolean = existsPart(_.isError, forceLazy = false)
/** Does the type carry an annotation that is an instance of `cls`? */
final def hasAnnotation(cls: ClassSymbol)(implicit ctx: Context): Boolean = stripTypeVar match {
@@ -212,35 +217,12 @@ object Types {
/** Is this an alias TypeBounds? */
def isAlias: Boolean = this.isInstanceOf[TypeAlias]
- /** Is this type a transitive refinement of the given type?
- * This is true if the type consists of 0 or more refinements or other
- * non-singleton proxies that lead to the `prefix` type. ClassInfos with
- * the same class are counted as equal for this purpose.
- */
- def refines(prefix: Type)(implicit ctx: Context): Boolean = {
- val prefix1 = prefix.dealias
- def loop(tp: Type): Boolean =
- (tp eq prefix1) || {
- tp match {
- case base: ClassInfo =>
- prefix1 match {
- case prefix1: ClassInfo => base.cls eq prefix1.cls
- case _ => false
- }
- case base: SingletonType => false
- case base: TypeProxy => loop(base.underlying)
- case _ => false
- }
- }
- loop(this)
- }
-
// ----- Higher-order combinators -----------------------------------
/** Returns true if there is a part of this type that satisfies predicate `p`.
*/
- final def existsPart(p: Type => Boolean)(implicit ctx: Context): Boolean =
- new ExistsAccumulator(p).apply(false, this)
+ final def existsPart(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context): Boolean =
+ new ExistsAccumulator(p, forceLazy).apply(false, this)
/** Returns true if all parts of this type satisfy predicate `p`.
*/
@@ -248,8 +230,8 @@ object Types {
!existsPart(!p(_))
/** Performs operation on all parts of this type */
- final def foreachPart(p: Type => Unit)(implicit ctx: Context): Unit =
- new ForeachAccumulator(p).apply((), this)
+ final def foreachPart(p: Type => Unit, stopAtStatic: Boolean = false)(implicit ctx: Context): Unit =
+ new ForeachAccumulator(p, stopAtStatic).apply((), this)
/** The parts of this type which are type or term refs */
final def namedParts(implicit ctx: Context): collection.Set[NamedType] =
@@ -290,8 +272,8 @@ object Types {
case _ => NoSymbol
}
- /** The least class or trait of which this type is a subtype, or
- * NoSymbol if none exists (either because this type is not a
+ /** The least class or trait of which this type is a subtype or parameterized
+ * instance, or NoSymbol if none exists (either because this type is not a
* value type, or because superclasses are ambiguous).
*/
final def classSymbol(implicit ctx: Context): Symbol = this match {
@@ -299,7 +281,7 @@ object Types {
constant.tpe.classSymbol
case tp: TypeRef =>
val sym = tp.symbol
- if (sym.isClass) sym else tp.underlying.classSymbol
+ if (sym.isClass) sym else tp.superType.classSymbol
case tp: ClassInfo =>
tp.cls
case tp: SingletonType =>
@@ -329,7 +311,7 @@ object Types {
tp.cls :: Nil
case tp: TypeRef =>
val sym = tp.symbol
- if (sym.isClass) sym.asClass :: Nil else tp.underlying.classSymbols
+ if (sym.isClass) sym.asClass :: Nil else tp.superType.classSymbols
case tp: TypeProxy =>
tp.underlying.classSymbols
case AndType(l, r) =>
@@ -419,8 +401,16 @@ object Types {
memberExcluding(name, Flags.Private)
}
- final def memberExcluding(name: Name, excluding: FlagSet)(implicit ctx: Context): Denotation =
- findMember(name, widenIfUnstable, excluding)
+ final def memberExcluding(name: Name, excluding: FlagSet)(implicit ctx: Context): Denotation = {
+ // We need a valid prefix for `asSeenFrom`
+ val pre = this match {
+ case tp: ClassInfo =>
+ tp.typeRef
+ case _ =>
+ widenIfUnstable
+ }
+ findMember(name, pre, excluding)
+ }
/** Find member of this type with given name and
* produce a denotation that contains the type of the member
@@ -443,14 +433,22 @@ object Types {
})
case tp: PolyParam =>
goParam(tp)
+ case tp: RecType =>
+ goRec(tp)
+ case tp: HKApply =>
+ goApply(tp)
case tp: TypeProxy =>
go(tp.underlying)
case tp: ClassInfo =>
tp.cls.findMember(name, pre, excluded)
case AndType(l, r) =>
goAnd(l, r)
- case OrType(l, r) =>
- goOr(l, r)
+ case tp: OrType =>
+ // we need to keep the invariant that `pre <: tp`. Branch `union-types-narrow-prefix`
+ // achieved that by narrowing `pre` to each alternative, but it led to merge errors in
+ // lots of places. The present strategy is instead to widen `tp` using `join` to be a
+ // supertype of `pre`.
+ go(tp.join)
case tp: JavaArrayType =>
defn.ObjectType.findMember(name, pre, excluded)
case ErrorType =>
@@ -458,11 +456,49 @@ object Types {
case _ =>
NoDenotation
}
+ def goRec(tp: RecType) =
+ if (tp.parent == null) NoDenotation
+ else {
+ //println(s"find member $pre . $name in $tp")
+
+ // We have to be careful because we might open the same (wrt eq) recursive type
+ // twice during findMember which risks picking the wrong prefix in the `substRecThis(rt, pre)`
+ // call below. To avoid this problem we do a defensive copy of the recursive
+ // type first. But if we do this always we risk being inefficient and we ran into
+ // stackoverflows when compiling pos/hk.scala under the refinement encoding
+ // of hk-types. So we only do a copy if the type
+ // is visited again in a recursive call to `findMember`, as tracked by `tp.opened`.
+ // Furthermore, if this happens we mark the original recursive type with `openedTwice`
+ // which means that we always defensively copy the type in the future. This second
+ // measure is necessary because findMember calls might be cached, so do not
+ // necessarily appear in nested order.
+ // Without the defensive copy, Typer.scala fails to compile at the line
+ //
+ // untpd.rename(lhsCore, setterName).withType(setterType), WildcardType)
+ //
+ // because the subtype check
+ //
+ // ThisTree[Untyped]#ThisTree[Typed] <: Tree[Typed]
+ //
+ // fails (in fact it thinks the underlying type of the LHS is `Tree[Untyped]`.)
+ //
+ // Without the `openedTwice` trick, Typer.scala fails to Ycheck
+ // at phase resolveSuper.
+ val rt =
+ if (tp.opened) { // defensive copy
+ tp.openedTwice = true
+ RecType(rt => tp.parent.substRecThis(tp, RecThis(rt)))
+ } else tp
+ rt.opened = true
+ try go(rt.parent).mapInfo(_.substRecThis(rt, pre))
+ finally {
+ if (!rt.openedTwice) rt.opened = false
+ }
+ }
+
def goRefined(tp: RefinedType) = {
val pdenot = go(tp.parent)
- val rinfo =
- if (tp.refinementRefersToThis) tp.refinedInfo.substRefinedThis(tp, pre)
- else tp.refinedInfo
+ val rinfo = tp.refinedInfo
if (name.isTypeName) { // simplified case that runs more efficiently
val jointInfo =
if (rinfo.isAlias) rinfo
@@ -488,6 +524,15 @@ object Types {
safeIntersection = ctx.pendingMemberSearches.contains(name))
}
}
+
+ def goApply(tp: HKApply) = tp.tycon match {
+ case tl: PolyType =>
+ go(tl.resType).mapInfo(info =>
+ tl.derivedLambdaAbstraction(tl.paramNames, tl.paramBounds, info).appliedTo(tp.args))
+ case _ =>
+ go(tp.superType)
+ }
+
def goThis(tp: ThisType) = {
val d = go(tp.underlying)
if (d.exists)
@@ -522,7 +567,6 @@ object Types {
def goAnd(l: Type, r: Type) = {
go(l) & (go(r), pre, safeIntersection = ctx.pendingMemberSearches.contains(name))
}
- def goOr(l: Type, r: Type) = go(l) | (go(r), pre)
{ val recCount = ctx.findMemberCount + 1
ctx.findMemberCount = recCount
@@ -530,10 +574,11 @@ object Types {
ctx.pendingMemberSearches = name :: ctx.pendingMemberSearches
}
+ //assert(ctx.findMemberCount < 20)
try go(this)
catch {
case ex: Throwable =>
- core.println(i"findMember exception for $this member $name")
+ core.println(i"findMember exception for $this member $name, pre = $pre")
throw ex // DEBUG
}
finally {
@@ -806,32 +851,49 @@ object Types {
case _ => this
}
- /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type
- * is no longer alias type, LazyRef, or instantiated type variable.
- */
- final def dealias(implicit ctx: Context): Type = this match {
+ /** Eliminate anonymous classes */
+ final def deAnonymize(implicit ctx: Context): Type = this match {
+ case tp:TypeRef if tp.symbol.isAnonymousClass =>
+ tp.symbol.asClass.typeRef.asSeenFrom(tp.prefix, tp.symbol.owner)
+ case tp => tp
+ }
+
+ private def dealias(keepAnnots: Boolean)(implicit ctx: Context): Type = this match {
case tp: TypeRef =>
if (tp.symbol.isClass) tp
else tp.info match {
- case TypeAlias(tp) => tp.dealias
+ case TypeAlias(tp) => tp.dealias(keepAnnots)
case _ => tp
}
case tp: TypeVar =>
val tp1 = tp.instanceOpt
- if (tp1.exists) tp1.dealias else tp
- case tp: LazyRef =>
- tp.ref.dealias
+ if (tp1.exists) tp1.dealias(keepAnnots) else tp
case tp: AnnotatedType =>
- tp.derivedAnnotatedType(tp.tpe.dealias, tp.annot)
- case tp => tp
- }
-
- /** If this is a TypeAlias type, its alias otherwise this type itself */
- final def followTypeAlias(implicit ctx: Context): Type = this match {
- case TypeAlias(alias) => alias
+ val tp1 = tp.tpe.dealias(keepAnnots)
+ if (keepAnnots) tp.derivedAnnotatedType(tp1, tp.annot) else tp1
+ case tp: LazyRef =>
+ tp.ref.dealias(keepAnnots)
+ case app @ HKApply(tycon, args) =>
+ val tycon1 = tycon.dealias(keepAnnots)
+ if (tycon1 ne tycon) app.superType.dealias(keepAnnots)
+ else this
case _ => this
}
+ /** Follow aliases and dereferences LazyRefs and instantiated TypeVars until type
+ * is no longer alias type, LazyRef, or instantiated type variable.
+ * Goes through annotated types and rewraps annotations on the result.
+ */
+ final def dealiasKeepAnnots(implicit ctx: Context): Type =
+ dealias(keepAnnots = true)
+
+ /** Follow aliases and dereferences LazyRefs, annotated types and instantiated
+ * TypeVars until type is no longer alias type, annotated type, LazyRef,
+ * or instantiated type variable.
+ */
+ final def dealias(implicit ctx: Context): Type =
+ dealias(keepAnnots = false)
+
/** Perform successive widenings and dealiasings until none can be applied anymore */
final def widenDealias(implicit ctx: Context): Type = {
val res = this.widen.dealias
@@ -846,14 +908,6 @@ object Types {
case _ => this
}
- /** If this is a refinement type, the unrefined parent,
- * else the type itself.
- */
- final def unrefine(implicit ctx: Context): Type = stripTypeVar match {
- case tp @ RefinedType(tycon, _) => tycon.unrefine
- case _ => this
- }
-
/** If this is a (possibly aliased, annotated, and/or parameterized) reference to
* a class, the class type ref, otherwise NoType.
* @param refinementOK If `true` we also skip non-parameter refinements.
@@ -863,12 +917,16 @@ object Types {
if (tp.symbol.isClass) tp
else if (tp.symbol.isAliasType) tp.underlying.underlyingClassRef(refinementOK)
else NoType
- case tp: AnnotatedType => tp.underlying.underlyingClassRef(refinementOK)
+ case tp: AnnotatedType =>
+ tp.underlying.underlyingClassRef(refinementOK)
case tp: RefinedType =>
def isParamName = tp.classSymbol.typeParams.exists(_.name == tp.refinedName)
if (refinementOK || isParamName) tp.underlying.underlyingClassRef(refinementOK)
else NoType
- case _ => NoType
+ case tp: RecType =>
+ tp.underlying.underlyingClassRef(refinementOK)
+ case _ =>
+ NoType
}
/** The iterator of underlying types as long as type is a TypeProxy.
@@ -891,6 +949,17 @@ object Types {
def narrow(implicit ctx: Context): TermRef =
TermRef(NoPrefix, ctx.newSkolem(this))
+ /** Useful for diagnostics: The underlying type if this type is a type proxy,
+ * otherwise NoType
+ */
+ def underlyingIfProxy(implicit ctx: Context) = this match {
+ case this1: TypeProxy => this1.underlying
+ case _ => NoType
+ }
+
+ /** If this is a FunProto or PolyProto, WildcardType, otherwise this. */
+ def notApplied: Type = this
+
// ----- Normalizing typerefs over refined types ----------------------------
/** If this normalizes* to a refinement type that has a refinement for `name` (which might be followed
@@ -906,62 +975,23 @@ object Types {
*
* P { type T = String, type R = P{...}.T } # R --> String
*
- * (2) The refinement is a fully instantiated type lambda, and the projected name is "$apply".
- * In this case the rhs of the apply is returned with all references to lambda argument types
- * substituted by their definitions.
- *
* (*) normalizes means: follow instantiated typevars and aliases.
*/
def lookupRefined(name: Name)(implicit ctx: Context): Type = {
def loop(pre: Type): Type = pre.stripTypeVar match {
case pre: RefinedType =>
- object instantiate extends TypeMap {
- var isSafe = true
- def apply(tp: Type): Type = tp match {
- case TypeRef(RefinedThis(`pre`), name) if name.isHkArgName =>
- member(name).info match {
- case TypeAlias(alias) => alias
- case _ => isSafe = false; tp
- }
- case tp: TypeVar if !tp.inst.exists =>
- isSafe = false
- tp
- case _ =>
- mapOver(tp)
- }
- }
- def instArg(tp: Type): Type = tp match {
- case tp @ TypeAlias(TypeRef(RefinedThis(`pre`), name)) if name.isHkArgName =>
- member(name).info match {
- case TypeAlias(alias) => tp.derivedTypeAlias(alias) // needed to keep variance
- case bounds => bounds
- }
- case _ =>
- instantiate(tp)
- }
- def instTop(tp: Type): Type = tp.stripTypeVar match {
- case tp: RefinedType =>
- tp.derivedRefinedType(instTop(tp.parent), tp.refinedName, instArg(tp.refinedInfo))
- case _ =>
- instantiate(tp)
- }
- /** Reduce rhs of $hkApply to make it stand alone */
- def betaReduce(tp: Type) = {
- val reduced = instTop(tp)
- if (instantiate.isSafe) reduced else NoType
- }
pre.refinedInfo match {
case TypeAlias(alias) =>
- if (pre.refinedName ne name) loop(pre.parent)
- else if (!pre.refinementRefersToThis) alias
- else alias match {
- case TypeRef(RefinedThis(`pre`), aliasName) => lookupRefined(aliasName) // (1)
- case _ => if (name == tpnme.hkApply) betaReduce(alias) else NoType // (2) // ### use TypeApplication's betaReduce
- }
+ if (pre.refinedName ne name) loop(pre.parent) else alias
case _ => loop(pre.parent)
}
- case RefinedThis(binder) =>
- binder.lookupRefined(name)
+ case pre: RecType =>
+ val candidate = loop(pre.parent)
+ if (candidate.exists && !pre.isReferredToBy(candidate)) {
+ //println(s"lookupRefined ${this.toString} . $name, pre: $pre ---> $candidate / ${candidate.toString}")
+ candidate
+ }
+ else NoType
case SkolemType(tp) =>
tp.lookupRefined(name)
case pre: WildcardType =>
@@ -1025,7 +1055,7 @@ object Types {
/** The full parent types, including all type arguments */
def parentsWithArgs(implicit ctx: Context): List[Type] = this match {
- case tp: TypeProxy => tp.underlying.parentsWithArgs
+ case tp: TypeProxy => tp.superType.parentsWithArgs
case _ => List()
}
@@ -1037,9 +1067,9 @@ object Types {
/** the self type of the underlying classtype */
def givenSelfType(implicit ctx: Context): Type = this match {
- case tp @ RefinedType(parent, name) => tp.wrapIfMember(parent.givenSelfType)
+ case tp: RefinedType => tp.wrapIfMember(tp.parent.givenSelfType)
case tp: ThisType => tp.tref.givenSelfType
- case tp: TypeProxy => tp.underlying.givenSelfType
+ case tp: TypeProxy => tp.superType.givenSelfType
case _ => NoType
}
@@ -1058,7 +1088,7 @@ object Types {
}
- /** The parameter types in the first parameter section of a PolyType or MethodType, Empty list for others */
+ /** The parameter types in the first parameter section of a generic type or MethodType, Empty list for others */
final def firstParamTypes(implicit ctx: Context): List[Type] = this match {
case mt: MethodType => mt.paramTypes
case pt: PolyType => pt.resultType.firstParamTypes
@@ -1150,9 +1180,9 @@ object Types {
final def substThisUnlessStatic(cls: ClassSymbol, tp: Type)(implicit ctx: Context): Type =
if (cls.isStaticOwner) this else ctx.substThis(this, cls, tp, null)
- /** Substitute all occurrences of `SkolemType(binder)` by `tp` */
- final def substRefinedThis(binder: Type, tp: Type)(implicit ctx: Context): Type =
- ctx.substRefinedThis(this, binder, tp, null)
+ /** Substitute all occurrences of `RecThis(binder)` by `tp` */
+ final def substRecThis(binder: RecType, tp: Type)(implicit ctx: Context): Type =
+ ctx.substRecThis(this, binder, tp, null)
/** Substitute a bound type by some other type */
final def substParam(from: ParamType, to: Type)(implicit ctx: Context): Type =
@@ -1171,8 +1201,8 @@ object Types {
/** Turn type into a function type.
* @pre this is a non-dependent method type.
- * @param drop The number of trailing parameters that should be dropped
- * when forming the function type.
+ * @param dropLast The number of trailing parameters that should be dropped
+ * when forming the function type.
*/
def toFunctionType(dropLast: Int = 0)(implicit ctx: Context): Type = this match {
case mt @ MethodType(_, formals) if !mt.isDependent || ctx.mode.is(Mode.AllowDependentFunctions) =>
@@ -1210,28 +1240,6 @@ object Types {
*/
def simplified(implicit ctx: Context) = ctx.simplify(this, null)
- /** Approximations of union types: We replace a union type Tn | ... | Tn
- * by the smallest intersection type of baseclass instances of T1,...,Tn.
- * Example: Given
- *
- * trait C[+T]
- * trait D
- * class A extends C[A] with D
- * class B extends C[B] with D with E
- *
- * we approximate `A | B` by `C[A | B] with D`
- *
- * As a second measure we also homogenize refinements containing
- * type variables. For instance, if `A` is an instantiatable type variable,
- * then
- *
- * ArrayBuffer[Int] | ArrayBuffer[A]
- *
- * is approximated by instantiating `A` to `Int` and returning `ArrayBuffer[Int]`
- * instead of `ArrayBuffer[_ >: Int | A <: Int & A]`
- */
- def approximateUnion(implicit ctx: Context) = ctx.approximateUnion(this)
-
/** customized hash code of this type.
* NotCached for uncached types. Cached types
* compute hash and use it as the type's hashCode.
@@ -1248,8 +1256,15 @@ object Types {
* Each implementation is expected to redefine the `underlying` method.
*/
abstract class TypeProxy extends Type {
+
/** The type to which this proxy forwards operations. */
def underlying(implicit ctx: Context): Type
+
+ /** The closest supertype of this type. This is the same as `underlying`,
+ * except for TypeRefs where the upper bound is returned, and HKApplys,
+ * where the upper bound of the constructor is re-applied to the arguments.
+ */
+ def superType(implicit ctx: Context): Type = underlying
}
  // Every type has to inherit one of the following four abstract type classes.
@@ -1308,13 +1323,15 @@ object Types {
/** A marker trait for types that apply only to type symbols */
trait TypeType extends Type
- /** A marker trait for types that apply only to term symbols */
+ /** A marker trait for types that apply only to term symbols or that
+ * represent higher-kinded types.
+ */
trait TermType extends Type
/** A marker trait for types that can be types of values or prototypes of value types */
trait ValueTypeOrProto extends TermType
- /** A marker trait for types that can be types of values */
+ /** A marker trait for types that can be types of values or that are higher-kinded */
trait ValueType extends ValueTypeOrProto
/** A marker trait for types that are guaranteed to contain only a
@@ -1401,6 +1418,9 @@ object Types {
else computeDenot
}
+ /** Hook for adding debug check code when denotations are assigned */
+ final def checkDenot()(implicit ctx: Context) = {}
+
/** A second fallback to recompute the denotation if necessary */
private def computeDenot(implicit ctx: Context): Denotation = {
val savedEphemeral = ctx.typerState.ephemeral
@@ -1436,6 +1456,7 @@ object Types {
// Don't use setDenot here; double binding checks can give spurious failures after erasure
lastDenotation = d
+ checkDenot()
lastSymbol = d.symbol
checkedPeriod = ctx.period
}
@@ -1449,7 +1470,11 @@ object Types {
asMemberOf(prefix) match {
case NoDenotation => d.current
case newd: SingleDenotation => newd
- case newd => newd.atSignature(d.signature).orElse(d.current)
+ case newd =>
+ newd.atSignature(d.signature) match {
+ case newd1: SingleDenotation if newd1.exists => newd1
+ case _ => d.current
+ }
}
private def denotOfSym(sym: Symbol)(implicit ctx: Context): Denotation = {
@@ -1478,11 +1503,11 @@ object Types {
(sym.owner.derivesFrom(lastSymbol.owner) ||
selfTypeOf(sym).derivesFrom(lastSymbol.owner) ||
selfTypeOf(lastSymbol).derivesFrom(sym.owner))),
- s"""data race? overwriting symbol of type ${this.show},
- |long form = $this of class ${this.getClass},
+ i"""data race? overwriting symbol of type $this,
+ |long form = $toString of class $getClass,
|last sym id = ${lastSymbol.id}, new sym id = ${sym.id},
|last owner = ${lastSymbol.owner}, new owner = ${sym.owner},
- |period = ${ctx.phase} at run ${ctx.runId}""".stripMargin)
+ |period = ${ctx.phase} at run ${ctx.runId}""")
}
protected def sig: Signature = Signature.NotAMethod
@@ -1503,6 +1528,7 @@ object Types {
// additional checks that intercept `denot` can be added here
lastDenotation = denot
+ checkDenot()
lastSymbol = denot.symbol
checkedPeriod = Nowhere
}
@@ -1542,15 +1568,16 @@ object Types {
}
}
- protected def asMemberOf(prefix: Type)(implicit ctx: Context) =
+ protected def asMemberOf(prefix: Type)(implicit ctx: Context): Denotation =
if (name.isShadowedName) prefix.nonPrivateMember(name.revertShadowed)
else prefix.member(name)
+
/** (1) Reduce a type-ref `W # X` or `W { ... } # U`, where `W` is a wildcard type
* to an (unbounded) wildcard type.
*
* (2) Reduce a type-ref `T { X = U; ... } # X` to `U`
- * provided `U` does not refer with a RefinedThis to the
+ * provided `U` does not refer with a RecThis to the
* refinement type `T { X = U; ... }`
*/
def reduceProjection(implicit ctx: Context): Type = {
@@ -1605,7 +1632,7 @@ object Types {
ctx.underlyingRecursions -= 1
}
- /** A selection of the same kind, but with potentially a differet prefix.
+ /** A selection of the same kind, but with potentially a different prefix.
* The following normalizations are performed for type selections T#A:
*
* T#A --> B if A is bound to an alias `= B` in T
@@ -1622,13 +1649,6 @@ object Types {
else if (isType) {
val res = prefix.lookupRefined(name)
if (res.exists) res
- else if (name == tpnme.hkApply && prefix.classNotLambda) {
- // After substitution we might end up with a type like
- // `C { type hk$0 = T0; ...; type hk$n = Tn } # $Apply`
- // where C is a class. In that case we eta expand `C`.
- if (defn.isBottomType(prefix)) prefix.classSymbol.typeRef
- else derivedSelect(prefix.EtaExpandCore)
- }
else if (Config.splitProjections)
prefix match {
case prefix: AndType =>
@@ -1733,6 +1753,11 @@ object Types {
type ThisType = TypeRef
override def underlying(implicit ctx: Context): Type = info
+
+ override def superType(implicit ctx: Context): Type = info match {
+ case TypeBounds(_, hi) => hi
+ case _ => info
+ }
}
final class TermRefWithSignature(prefix: Type, name: TermName, override val sig: Signature) extends TermRef(prefix, name) {
@@ -1741,7 +1766,7 @@ object Types {
override def loadDenot(implicit ctx: Context): Denotation = {
val d = super.loadDenot
if (sig eq Signature.OverloadedSignature) d
- else d.atSignature(sig)
+ else d.atSignature(sig).checkUnique
}
override def newLikeThis(prefix: Type)(implicit ctx: Context): TermRef = {
@@ -1749,7 +1774,7 @@ object Types {
if (symbol.exists && !candidate.symbol.exists) { // recompute from previous symbol
val ownSym = symbol
val newd = asMemberOf(prefix)
- candidate.withDenot(asMemberOf(prefix).suchThat(_ eq ownSym))
+ candidate.withDenot(newd.suchThat(_.signature == ownSym.signature))
}
else candidate
}
@@ -1763,6 +1788,7 @@ object Types {
false
}
override def computeHash = doHash((name, sig), prefix)
+ override def toString = super.toString ++ s"/withSig($sig)"
}
trait WithFixedSym extends NamedType {
@@ -1902,15 +1928,9 @@ object Types {
}
object TypeRef {
- def checkProjection(prefix: Type, name: TypeName)(implicit ctx: Context) =
- if (name == tpnme.hkApply && prefix.classNotLambda)
- assert(false, s"bad type : $prefix.$name does not allow $$Apply projection")
-
/** Create type ref with given prefix and name */
- def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef = {
- if (Config.checkProjections) checkProjection(prefix, name)
+ def apply(prefix: Type, name: TypeName)(implicit ctx: Context): TypeRef =
ctx.uniqueNamedTypes.enterIfNew(prefix, name).asInstanceOf[TypeRef]
- }
/** Create type ref to given symbol */
def apply(prefix: Type, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
@@ -1919,10 +1939,8 @@ object Types {
/** Create a non-member type ref (which cannot be reloaded using `member`),
* with given prefix, name, and symbol.
*/
- def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef = {
- if (Config.checkProjections) checkProjection(prefix, name)
+ def withFixedSym(prefix: Type, name: TypeName, sym: TypeSymbol)(implicit ctx: Context): TypeRef =
unique(new TypeRefWithFixedSym(prefix, name, sym))
- }
/** Create a type ref referring to given symbol with given name.
* This is very similar to TypeRef(Type, Symbol),
@@ -2020,46 +2038,29 @@ object Types {
override def hashCode = ref.hashCode + 37
}
- // --- Refined Type ---------------------------------------------------------
+ // --- Refined Type and RecType ------------------------------------------------
+
+ abstract class RefinedOrRecType extends CachedProxyType with ValueType {
+ def parent: Type
+ }
/** A refined type parent { refinement }
* @param refinedName The name of the refinement declaration
* @param infoFn: A function that produces the info of the refinement declaration,
* given the refined type itself.
*/
- abstract case class RefinedType(parent: Type, refinedName: Name)
- extends CachedProxyType with BindingType with ValueType {
-
- val refinedInfo: Type
-
- private var refinementRefersToThisCache: Boolean = _
- private var refinementRefersToThisKnown: Boolean = false
-
- def refinementRefersToThis(implicit ctx: Context): Boolean = {
- if (!refinementRefersToThisKnown) {
- refinementRefersToThisCache = refinedInfo.containsRefinedThis(this)
- refinementRefersToThisKnown = true
- }
- refinementRefersToThisCache
- }
+ abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType {
override def underlying(implicit ctx: Context) = parent
private def badInst =
throw new AssertionError(s"bad instantiation: $this")
- def checkInst(implicit ctx: Context): this.type = {
- if (refinedName == tpnme.hkApply)
- parent.stripTypeVar match {
- case RefinedType(_, name) if name.isHkArgName => // ok
- case _ => badInst
- }
- this
- }
+ def checkInst(implicit ctx: Context): this.type = this // debug hook
- def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): RefinedType =
+ def derivedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type)(implicit ctx: Context): Type =
if ((parent eq this.parent) && (refinedName eq this.refinedName) && (refinedInfo eq this.refinedInfo)) this
- else RefinedType(parent, refinedName, rt => refinedInfo.substRefinedThis(this, RefinedThis(rt)))
+ else RefinedType(parent, refinedName, refinedInfo)
  /** Add this refinement to `parent`, provided `refinedName` is a member of `parent`. */
def wrapIfMember(parent: Type)(implicit ctx: Context): Type =
@@ -2078,25 +2079,16 @@ object Types {
override def toString = s"RefinedType($parent, $refinedName, $refinedInfo)"
}
- class CachedRefinedType(parent: Type, refinedName: Name, infoFn: RefinedType => Type) extends RefinedType(parent, refinedName) {
- val refinedInfo = infoFn(this)
- }
-
- class PreHashedRefinedType(parent: Type, refinedName: Name, override val refinedInfo: Type, hc: Int)
- extends RefinedType(parent, refinedName) {
+ class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type, hc: Int)
+ extends RefinedType(parent, refinedName, refinedInfo) {
myHash = hc
override def computeHash = unsupported("computeHash")
}
object RefinedType {
- def make(parent: Type, names: List[Name], infoFns: List[RefinedType => Type])(implicit ctx: Context): Type =
+ def make(parent: Type, names: List[Name], infos: List[Type])(implicit ctx: Context): Type =
if (names.isEmpty) parent
- else make(RefinedType(parent, names.head, infoFns.head), names.tail, infoFns.tail)
-
- def apply(parent: Type, name: Name, infoFn: RefinedType => Type)(implicit ctx: Context): RefinedType = {
- assert(!ctx.erasedTypes || ctx.mode.is(Mode.Printing))
- ctx.base.uniqueRefinedTypes.enterIfNew(new CachedRefinedType(parent, name, infoFn)).checkInst
- }
+ else make(RefinedType(parent, names.head, infos.head), names.tail, infos.tail)
def apply(parent: Type, name: Name, info: Type)(implicit ctx: Context): RefinedType = {
assert(!ctx.erasedTypes)
@@ -2104,6 +2096,83 @@ object Types {
}
}
+ class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType {
+
+ // See discussion in findMember#goRec why these vars are needed
+ private[Types] var opened: Boolean = false
+ private[Types] var openedTwice: Boolean = false
+
+ val parent = parentExp(this)
+
+ override def underlying(implicit ctx: Context): Type = parent
+
+ def derivedRecType(parent: Type)(implicit ctx: Context): RecType =
+ if (parent eq this.parent) this
+ else RecType(rt => parent.substRecThis(this, RecThis(rt)))
+
+ def rebind(parent: Type)(implicit ctx: Context): Type =
+ if (parent eq this.parent) this
+ else RecType.closeOver(rt => parent.substRecThis(this, RecThis(rt)))
+
+ override def equals(other: Any) = other match {
+ case other: RecType => other.parent == this.parent
+ case _ => false
+ }
+
+ def isReferredToBy(tp: Type)(implicit ctx: Context): Boolean = {
+ val refacc = new TypeAccumulator[Boolean] {
+ override def apply(x: Boolean, tp: Type) = x || {
+ tp match {
+ case tp: TypeRef => apply(x, tp.prefix)
+ case tp: RecThis => RecType.this eq tp.binder
+ case tp: LazyRef => true // To be safe, assume a reference exists
+ case _ => foldOver(x, tp)
+ }
+ }
+ }
+ refacc.apply(false, tp)
+ }
+
+ override def computeHash = doHash(parent)
+ override def toString = s"RecType($parent | $hashCode)"
+
+ private def checkInst(implicit ctx: Context): this.type = this // debug hook
+ }
+
+ object RecType {
+
+ /** Create a RecType, normalizing its contents. This means:
+ *
+ * 1. Nested Rec types on the type's spine are merged with the outer one.
+ * 2. Any refinement of the form `type T = z.T` on the spine of the type
+ * where `z` refers to the created rec-type is replaced by
+ * `type T`. This avoids infinite recursions later when we
+ * try to follow these references.
+ * TODO: Figure out how to guarantee absence of cycles
+ * of length > 1
+ */
+ def apply(parentExp: RecType => Type)(implicit ctx: Context): RecType = {
+ val rt = new RecType(parentExp)
+ def normalize(tp: Type): Type = tp.stripTypeVar match {
+ case tp: RecType =>
+ normalize(tp.parent.substRecThis(tp, RecThis(rt)))
+ case tp @ RefinedType(parent, rname, rinfo) =>
+ val rinfo1 = rinfo match {
+ case TypeAlias(TypeRef(RecThis(`rt`), `rname`)) => TypeBounds.empty
+ case _ => rinfo
+ }
+ tp.derivedRefinedType(normalize(parent), rname, rinfo1)
+ case tp =>
+ tp
+ }
+ unique(rt.derivedRecType(normalize(rt.parent))).checkInst
+ }
+ def closeOver(parentExp: RecType => Type)(implicit ctx: Context) = {
+ val rt = this(parentExp)
+ if (rt.isReferredToBy(rt.parent)) rt else rt.parent
+ }
+ }
+
// --- AndType/OrType ---------------------------------------------------------------
trait AndOrType extends ValueType { // todo: check where we can simplify using AndOrType
@@ -2135,7 +2204,7 @@ object Types {
object AndType {
def apply(tp1: Type, tp2: Type)(implicit ctx: Context) = {
- assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType])
+ assert(tp1.isValueType && tp2.isValueType, i"$tp1 & $tp2 / " + s"$tp1 & $tp2")
unchecked(tp1, tp2)
}
def unchecked(tp1: Type, tp2: Type)(implicit ctx: Context) = {
@@ -2143,13 +2212,33 @@ object Types {
unique(new CachedAndType(tp1, tp2))
}
def make(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
- if (tp1 eq tp2) tp1 else apply(tp1, tp2)
+ if ((tp1 eq tp2) || (tp2 eq defn.AnyType))
+ tp1
+ else if (tp1 eq defn.AnyType)
+ tp2
+ else
+ apply(tp1, tp2)
}
abstract case class OrType(tp1: Type, tp2: Type) extends CachedGroundType with AndOrType {
+
assert(tp1.isInstanceOf[ValueType] && tp2.isInstanceOf[ValueType])
def isAnd = false
+ private[this] var myJoin: Type = _
+ private[this] var myJoinPeriod: Period = Nowhere
+
+ /** Replace or type by the closest non-or type above it */
+ def join(implicit ctx: Context): Type = {
+ if (myJoinPeriod != ctx.period) {
+ myJoin = ctx.orDominator(this)
+ core.println(i"join of $this == $myJoin")
+ assert(myJoin != this)
+ myJoinPeriod = ctx.period
+ }
+ myJoin
+ }
+
def derivedOrType(tp1: Type, tp2: Type)(implicit ctx: Context): Type =
if ((tp1 eq this.tp1) && (tp2 eq this.tp2)) this
else OrType.make(tp1, tp2)
@@ -2178,7 +2267,7 @@ object Types {
// and therefore two different poly types would never be equal.
/** A trait that mixes in functionality for signature caching */
- trait MethodicType extends Type {
+ trait MethodicType extends TermType {
private[this] var mySignature: Signature = _
private[this] var mySignatureRunId: Int = NoRunId
@@ -2198,7 +2287,7 @@ object Types {
final override def signature(implicit ctx: Context): Signature = {
if (ctx.runId != mySignatureRunId) {
mySignature = computeSignature
- mySignatureRunId = ctx.runId
+ if (!mySignature.isUnderDefined) mySignatureRunId = ctx.runId
}
mySignature
}
@@ -2221,9 +2310,11 @@ object Types {
if (dependencyStatus == FalseDeps) { // dealias all false dependencies
val dealiasMap = new TypeMap {
def apply(tp: Type) = tp match {
- case tp @ TypeRef(MethodParam(`thisMethodType`, _), name) => // follow type alias to avoid dependency
- val TypeAlias(alias) = tp.info
- apply(alias)
+ case tp @ TypeRef(pre, name) =>
+ tp.info match {
+ case TypeAlias(alias) if depStatus(pre) == TrueDeps => apply(alias)
+ case _ => mapOver(tp)
+ }
case _ =>
mapOver(tp)
}
@@ -2234,10 +2325,31 @@ object Types {
var myDependencyStatus: DependencyStatus = Unknown
- private def combine(x: DependencyStatus, y: DependencyStatus): DependencyStatus = {
- val status = (x & StatusMask) max (y & StatusMask)
- val provisional = (x | y) & Provisional
- (if (status == TrueDeps) status else status | provisional).toByte
+ private def depStatus(tp: Type)(implicit ctx: Context): DependencyStatus = {
+ def combine(x: DependencyStatus, y: DependencyStatus) = {
+ val status = (x & StatusMask) max (y & StatusMask)
+ val provisional = (x | y) & Provisional
+ (if (status == TrueDeps) status else status | provisional).toByte
+ }
+ val depStatusAcc = new TypeAccumulator[DependencyStatus] {
+ def apply(status: DependencyStatus, tp: Type) =
+ if (status == TrueDeps) status
+ else
+ tp match {
+ case MethodParam(`thisMethodType`, _) => TrueDeps
+ case tp: TypeRef =>
+ val status1 = foldOver(status, tp)
+ tp.info match { // follow type alias to avoid dependency
+ case TypeAlias(alias) if status1 == TrueDeps && status != TrueDeps =>
+ combine(apply(status, alias), FalseDeps)
+ case _ =>
+ status1
+ }
+ case tp: TypeVar if !tp.isInstantiated => combine(status, Provisional)
+ case _ => foldOver(status, tp)
+ }
+ }
+ depStatusAcc(NoDeps, tp)
}
/** The dependency status of this method. Some examples:
@@ -2251,22 +2363,7 @@ object Types {
private def dependencyStatus(implicit ctx: Context): DependencyStatus = {
if (myDependencyStatus != Unknown) myDependencyStatus
else {
- val isDepAcc = new TypeAccumulator[DependencyStatus] {
- def apply(x: DependencyStatus, tp: Type) =
- if (x == TrueDeps) x
- else
- tp match {
- case MethodParam(`thisMethodType`, _) => TrueDeps
- case tp @ TypeRef(MethodParam(`thisMethodType`, _), name) =>
- tp.info match { // follow type alias to avoid dependency
- case TypeAlias(alias) => combine(apply(x, alias), FalseDeps)
- case _ => TrueDeps
- }
- case tp: TypeVar if !tp.isInstantiated => combine(x, Provisional)
- case _ => foldOver(x, tp)
- }
- }
- val result = isDepAcc(NoDeps, resType)
+ val result = depStatus(resType)
if ((result & Provisional) == 0) myDependencyStatus = result
(result & StatusMask).toByte
}
@@ -2337,7 +2434,12 @@ object Types {
apply(nme.syntheticParamNames(paramTypes.length), paramTypes)(resultTypeExp)
def apply(paramTypes: List[Type], resultType: Type)(implicit ctx: Context): MethodType =
apply(nme.syntheticParamNames(paramTypes.length), paramTypes, resultType)
+
+ /** Produce method type from parameter symbols, with special mappings for repeated
+ * and inline parameters.
+ */
def fromSymbols(params: List[Symbol], resultType: Type)(implicit ctx: Context) = {
+ /** Replace @repeated annotations on Seq or Array types by <repeated> types */
def translateRepeated(tp: Type): Type = tp match {
case tp @ ExprType(tp1) => tp.derivedExprType(translateRepeated(tp1))
case AnnotatedType(tp, annot) if annot matches defn.RepeatedAnnot =>
@@ -2347,7 +2449,15 @@ object Types {
case tp =>
tp
}
- def paramInfo(param: Symbol): Type = translateRepeated(param.info)
+ /** Add @inlineParam to inline call-by-value parameters */
+ def translateInline(tp: Type): Type = tp match {
+ case _: ExprType => tp
+ case _ => AnnotatedType(tp, Annotation(defn.InlineParamAnnot))
+ }
+ def paramInfo(param: Symbol): Type = {
+ val paramType = translateRepeated(param.info)
+ if (param.is(Inline)) translateInline(paramType) else paramType
+ }
def transformResult(mt: MethodType) =
resultType.subst(params, (0 until params.length).toList map (MethodParam(mt, _)))
apply(params map (_.name.asTermName), params map paramInfo)(transformResult _)
@@ -2397,15 +2507,19 @@ object Types {
}
}
- abstract case class PolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
- extends CachedGroundType with BindingType with TermType with MethodOrPoly {
+ /** A type lambda of the form `[v_0 X_0, ..., v_n X_n] => T` */
+ class PolyType(val paramNames: List[TypeName], val variances: List[Int])(
+ paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
+ extends CachedProxyType with BindingType with MethodOrPoly {
- val paramBounds = paramBoundsExp(this)
- val resType = resultTypeExp(this)
+ /** The bounds of the type parameters */
+ val paramBounds: List[TypeBounds] = paramBoundsExp(this)
- assert(resType ne null)
+ /** The result type of a PolyType / body of a type lambda */
+ val resType: Type = resultTypeExp(this)
- override def resultType(implicit ctx: Context) = resType
+ assert(resType.isInstanceOf[TermType], this)
+ assert(paramNames.nonEmpty)
protected def computeSignature(implicit ctx: Context) = resultSignature
@@ -2414,53 +2528,194 @@ object Types {
case _ => false
}
- def instantiate(argTypes: List[Type])(implicit ctx: Context): Type =
+ /** PolyParam references to all type parameters of this type */
+ lazy val paramRefs: List[PolyParam] = paramNames.indices.toList.map(PolyParam(this, _))
+
+ lazy val typeParams: List[LambdaParam] =
+ paramNames.indices.toList.map(new LambdaParam(this, _))
+
+ override def resultType(implicit ctx: Context) = resType
+ override def underlying(implicit ctx: Context) = resType
+
+ /** Instantiate result type by substituting parameters with given arguments */
+ final def instantiate(argTypes: List[Type])(implicit ctx: Context): Type =
resultType.substParams(this, argTypes)
- def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] =
+ /** Instantiate parameter bounds by substituting parameters with given arguments */
+ final def instantiateBounds(argTypes: List[Type])(implicit ctx: Context): List[TypeBounds] =
paramBounds.mapConserve(_.substParams(this, argTypes).bounds)
- def derivedPolyType(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context) =
- if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this
- else duplicate(paramNames, paramBounds, resType)
-
- def duplicate(paramNames: List[TypeName] = this.paramNames, paramBounds: List[TypeBounds] = this.paramBounds, resType: Type)(implicit ctx: Context) =
- PolyType(paramNames)(
+ def newLikeThis(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): PolyType =
+ PolyType.apply(paramNames, variances)(
x => paramBounds mapConserve (_.subst(this, x).bounds),
x => resType.subst(this, x))
+ def derivedPolyType(paramNames: List[TypeName] = this.paramNames,
+ paramBounds: List[TypeBounds] = this.paramBounds,
+ resType: Type = this.resType)(implicit ctx: Context) =
+ if ((paramNames eq this.paramNames) && (paramBounds eq this.paramBounds) && (resType eq this.resType)) this
+ else newLikeThis(paramNames, paramBounds, resType)
+
+ def derivedLambdaAbstraction(paramNames: List[TypeName], paramBounds: List[TypeBounds], resType: Type)(implicit ctx: Context): Type =
+ resType match {
+ case resType @ TypeAlias(alias) =>
+ resType.derivedTypeAlias(newLikeThis(paramNames, paramBounds, alias))
+ case resType @ TypeBounds(lo, hi) =>
+ resType.derivedTypeBounds(
+ if (lo.isRef(defn.NothingClass)) lo else newLikeThis(paramNames, paramBounds, lo),
+ newLikeThis(paramNames, paramBounds, hi))
+ case _ =>
+ derivedPolyType(paramNames, paramBounds, resType)
+ }
+
+ /** Merge nested polytypes into one polytype. nested polytypes are normally not supported
+ * but can arise as temporary data structures.
+ */
+ def flatten(implicit ctx: Context): PolyType = resType match {
+ case that: PolyType =>
+ val shift = new TypeMap {
+ def apply(t: Type) = t match {
+ case PolyParam(`that`, n) => PolyParam(that, n + paramNames.length)
+ case t => mapOver(t)
+ }
+ }
+ PolyType(paramNames ++ that.paramNames)(
+ x => this.paramBounds.mapConserve(_.subst(this, x).bounds) ++
+ that.paramBounds.mapConserve(shift(_).subst(that, x).bounds),
+ x => shift(that.resultType).subst(that, x).subst(this, x))
+ case _ => this
+ }
+
+ /** The type `[tparams := paramRefs] tp`, where `tparams` can be
+ * either a list of type parameter symbols or a list of lambda parameters
+ */
+ def lifted(tparams: List[TypeParamInfo], tp: Type)(implicit ctx: Context): Type =
+ tparams match {
+ case LambdaParam(poly, _) :: _ => tp.subst(poly, this)
+ case tparams: List[Symbol @unchecked] => tp.subst(tparams, paramRefs)
+ }
+
override def equals(other: Any) = other match {
case other: PolyType =>
- other.paramNames == this.paramNames && other.paramBounds == this.paramBounds && other.resType == this.resType
+ other.paramNames == this.paramNames &&
+ other.paramBounds == this.paramBounds &&
+ other.resType == this.resType &&
+ other.variances == this.variances
case _ => false
}
- override def computeHash = {
- doHash(paramNames, resType, paramBounds)
- }
- override def toString = s"PolyType($paramNames, $paramBounds, $resType)"
- }
+ override def toString = s"PolyType($variances, $paramNames, $paramBounds, $resType)"
- class CachedPolyType(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)
- extends PolyType(paramNames)(paramBoundsExp, resultTypeExp)
+ override def computeHash = doHash(variances ::: paramNames, resType, paramBounds)
+ }
object PolyType {
- def apply(paramNames: List[TypeName])(paramBoundsExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = {
- unique(new CachedPolyType(paramNames)(paramBoundsExp, resultTypeExp))
+ def apply(paramNames: List[TypeName], variances: List[Int] = Nil)(
+ paramBoundsExp: PolyType => List[TypeBounds],
+ resultTypeExp: PolyType => Type)(implicit ctx: Context): PolyType = {
+ val vs = if (variances.isEmpty) paramNames.map(alwaysZero) else variances
+ unique(new PolyType(paramNames, vs)(paramBoundsExp, resultTypeExp))
}
- def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context) =
+ def fromSymbols(tparams: List[Symbol], resultType: Type)(implicit ctx: Context): Type =
if (tparams.isEmpty) resultType
- else {
- def transform(pt: PolyType, tp: Type) =
- tp.subst(tparams, (0 until tparams.length).toList map (PolyParam(pt, _)))
- apply(tparams map (_.name.asTypeName))(
- pt => tparams map (tparam => transform(pt, tparam.info).bounds),
- pt => transform(pt, resultType))
+ else apply(tparams map (_.name.asTypeName), tparams.map(_.variance))(
+ pt => tparams.map(tparam => pt.lifted(tparams, tparam.info).bounds),
+ pt => pt.lifted(tparams, resultType))
+
+ def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] =
+ Some((tl.typeParams, tl.resType))
+
+ def any(n: Int)(implicit ctx: Context) =
+ apply(tpnme.syntheticLambdaParamNames(n), List.fill(n)(0))(
+ pt => List.fill(n)(TypeBounds.empty), pt => defn.AnyType)
+ }
+
+ // ----- HK types: LambdaParam, HKApply ---------------------
+
+ /** The parameter of a type lambda */
+ case class LambdaParam(tl: PolyType, n: Int) extends TypeParamInfo {
+ def isTypeParam(implicit ctx: Context) = true
+ def paramName(implicit ctx: Context): TypeName = tl.paramNames(n)
+ def paramBounds(implicit ctx: Context): TypeBounds = tl.paramBounds(n)
+ def paramBoundsAsSeenFrom(pre: Type)(implicit ctx: Context): TypeBounds = paramBounds
+ def paramBoundsOrCompleter(implicit ctx: Context): Type = paramBounds
+ def paramVariance(implicit ctx: Context): Int = tl.variances(n)
+ def toArg: Type = PolyParam(tl, n)
+ def paramRef(implicit ctx: Context): Type = PolyParam(tl, n)
+ }
+
+ /** A higher kinded type application `C[T_1, ..., T_n]` */
+ abstract case class HKApply(tycon: Type, args: List[Type])
+ extends CachedProxyType with ValueType {
+
+ private var validSuper: Period = Nowhere
+ private var cachedSuper: Type = _
+
+ override def underlying(implicit ctx: Context): Type = tycon
+
+ override def superType(implicit ctx: Context): Type = {
+ if (ctx.period != validSuper) {
+ cachedSuper = tycon match {
+ case tp: PolyType => defn.AnyType
+ case tp: TypeVar if !tp.inst.exists =>
+ // supertype not stable, since underlying might change
+ return tp.underlying.applyIfParameterized(args)
+ case tp: TypeProxy => tp.superType.applyIfParameterized(args)
+ case _ => defn.AnyType
+ }
+ validSuper = ctx.period
+ }
+ cachedSuper
+ }
+
+ def lowerBound(implicit ctx: Context) = tycon.stripTypeVar match {
+ case tycon: TypeRef =>
+ tycon.info match {
+ case TypeBounds(lo, hi) =>
+ if (lo eq hi) superType // optimization, can profit from caching in this case
+ else lo.applyIfParameterized(args)
+ case _ => NoType
+ }
+ case _ =>
+ NoType
+ }
+
+ def typeParams(implicit ctx: Context): List[TypeParamInfo] = {
+ val tparams = tycon.typeParams
+ if (tparams.isEmpty) PolyType.any(args.length).typeParams else tparams
+ }
+
+ def derivedAppliedType(tycon: Type, args: List[Type])(implicit ctx: Context): Type =
+ if ((tycon eq this.tycon) && (args eq this.args)) this
+ else tycon.appliedTo(args)
+
+ override def computeHash = doHash(tycon, args)
+
+ protected def checkInst(implicit ctx: Context): this.type = {
+ def check(tycon: Type): Unit = tycon.stripTypeVar match {
+ case tycon: TypeRef if !tycon.symbol.isClass =>
+ case _: PolyParam | ErrorType | _: WildcardType =>
+ case _: PolyType =>
+ assert(args.exists(_.isInstanceOf[TypeBounds]), s"unreduced type apply: $this")
+ case tycon: AnnotatedType =>
+ check(tycon.underlying)
+ case _ =>
+ assert(false, s"illegal type constructor in $this")
}
+ if (Config.checkHKApplications) check(tycon)
+ this
+ }
}
- // ----- Bound types: MethodParam, PolyParam, RefinedThis --------------------------
+ final class CachedHKApply(tycon: Type, args: List[Type]) extends HKApply(tycon, args)
+
+ object HKApply {
+ def apply(tycon: Type, args: List[Type])(implicit ctx: Context) =
+ unique(new CachedHKApply(tycon, args)).checkInst
+ }
+
+ // ----- Bound types: MethodParam, PolyParam --------------------------
abstract class BoundType extends CachedProxyType with ValueType {
type BT <: Type
@@ -2474,10 +2729,14 @@ object Types {
abstract class ParamType extends BoundType {
def paramNum: Int
+ def paramName: Name
}
abstract case class MethodParam(binder: MethodType, paramNum: Int) extends ParamType with SingletonType {
type BT = MethodType
+
+ def paramName = binder.paramNames(paramNum)
+
override def underlying(implicit ctx: Context): Type = binder.paramTypes(paramNum)
def copyBoundType(bt: BT) = new MethodParamImpl(bt, paramNum)
@@ -2490,7 +2749,7 @@ object Types {
false
}
- override def toString = s"MethodParam(${binder.paramNames(paramNum)})"
+ override def toString = s"MethodParam($paramName)"
}
class MethodParamImpl(binder: MethodType, paramNum: Int) extends MethodParam(binder, paramNum)
@@ -2520,9 +2779,19 @@ object Types {
case _ => false
}
- override def underlying(implicit ctx: Context): Type = binder.paramBounds(paramNum)
+ def paramName = binder.paramNames(paramNum)
+
+ override def underlying(implicit ctx: Context): Type = {
+ val bounds = binder.paramBounds
+ if (bounds == null) NoType // this can happen if the referenced generic type is not initialized yet
+ else bounds(paramNum)
+ }
// no customized hashCode/equals needed because cycle is broken in PolyType
- override def toString = s"PolyParam(${binder.paramNames(paramNum)})"
+ override def toString =
+ try s"PolyParam($paramName)"
+ catch {
+ case ex: IndexOutOfBoundsException => s"PolyParam(<bad index: $paramNum>)"
+ }
override def computeHash = doHash(paramNum, binder.identityHash)
@@ -2534,20 +2803,24 @@ object Types {
}
}
- /** a this-reference to an enclosing refined type `binder`. */
- case class RefinedThis(binder: RefinedType) extends BoundType with SingletonType {
- type BT = RefinedType
+ /** a self-reference to an enclosing recursive type. */
+ case class RecThis(binder: RecType) extends BoundType with SingletonType {
+ type BT = RecType
override def underlying(implicit ctx: Context) = binder
- def copyBoundType(bt: BT) = RefinedThis(bt)
+ def copyBoundType(bt: BT) = RecThis(bt)
- // need to customize hashCode and equals to prevent infinite recursion for
- // refinements that refer to the refinement type via this
+ // need to customize hashCode and equals to prevent infinite recursion
+ // between RecTypes and RecRefs.
override def computeHash = addDelta(binder.identityHash, 41)
override def equals(that: Any) = that match {
- case that: RefinedThis => this.binder eq that.binder
+ case that: RecThis => this.binder eq that.binder
case _ => false
}
- override def toString = s"RefinedThis(${binder.hashCode})"
+ override def toString =
+ try s"RecThis(${binder.hashCode})"
+ catch {
+ case ex: NullPointerException => s"RecThis(<under construction>)"
+ }
}
// ----- Skolem types -----------------------------------------------
@@ -2559,7 +2832,14 @@ object Types {
if (info eq this.info) this else SkolemType(info)
override def hashCode: Int = identityHash
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
- override def toString = s"Skolem($info)"
+
+ private var myRepr: String = null
+ def repr(implicit ctx: Context) = {
+ if (myRepr == null) myRepr = ctx.freshName("?")
+ myRepr
+ }
+
+ override def toString = s"Skolem($hashCode)"
}
final class CachedSkolemType(info: Type) extends SkolemType(info)
@@ -2632,49 +2912,11 @@ object Types {
* is also a singleton type.
*/
def instantiate(fromBelow: Boolean)(implicit ctx: Context): Type = {
- def upperBound = ctx.typerState.constraint.fullUpperBound(origin)
- def isSingleton(tp: Type): Boolean = tp match {
- case tp: SingletonType => true
- case AndType(tp1, tp2) => isSingleton(tp1) | isSingleton(tp2)
- case OrType(tp1, tp2) => isSingleton(tp1) & isSingleton(tp2)
- case _ => false
- }
- def isFullyDefined(tp: Type): Boolean = tp match {
- case tp: TypeVar => tp.isInstantiated && isFullyDefined(tp.instanceOpt)
- case tp: TypeProxy => isFullyDefined(tp.underlying)
- case tp: AndOrType => isFullyDefined(tp.tp1) && isFullyDefined(tp.tp2)
- case _ => true
- }
- def isOrType(tp: Type): Boolean = tp.stripTypeVar.dealias match {
- case tp: OrType => true
- case AndType(tp1, tp2) => isOrType(tp1) | isOrType(tp2)
- case RefinedType(parent, _) => isOrType(parent)
- case WildcardType(bounds: TypeBounds) => isOrType(bounds.hi)
- case _ => false
- }
-
- // First, solve the constraint.
- var inst = ctx.typeComparer.approximation(origin, fromBelow)
-
- // Then, approximate by (1.) and (2.) and simplify as follows.
- // 1. If instance is from below and is a singleton type, yet
- // upper bound is not a singleton type, widen the instance.
- if (fromBelow && isSingleton(inst) && !isSingleton(upperBound))
- inst = inst.widen
-
- inst = inst.simplified
-
- // 2. If instance is from below and is a fully-defined union type, yet upper bound
- // is not a union type, approximate the union type from above by an intersection
- // of all common base types.
- if (fromBelow && isOrType(inst) && isFullyDefined(inst) && !isOrType(upperBound))
- inst = inst.approximateUnion
-
+ val inst = ctx.typeComparer.instanceType(origin, fromBelow)
if (ctx.typerState.isGlobalCommittable)
assert(!inst.isInstanceOf[PolyParam], i"bad inst $this := $inst, constr = ${ctx.typerState.constraint}")
// If this fails, you might want to turn on Config.debugCheckConstraintsClosed
// to help find the root of the problem.
-
instantiateWith(inst)
}
@@ -2766,13 +3008,9 @@ object Types {
/** The class type with all type parameters */
def fullyAppliedRef(implicit ctx: Context): Type = fullyAppliedRef(cls.typeRef, cls.typeParams)
- def rebase(tp: Type)(implicit ctx: Context): Type =
- if ((prefix eq cls.owner.thisType) || !cls.owner.isClass || ctx.erasedTypes) tp
- else tp.substThis(cls.owner.asClass, prefix)
+ private var typeRefCache: TypeRef = null
- private var typeRefCache: Type = null
-
- def typeRef(implicit ctx: Context): Type = {
+ def typeRef(implicit ctx: Context): TypeRef = {
def clsDenot = if (prefix eq cls.owner.thisType) cls.denot else cls.denot.copySymDenotation(info = this)
if (typeRefCache == null)
typeRefCache =
@@ -2781,7 +3019,7 @@ object Types {
typeRefCache
}
- def symbolicTypeRef(implicit ctx: Context): Type = TypeRef(prefix, cls)
+ def symbolicTypeRef(implicit ctx: Context): TypeRef = TypeRef(prefix, cls)
// cached because baseType needs parents
private var parentsCache: List[TypeRef] = null
@@ -2789,7 +3027,7 @@ object Types {
/** The parent type refs as seen from the given prefix */
override def parents(implicit ctx: Context): List[TypeRef] = {
if (parentsCache == null)
- parentsCache = cls.classParents.mapConserve(rebase(_).asInstanceOf[TypeRef])
+ parentsCache = cls.classParents.mapConserve(_.asSeenFrom(prefix, cls.owner).asInstanceOf[TypeRef])
parentsCache
}
@@ -2816,9 +3054,28 @@ object Types {
override def toString = s"ClassInfo($prefix, $cls)"
}
- final class CachedClassInfo(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass)
+ class CachedClassInfo(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass)
extends ClassInfo(prefix, cls, classParents, decls, selfInfo)
+ /** A class for temporary class infos where `parents` are not yet known. */
+ final class TempClassInfo(prefix: Type, cls: ClassSymbol, decls: Scope, selfInfo: DotClass)
+ extends CachedClassInfo(prefix, cls, Nil, decls, selfInfo) {
+
+ /** A list of actions that were suspended because they rely on the class info of
+ * `cls` being no longer temporary. These actions will be performed once `cls`
+ * gets a real ClassInfo.
+ */
+ private var suspensions: List[() => Unit] = Nil
+
+ def addSuspension(suspension: () => Unit): Unit = suspensions ::= suspension
+
+ /** Install classinfo with known parents in `denot` and resume all suspensions */
+ def finalize(denot: SymDenotation, parents: List[TypeRef])(implicit ctx: Context) = {
+ denot.info = derivedClassInfo(classParents = parents)
+ suspensions.foreach(_())
+ }
+ }
+
object ClassInfo {
def apply(prefix: Type, cls: ClassSymbol, classParents: List[TypeRef], decls: Scope, selfInfo: DotClass = NoType)(implicit ctx: Context) =
unique(new CachedClassInfo(prefix, cls, classParents, decls, selfInfo))
@@ -2884,9 +3141,10 @@ object Types {
/** If this type and that type have the same variance, this variance, otherwise 0 */
final def commonVariance(that: TypeBounds): Int = (this.variance + that.variance) / 2
+ override def computeHash = doHash(variance, lo, hi)
override def equals(that: Any): Boolean = that match {
case that: TypeBounds =>
- (this.lo eq that.lo) && (this.hi eq that.hi) && this.variance == that.variance
+ (this.lo eq that.lo) && (this.hi eq that.hi) && (this.variance == that.variance)
case _ =>
false
}
@@ -2895,9 +3153,7 @@ object Types {
if (lo eq hi) s"TypeAlias($lo, $variance)" else s"TypeBounds($lo, $hi)"
}
- class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) {
- override def computeHash = doHash(variance, lo, hi)
- }
+ class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi)
abstract class TypeAlias(val alias: Type, override val variance: Int) extends TypeBounds(alias, alias) {
/** pre: this is a type alias */
@@ -2927,7 +3183,6 @@ object Types {
class CachedTypeAlias(alias: Type, variance: Int, hc: Int) extends TypeAlias(alias, variance) {
myHash = hc
- override def computeHash = doHash(variance, lo, hi)
}
object TypeBounds {
@@ -2996,10 +3251,15 @@ object Types {
object ErrorType extends ErrorType
+ /* Type used to track Select nodes that could not resolve a member and whose qualifier is a scala.Dynamic. */
+ object TryDynamicCallType extends ErrorType
+
/** Wildcard type, possibly with bounds */
abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType {
def derivedWildcardType(optBounds: Type)(implicit ctx: Context) =
- if (optBounds eq this.optBounds) this else WildcardType(optBounds.asInstanceOf[TypeBounds])
+ if (optBounds eq this.optBounds) this
+ else if (!optBounds.exists) WildcardType
+ else WildcardType(optBounds.asInstanceOf[TypeBounds])
override def computeHash = doHash(optBounds)
}
@@ -3081,64 +3341,91 @@ object Types {
protected var variance = 1
+ protected def derivedSelect(tp: NamedType, pre: Type): Type =
+ tp.derivedSelect(pre)
+ protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type): Type =
+ tp.derivedRefinedType(parent, tp.refinedName, info)
+ protected def derivedRecType(tp: RecType, parent: Type): Type =
+ tp.rebind(parent)
+ protected def derivedTypeAlias(tp: TypeAlias, alias: Type): Type =
+ tp.derivedTypeAlias(alias)
+ protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type =
+ tp.derivedTypeBounds(lo, hi)
+ protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type): Type =
+ tp.derivedSuperType(thistp, supertp)
+ protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ tp.derivedAppliedType(tycon, args)
+ protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type): Type =
+ tp.derivedAndOrType(tp1, tp2)
+ protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type =
+ tp.derivedAnnotatedType(underlying, annot)
+ protected def derivedWildcardType(tp: WildcardType, bounds: Type): Type =
+ tp.derivedWildcardType(bounds)
+ protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type =
+ tp.derivedClassInfo(pre)
+ protected def derivedJavaArrayType(tp: JavaArrayType, elemtp: Type): Type =
+ tp.derivedJavaArrayType(elemtp)
+ protected def derivedMethodType(tp: MethodType, formals: List[Type], restpe: Type): Type =
+ tp.derivedMethodType(tp.paramNames, formals, restpe)
+ protected def derivedExprType(tp: ExprType, restpe: Type): Type =
+ tp.derivedExprType(restpe)
+ protected def derivedPolyType(tp: PolyType, pbounds: List[TypeBounds], restpe: Type): Type =
+ tp.derivedPolyType(tp.paramNames, pbounds, restpe)
+
/** Map this function over given type */
def mapOver(tp: Type): Type = {
implicit val ctx: Context = this.ctx // Dotty deviation: implicits need explicit type
tp match {
case tp: NamedType =>
if (stopAtStatic && tp.symbol.isStatic) tp
- else {
- val saved = variance
- variance = 0
- val result = tp.derivedSelect(this(tp.prefix))
- variance = saved
- result
- }
+ else derivedSelect(tp, this(tp.prefix))
case _: ThisType
| _: BoundType
| NoPrefix => tp
case tp: RefinedType =>
- tp.derivedRefinedType(this(tp.parent), tp.refinedName, this(tp.refinedInfo))
+ derivedRefinedType(tp, this(tp.parent), this(tp.refinedInfo))
case tp: TypeAlias =>
val saved = variance
variance = variance * tp.variance
val alias1 = this(tp.alias)
variance = saved
- tp.derivedTypeAlias(alias1)
+ derivedTypeAlias(tp, alias1)
case tp: TypeBounds =>
variance = -variance
val lo1 = this(tp.lo)
variance = -variance
- tp.derivedTypeBounds(lo1, this(tp.hi))
+ derivedTypeBounds(tp, lo1, this(tp.hi))
case tp: MethodType =>
def mapOverMethod = {
variance = -variance
val ptypes1 = tp.paramTypes mapConserve this
variance = -variance
- tp.derivedMethodType(tp.paramNames, ptypes1, this(tp.resultType))
+ derivedMethodType(tp, ptypes1, this(tp.resultType))
}
mapOverMethod
case tp: ExprType =>
- tp.derivedExprType(this(tp.resultType))
+ derivedExprType(tp, this(tp.resultType))
case tp: PolyType =>
def mapOverPoly = {
variance = -variance
val bounds1 = tp.paramBounds.mapConserve(this).asInstanceOf[List[TypeBounds]]
variance = -variance
- tp.derivedPolyType(
- tp.paramNames, bounds1, this(tp.resultType))
+ derivedPolyType(tp, bounds1, this(tp.resultType))
}
mapOverPoly
+ case tp: RecType =>
+ derivedRecType(tp, this(tp.parent))
+
case tp @ SuperType(thistp, supertp) =>
- tp.derivedSuperType(this(thistp), this(supertp))
+ derivedSuperType(tp, this(thistp), this(supertp))
case tp: LazyRef =>
LazyRef(() => this(tp.ref))
@@ -3150,21 +3437,32 @@ object Types {
val inst = tp.instanceOpt
if (inst.exists) apply(inst) else tp
+ case tp: HKApply =>
+ def mapArg(arg: Type, tparam: TypeParamInfo): Type = {
+ val saved = variance
+ variance *= tparam.paramVariance
+ try this(arg)
+ finally variance = saved
+ }
+ derivedAppliedType(tp, this(tp.tycon),
+ tp.args.zipWithConserve(tp.typeParams)(mapArg))
+
case tp: AndOrType =>
- tp.derivedAndOrType(this(tp.tp1), this(tp.tp2))
+ derivedAndOrType(tp, this(tp.tp1), this(tp.tp2))
case tp: SkolemType =>
- tp.derivedSkolemType(this(tp.info))
+ tp
case tp @ AnnotatedType(underlying, annot) =>
val underlying1 = this(underlying)
- if (underlying1 eq underlying) tp else tp.derivedAnnotatedType(underlying1, mapOver(annot))
+ if (underlying1 eq underlying) tp
+ else derivedAnnotatedType(tp, underlying1, mapOver(annot))
case tp @ WildcardType =>
- tp.derivedWildcardType(mapOver(tp.optBounds))
+ derivedWildcardType(tp, mapOver(tp.optBounds))
case tp: JavaArrayType =>
- tp.derivedJavaArrayType(this(tp.elemType))
+ derivedJavaArrayType(tp, this(tp.elemType))
case tp: ProtoType =>
tp.map(this)
@@ -3191,8 +3489,8 @@ object Types {
def mapOver(tree: Tree): Tree = treeTypeMap(tree)
/** Can be overridden. By default, only the prefix is mapped. */
- protected def mapClassInfo(tp: ClassInfo): ClassInfo =
- tp.derivedClassInfo(this(tp.prefix))
+ protected def mapClassInfo(tp: ClassInfo): Type =
+ derivedClassInfo(tp, this(tp.prefix))
def andThen(f: Type => Type): TypeMap = new TypeMap {
override def stopAtStatic = thisMap.stopAtStatic
@@ -3218,6 +3516,58 @@ object Types {
def apply(tp: Type) = tp
}
+ abstract class ApproximatingTypeMap(implicit ctx: Context) extends TypeMap { thisMap =>
+ def approx(lo: Type = defn.NothingType, hi: Type = defn.AnyType) =
+ if (variance == 0) NoType
+ else apply(if (variance < 0) lo else hi)
+
+ override protected def derivedSelect(tp: NamedType, pre: Type) =
+ if (pre eq tp.prefix) tp
+ else tp.info match {
+ case TypeAlias(alias) => apply(alias) // try to heal by following aliases
+ case _ =>
+ if (pre.exists && !pre.isRef(defn.NothingClass) && variance > 0) tp.derivedSelect(pre)
+ else tp.info match {
+ case TypeBounds(lo, hi) => approx(lo, hi)
+ case _ => approx()
+ }
+ }
+ override protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type) =
+ if (parent.exists && info.exists) tp.derivedRefinedType(parent, tp.refinedName, info)
+ else approx(hi = parent)
+ override protected def derivedRecType(tp: RecType, parent: Type) =
+ if (parent.exists) tp.rebind(parent)
+ else approx()
+ override protected def derivedTypeAlias(tp: TypeAlias, alias: Type) =
+ if (alias.exists) tp.derivedTypeAlias(alias)
+ else approx(NoType, TypeBounds.empty)
+ override protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type) =
+ if (lo.exists && hi.exists) tp.derivedTypeBounds(lo, hi)
+ else approx(NoType,
+ if (lo.exists) TypeBounds.lower(lo)
+ else if (hi.exists) TypeBounds.upper(hi)
+ else TypeBounds.empty)
+ override protected def derivedSuperType(tp: SuperType, thistp: Type, supertp: Type) =
+ if (thistp.exists && supertp.exists) tp.derivedSuperType(thistp, supertp)
+ else NoType
+ override protected def derivedAppliedType(tp: HKApply, tycon: Type, args: List[Type]): Type =
+ if (tycon.exists && args.forall(_.exists)) tp.derivedAppliedType(tycon, args)
+ else approx() // This is rather coarse, but to do better is a bit complicated
+ override protected def derivedAndOrType(tp: AndOrType, tp1: Type, tp2: Type) =
+ if (tp1.exists && tp2.exists) tp.derivedAndOrType(tp1, tp2)
+ else if (tp.isAnd) approx(hi = tp1 & tp2) // if one of tp1, tp2 exists, it is the result of tp1 & tp2
+ else approx(lo = tp1 & tp2)
+ override protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation) =
+ if (underlying.exists) tp.derivedAnnotatedType(underlying, annot)
+ else NoType
+ override protected def derivedWildcardType(tp: WildcardType, bounds: Type) =
+ if (bounds.exists) tp.derivedWildcardType(bounds)
+ else WildcardType
+ override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type =
+ if (pre.exists) tp.derivedClassInfo(pre)
+ else NoType
+ }
+
// ----- TypeAccumulators ----------------------------------------------------
abstract class TypeAccumulator[T](implicit protected val ctx: Context) extends ((T, Type) => T) {
@@ -3280,18 +3630,38 @@ object Types {
case ExprType(restpe) =>
this(x, restpe)
- case tp @ PolyType(pnames) =>
+ case tp: PolyType =>
variance = -variance
val y = foldOver(x, tp.paramBounds)
variance = -variance
this(y, tp.resultType)
+ case tp: RecType =>
+ this(x, tp.parent)
+
case SuperType(thistp, supertp) =>
this(this(x, thistp), supertp)
case tp @ ClassInfo(prefix, _, _, _, _) =>
this(x, prefix)
+ case tp @ HKApply(tycon, args) =>
+ def foldArgs(x: T, tparams: List[TypeParamInfo], args: List[Type]): T =
+ if (args.isEmpty) {
+ assert(tparams.isEmpty)
+ x
+ }
+ else {
+ val tparam = tparams.head
+ val saved = variance
+ variance *= tparam.paramVariance
+ val acc =
+ try this(x, args.head)
+ finally variance = saved
+ foldArgs(acc, tparams.tail, args.tail)
+ }
+ foldArgs(this(x, tycon), tp.typeParams, args)
+
case tp: AndOrType =>
this(this(x, tp.tp1), tp.tp2)
@@ -3310,6 +3680,9 @@ object Types {
case tp: JavaArrayType =>
this(x, tp.elemType)
+ case tp: LazyRef =>
+ this(x, tp.ref)
+
case tp: ProtoType =>
tp.fold(x, this)
@@ -3322,13 +3695,19 @@ object Types {
}
}
- class ExistsAccumulator(p: Type => Boolean)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
- override def stopAtStatic = false
- def apply(x: Boolean, tp: Type) = x || p(tp) || foldOver(x, tp)
+ abstract class TypeTraverser(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ def traverse(tp: Type): Unit
+ def apply(x: Unit, tp: Type): Unit = traverse(tp)
+ protected def traverseChildren(tp: Type) = foldOver((), tp)
}
- class ForeachAccumulator(p: Type => Unit)(implicit ctx: Context) extends TypeAccumulator[Unit] {
+ class ExistsAccumulator(p: Type => Boolean, forceLazy: Boolean = true)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
override def stopAtStatic = false
+ def apply(x: Boolean, tp: Type) =
+ x || p(tp) || (forceLazy || !tp.isInstanceOf[LazyRef]) && foldOver(x, tp)
+ }
+
+ class ForeachAccumulator(p: Type => Unit, override val stopAtStatic: Boolean)(implicit ctx: Context) extends TypeAccumulator[Unit] {
def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp)
}
@@ -3354,7 +3733,7 @@ object Types {
if (!excludeLowerBounds) apply(x, lo)
apply(x, hi)
case tp: ThisType =>
- apply(x, tp.underlying)
+ apply(x, tp.tref)
case tp: ConstantType =>
apply(x, tp.underlying)
case tp: MethodParam =>
@@ -3435,7 +3814,7 @@ object Types {
class MissingType(pre: Type, name: Name)(implicit ctx: Context) extends TypeError(
i"""cannot resolve reference to type $pre.$name
- |the classfile defining the type might be missing from the classpath${otherReason(pre)}""".stripMargin) {
+ |the classfile defining the type might be missing from the classpath${otherReason(pre)}""") {
if (ctx.debug) printStackTrace()
}
@@ -3453,7 +3832,7 @@ object Types {
object CyclicReference {
def apply(denot: SymDenotation)(implicit ctx: Context): CyclicReference = {
val ex = new CyclicReference(denot)
- if (!(ctx.mode is typer.Mode.CheckCyclic)) {
+ if (!(ctx.mode is Mode.CheckCyclic)) {
cyclicErrors.println(ex.getMessage)
for (elem <- ex.getStackTrace take 200)
cyclicErrors.println(elem.toString)
diff --git a/src/dotty/tools/dotc/core/Uniques.scala b/src/dotty/tools/dotc/core/Uniques.scala
index b00508d60..cb9670c69 100644
--- a/src/dotty/tools/dotc/core/Uniques.scala
+++ b/src/dotty/tools/dotc/core/Uniques.scala
@@ -107,8 +107,8 @@ object Uniques {
def enterIfNew(parent: Type, refinedName: Name, refinedInfo: Type): RefinedType = {
val h = doHash(refinedName, refinedInfo, parent)
- def newType = new PreHashedRefinedType(parent, refinedName, refinedInfo, h)
- if (monitored) recordCaching(h, classOf[PreHashedRefinedType])
+ def newType = new CachedRefinedType(parent, refinedName, refinedInfo, h)
+ if (monitored) recordCaching(h, classOf[CachedRefinedType])
if (h == NotCached) newType
else {
val r = findPrevious(h, parent, refinedName, refinedInfo)
diff --git a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
index 25558a79a..1570dbca0 100644
--- a/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
+++ b/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala
@@ -12,7 +12,6 @@ import scala.collection.{ mutable, immutable }
import scala.collection.mutable.{ ListBuffer, ArrayBuffer }
import scala.annotation.switch
import typer.Checking.checkNonCyclic
-import typer.Mode
import io.AbstractFile
import scala.util.control.NonFatal
@@ -57,26 +56,21 @@ class ClassfileParser(
case e: RuntimeException =>
if (ctx.debug) e.printStackTrace()
throw new IOException(
- sm"""class file $classfile is broken, reading aborted with ${e.getClass}
- |${Option(e.getMessage).getOrElse("")}""")
+ i"""class file $classfile is broken, reading aborted with ${e.getClass}
+ |${Option(e.getMessage).getOrElse("")}""")
}
private def parseHeader(): Unit = {
val magic = in.nextInt
if (magic != JAVA_MAGIC)
- throw new IOException("class file '" + in.file + "' "
- + "has wrong magic number 0x" + toHexString(magic)
- + ", should be 0x" + toHexString(JAVA_MAGIC))
+ throw new IOException(s"class file '${in.file}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}")
val minorVersion = in.nextChar.toInt
val majorVersion = in.nextChar.toInt
if ((majorVersion < JAVA_MAJOR_VERSION) ||
((majorVersion == JAVA_MAJOR_VERSION) &&
(minorVersion < JAVA_MINOR_VERSION)))
- throw new IOException("class file '" + in.file + "' "
- + "has unknown version "
- + majorVersion + "." + minorVersion
- + ", should be at least "
- + JAVA_MAJOR_VERSION + "." + JAVA_MINOR_VERSION)
+ throw new IOException(
+ s"class file '${in.file}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION")
}
/** Return the class symbol of the given name. */
@@ -91,6 +85,7 @@ class ClassfileParser(
val jflags = in.nextChar
val isAnnotation = hasAnnotation(jflags)
val sflags = classTranslation.flags(jflags)
+ val isEnum = (jflags & JAVA_ACC_ENUM) != 0
val nameIdx = in.nextChar
currentClassName = pool.getClassName(nameIdx)
@@ -146,6 +141,15 @@ class ClassfileParser(
setClassInfo(classRoot, classInfo)
setClassInfo(moduleRoot, staticInfo)
}
+
+ // eager load java enum definitions for exhaustivity check of pattern match
+ if (isEnum) {
+ instanceScope.toList.map(_.ensureCompleted())
+ staticScope.toList.map(_.ensureCompleted())
+ classRoot.setFlag(Flags.Enum)
+ moduleRoot.setFlag(Flags.Enum)
+ }
+
result
}
@@ -273,7 +277,7 @@ class ClassfileParser(
if (sig(index) == '<') {
accept('<')
var tp1: Type = tp
- var formals = tp.typeParams
+ var formals = tp.typeParamSymbols
while (sig(index) != '>') {
sig(index) match {
case variance @ ('+' | '-' | '*') =>
@@ -390,7 +394,7 @@ class ClassfileParser(
}
index += 1
}
- val ownTypeParams = newTParams.toList
+ val ownTypeParams = newTParams.toList.asInstanceOf[List[TypeSymbol]]
val tpe =
if ((owner == null) || !owner.isClass)
sig2type(tparams, skiptvs = false)
@@ -581,11 +585,8 @@ class ClassfileParser(
* parameters. For Java annotations we need to fake it by making up the constructor.
* Note that default getters have type Nothing. That's OK because we need
* them only to signal that the corresponding parameter is optional.
- * If the constructor takes as last parameter an array, it can also accept
- * a vararg argument. We solve this by creating two constructors, one with
- * an array, the other with a repeated parameter.
*/
- def addAnnotationConstructor(classInfo: Type, tparams: List[Symbol] = Nil)(implicit ctx: Context): Unit = {
+ def addAnnotationConstructor(classInfo: Type, tparams: List[TypeSymbol] = Nil)(implicit ctx: Context): Unit = {
def addDefaultGetter(attr: Symbol, n: Int) =
ctx.newSymbol(
owner = moduleRoot.symbol,
@@ -619,13 +620,26 @@ class ClassfileParser(
}
addConstr(paramTypes)
+
+ // The code below added an extra constructor to annotations where the
+ // last parameter of the constructor is an Array[X] for some X, the
+ // array was replaced by a vararg argument. Unfortunately this breaks
+ // inference when doing:
+ // @Annot(Array())
+ // The constructor is overloaded so the expected type of `Array()` is
+ // WildcardType, and the type parameter of the Array apply method gets
+ // instantiated to `Nothing` instead of `X`.
+ // I'm leaving this commented out in case we improve inference to make this work.
+ // Note that if this is reenabled then JavaParser will also need to be modified
+ // to add the extra constructor (this was not implemented before).
+ /*
if (paramTypes.nonEmpty)
paramTypes.last match {
case defn.ArrayOf(elemtp) =>
addConstr(paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp))
case _ =>
}
-
+ */
}
}
@@ -808,12 +822,12 @@ class ClassfileParser(
getMember(owner, innerName.toTypeName)
}
assert(result ne NoSymbol,
- sm"""failure to resolve inner class:
- |externalName = $externalName,
- |outerName = $outerName,
- |innerName = $innerName
- |owner.fullName = ${owner.showFullName}
- |while parsing ${classfile}""")
+ i"""failure to resolve inner class:
+ |externalName = $externalName,
+ |outerName = $outerName,
+ |innerName = $innerName
+ |owner.fullName = ${owner.showFullName}
+ |while parsing ${classfile}""")
result
case None =>
diff --git a/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
index d62762571..2c93819d5 100644
--- a/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala
@@ -3,13 +3,13 @@ package dotc
package core
package tasty
-import Contexts._, SymDenotations._
+import Contexts._, SymDenotations._, Symbols._
import dotty.tools.dotc.ast.tpd
import TastyUnpickler._, TastyBuffer._
-import dotty.tools.dotc.core.tasty.DottyUnpickler.{SourceFileUnpickler, TreeSectionUnpickler, PositionsSectionUnpickler}
import util.Positions._
import util.{SourceFile, NoSource}
-import PositionUnpickler._
+import Annotations.Annotation
+import core.Mode
import classfile.ClassfileParser
object DottyUnpickler {
@@ -17,19 +17,15 @@ object DottyUnpickler {
/** Exception thrown if classfile is corrupted */
class BadSignature(msg: String) extends RuntimeException(msg)
- class SourceFileUnpickler extends SectionUnpickler[SourceFile]("Sourcefile") {
+ class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler])
+ extends SectionUnpickler[TreeUnpickler]("ASTs") {
def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
- new SourceFile(tastyName(reader.readNameRef()).toString, Seq())
+ new TreeUnpickler(reader, tastyName, posUnpickler)
}
- class TreeSectionUnpickler extends SectionUnpickler[TreeUnpickler]("ASTs") {
+ class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler]("Positions") {
def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
- new TreeUnpickler(reader, tastyName)
- }
-
- class PositionsSectionUnpickler extends SectionUnpickler[(Position, AddrToPosition)]("Positions") {
- def unpickle(reader: TastyReader, tastyName: TastyName.Table) =
- new PositionUnpickler(reader).unpickle()
+ new PositionUnpickler(reader)
}
}
@@ -38,9 +34,11 @@ object DottyUnpickler {
*/
class DottyUnpickler(bytes: Array[Byte]) extends ClassfileParser.Embedded {
import tpd._
+ import DottyUnpickler._
val unpickler = new TastyUnpickler(bytes)
- private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler).get
+ private val posUnpicklerOpt = unpickler.unpickle(new PositionsSectionUnpickler)
+ private val treeUnpickler = unpickler.unpickle(new TreeSectionUnpickler(posUnpicklerOpt)).get
/** Enter all toplevel classes and objects into their scopes
* @param roots a set of SymDenotations that should be overwritten by unpickling
@@ -48,14 +46,8 @@ class DottyUnpickler(bytes: Array[Byte]) extends ClassfileParser.Embedded {
def enter(roots: Set[SymDenotation])(implicit ctx: Context): Unit =
treeUnpickler.enterTopLevel(roots)
- /** The unpickled trees, and the source file they come from
- * @param readPositions if true, trees get decorated with position information.
- */
- def body(readPositions: Boolean = false)(implicit ctx: Context): (List[Tree], SourceFile) = {
- val source = unpickler.unpickle(new SourceFileUnpickler).getOrElse(NoSource)
- if (readPositions)
- for ((totalRange, positions) <- unpickler.unpickle(new PositionsSectionUnpickler))
- treeUnpickler.usePositions(totalRange, positions)
- (treeUnpickler.unpickle(), source)
+ /** The unpickled trees, and the source file they come from. */
+ def body(implicit ctx: Context): List[Tree] = {
+ treeUnpickler.unpickle()
}
}
diff --git a/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
index b0550b70a..63bb00a71 100644
--- a/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/PositionPickler.scala
@@ -3,7 +3,8 @@ package dotc
package core
package tasty
-import ast.tpd._
+import ast._
+import ast.Trees._
import ast.Trees.WithLazyField
import TastyFormat._
import core._
@@ -12,64 +13,47 @@ import collection.mutable
import TastyBuffer._
import util.Positions._
-object PositionPickler {
-
- trait DeferredPosition {
- var parentPos: Position = NoPosition
- }
-
- def traverse(x: Any, parentPos: Position, op: (Tree, Position) => Unit)(implicit ctx: Context): Unit =
- if (parentPos.exists)
- x match {
- case x: Tree @unchecked =>
- op(x, parentPos)
- x match {
- case x: MemberDef @unchecked => traverse(x.symbol.annotations, x.pos, op)
- case _ =>
- }
- traverse(x.productIterator, x.pos, op)
- case x: DeferredPosition =>
- x.parentPos = parentPos
- case xs: TraversableOnce[_] =>
- xs.foreach(traverse(_, parentPos, op))
- case _ =>
- }
-}
-import PositionPickler._
-
-class PositionPickler(pickler: TastyPickler, addrOfTree: Tree => Option[Addr]) {
+class PositionPickler(pickler: TastyPickler, addrOfTree: tpd.Tree => Option[Addr]) {
val buf = new TastyBuffer(5000)
pickler.newSection("Positions", buf)
import buf._
+ import ast.tpd._
+
+ def header(addrDelta: Int, hasStartDelta: Boolean, hasEndDelta: Boolean) = {
+ def toInt(b: Boolean) = if (b) 1 else 0
+ (addrDelta << 2) | (toInt(hasStartDelta) << 1) | toInt(hasEndDelta)
+ }
- def picklePositions(roots: List[Tree], totalRange: Position)(implicit ctx: Context) = {
+ def picklePositions(roots: List[Tree])(implicit ctx: Context) = {
var lastIndex = 0
- def record(tree: Tree, parentPos: Position): Unit =
- if (tree.pos.exists) {
- def msg = s"failure to pickle $tree at ${tree.pos}, parent = $parentPos"
- val endPos = tree.pos.end min parentPos.end
- // end positions can be larger than their parents
- // e.g. in the case of synthetic empty ranges, which are placed at the next token after
- // the current construct.
- val endDelta = endPos - parentPos.end
- val startPos =
- if (endDelta == 0) tree.pos.start max parentPos.start else tree.pos.start min endPos
- // Since end positions are corrected above, start positions have to follow suit.
- val startDelta = startPos - parentPos.start
- if (startDelta != 0 || endDelta != 0)
- for (addr <- addrOfTree(tree)) {
- buf.writeInt(addr.index - lastIndex)
- lastIndex = addr.index
- if (startDelta != 0) buf.writeInt(startDelta)
- if (endDelta != 0) {
- assert(endDelta < 0, msg)
- buf.writeInt(endDelta)
- } else
- assert(startDelta >= 0, msg)
+ var lastPos = Position(0, 0)
+ def pickleDeltas(index: Int, pos: Position) = {
+ val addrDelta = index - lastIndex
+ val startDelta = pos.start - lastPos.start
+ val endDelta = pos.end - lastPos.end
+ buf.writeInt(header(addrDelta, startDelta != 0, endDelta != 0))
+ if (startDelta != 0) buf.writeInt(startDelta)
+ if (endDelta != 0) buf.writeInt(endDelta)
+ lastIndex = index
+ lastPos = pos
+ }
+ def traverse(x: Any, parentPos: Position): Unit = x match {
+ case x: Tree @unchecked =>
+ if (x.pos.exists && x.pos.toSynthetic != parentPos.toSynthetic) {
+ addrOfTree(x) match {
+ case Some(addr) => pickleDeltas(addr.index, x.pos)
+ case _ =>
}
- }
-
- buf.writeNat(totalRange.end)
- traverse(roots, totalRange, record)
+ }
+ x match {
+ case x: MemberDef @unchecked => traverse(x.symbol.annotations, x.pos)
+ case _ =>
+ }
+ traverse(x.productIterator, x.pos)
+ case xs: TraversableOnce[_] =>
+ xs.foreach(traverse(_, parentPos))
+ case _ =>
+ }
+ traverse(roots, NoPosition)
}
}
diff --git a/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
index fa80a2769..c29aeba70 100644
--- a/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala
@@ -6,33 +6,31 @@ package tasty
import util.Positions._
import collection.mutable
-import TastyBuffer.Addr
-
-object PositionUnpickler {
- type AddrToPosition = mutable.HashMap[Addr, Position]
-}
+import TastyBuffer.{Addr, NoAddr}
/** Unpickler for tree positions */
class PositionUnpickler(reader: TastyReader) {
- import PositionUnpickler._
import reader._
- def unpickle(): (Position, AddrToPosition) = {
- val positions = new mutable.HashMap[Addr, Position] // Dotty deviation: Can't use new AddrToPosition here. TODO: fix this!
- val sourceLength = readNat()
- def readDelta() = if (isAtEnd) 0 else readInt()
- var curIndex: Addr = Addr(readDelta())
+ private[tasty] lazy val positions = {
+ val positions = new mutable.HashMap[Addr, Position]
+ var curIndex = 0
+ var curStart = 0
+ var curEnd = 0
while (!isAtEnd) {
- val delta1 = readDelta()
- val delta2 = readDelta()
- val (startDelta, endDelta, indexDelta) =
- if (delta2 <= 0) (delta1, -delta2, readDelta())
- else if (delta1 < 0) (0, -delta1, delta2)
- else (delta1, 0, delta2)
- positions(curIndex) = Position(startDelta, endDelta, startDelta)
- // make non-synthetic position; will be made synthetic by normalization.
- curIndex += indexDelta
+ val header = readInt()
+ val addrDelta = header >> 2
+ val hasStart = (header & 2) != 0
+ val hasEnd = (header & 1) != 0
+ curIndex += addrDelta
+ assert(curIndex >= 0)
+ if (hasStart) curStart += readInt()
+ if (hasEnd) curEnd += readInt()
+ positions(Addr(curIndex)) = Position(curStart, curEnd)
}
- (Position(0, sourceLength), positions)
+ positions
}
+
+ def posAt(addr: Addr) = positions.getOrElse(addr, NoPosition)
}
+
diff --git a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
index f3dabb517..8e8d58b47 100644
--- a/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
+++ b/src/dotty/tools/dotc/core/tasty/TastyFormat.scala
@@ -74,7 +74,6 @@ Standard-Section: "ASTs" TopLevelStat*
SELECT possiblySigned_NameRef qual_Term
NEW cls_Type
SUPER Length this_Term mixinTrait_Type?
- PAIR Length left_Term right_Term
TYPED Length expr_Term ascription_Type
NAMEDARG Length paramName_NameRef arg_Term
ASSIGN Length lhs_Term rhs_Term
@@ -84,7 +83,7 @@ Standard-Section: "ASTs" TopLevelStat*
MATCH Length sel_Term CaseDef*
TRY Length expr_Term CaseDef* finalizer_Term?
RETURN Length meth_ASTRef expr_Term?
- REPEATED Length elem_Term*
+ REPEATED Length elem_Type elem_Term*
BIND Length boundName_NameRef patType_Type pat_Term
ALTERNATIVE Length alt_Term*
UNAPPLY Length fun_Term ImplicitArg* pat_Type pat_Term*
@@ -103,7 +102,7 @@ Standard-Section: "ASTs" TopLevelStat*
TERMREFpkg fullyQualified_NameRef
TERMREF possiblySigned_NameRef qual_Type
THIS clsRef_Type
- REFINEDthis refinedType_ASTRef
+ RECthis recType_ASTRef
SHARED path_ASTRef
Constant = UNITconst
@@ -126,6 +125,7 @@ Standard-Section: "ASTs" TopLevelStat*
TYPEREFsymbol sym_ASTRef qual_Type
TYPEREFpkg fullyQualified_NameRef
TYPEREF possiblySigned_NameRef qual_Type
+ RECtype parent_Type
SUPERtype Length this_Type underlying_Type
REFINEDtype Length underlying_Type refinement_NameRef info_Type
APPLIEDtype Length tycon_Type arg_Type*
@@ -137,6 +137,7 @@ Standard-Section: "ASTs" TopLevelStat*
BIND Length boundName_NameRef bounds_Type
// for type-variables defined in a type pattern
BYNAMEtype underlying_Type
+ LAMBDAtype Length result_Type NamesTypes // variance encoded in front of name: +/-/=
POLYtype Length result_Type NamesTypes // needed for refinements
METHODtype Length result_Type NamesTypes // needed for refinements
PARAMtype Length binder_ASTref paramNum_Nat // needed for refinements
@@ -184,23 +185,16 @@ Note: Tree tags are grouped into 5 categories that determine what follows, and t
Category 4 (tags 112-127): tag Nat AST
Category 5 (tags 128-255): tag Length <payload>
-Standard Section: "Sourcefile" sourcefile_NameRef
-
-Standard Section: "Positions" sourceLength_Nat Assoc*
-
- Assoc = addr_Delta offset_Delta offset_Delta?
- // addr_Delta :
- // Difference of address to last recorded node.
- // All but the first addr_Deltas are > 0, the first is >= 0.
- // 2nd offset_Delta:
- // Difference of end offset of addressed node vs parent node. Always <= 0
- // 1st offset Delta, if delta >= 0 or 2nd offset delta exists
- // Difference of start offset of addressed node vs parent node.
- // 1st offset Delta, if delta < 0 and 2nd offset delta does not exist:
- // Difference of end offset of addressed node vs parent node.
- // Offsets and addresses are difference encoded.
+Standard Section: "Positions" Assoc*
+
+ Assoc = Header offset_Delta? offset_Delta?
+ Header = addr_Delta + // in one Nat: difference of address to last recorded node << 2 +
+ hasStartDiff + // one bit indicating whether there follows a start address delta << 1
+ hasEndDiff // one bit indicating whether there follows an end address delta
// Nodes which have the same positions as their parents are omitted.
- Delta = Int // Difference between consecutive offsets / tree addresses,
+ // offset_Deltas give difference of start/end offset wrt the
+ // same offset in the previously recorded node (or 0 for the first recorded node)
+ Delta = Int // Difference between consecutive offsets,
**************************************************************************************/
@@ -260,7 +254,7 @@ object TastyFormat {
final val TYPEREFdirect = 66
final val TERMREFpkg = 67
final val TYPEREFpkg = 68
- final val REFINEDthis = 69
+ final val RECthis = 69
final val BYTEconst = 70
final val SHORTconst = 71
final val CHARconst = 72
@@ -279,6 +273,7 @@ object TastyFormat {
final val IMPLICITarg = 101
final val PRIVATEqualified = 102
final val PROTECTEDqualified = 103
+ final val RECtype = 104
final val IDENT = 112
final val SELECT = 113
@@ -299,7 +294,8 @@ object TastyFormat {
final val RENAMED = 138
final val APPLY = 139
final val TYPEAPPLY = 140
- final val PAIR = 142
+
+
final val TYPED = 143
final val NAMEDARG = 144
final val ASSIGN = 145
@@ -326,7 +322,8 @@ object TastyFormat {
final val ORtype = 172
final val METHODtype = 174
final val POLYtype = 175
- final val PARAMtype = 176
+ final val LAMBDAtype = 176
+ final val PARAMtype = 177
final val ANNOTATION = 178
final val firstSimpleTreeTag = UNITconst
@@ -419,7 +416,7 @@ object TastyFormat {
case TYPEREFdirect => "TYPEREFdirect"
case TERMREFpkg => "TERMREFpkg"
case TYPEREFpkg => "TYPEREFpkg"
- case REFINEDthis => "REFINEDthis"
+ case RECthis => "RECthis"
case BYTEconst => "BYTEconst"
case SHORTconst => "SHORTconst"
case CHARconst => "CHARconst"
@@ -428,6 +425,7 @@ object TastyFormat {
case FLOATconst => "FLOATconst"
case DOUBLEconst => "DOUBLEconst"
case STRINGconst => "STRINGconst"
+ case RECtype => "RECtype"
case IDENT => "IDENT"
case SELECT => "SELECT"
@@ -449,7 +447,6 @@ object TastyFormat {
case APPLY => "APPLY"
case TYPEAPPLY => "TYPEAPPLY"
case NEW => "NEW"
- case PAIR => "PAIR"
case TYPED => "TYPED"
case NAMEDARG => "NAMEDARG"
case ASSIGN => "ASSIGN"
@@ -487,4 +484,19 @@ object TastyFormat {
case PRIVATEqualified => "PRIVATEqualified"
case PROTECTEDqualified => "PROTECTEDqualified"
}
+
+ /** @return If non-negative, the number of leading references of a length/trees entry.
+ * If negative, minus the number of leading non-reference trees.
+ */
+ def numRefs(tag: Int) = tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | RETURN | BIND |
+ SELFDEF | REFINEDtype => 1
+ case RENAMED | PARAMtype => 2
+ case POLYtype | METHODtype => -1
+ case _ => 0
+ }
+
+ /** Map between variances and name prefixes */
+ val varianceToPrefix = Map(-1 -> '-', 0 -> '=', 1 -> '+')
+ val prefixToVariance = Map('-' -> -1, '=' -> 0, '+' -> 1)
}
diff --git a/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
index 83e6020d5..98b0dc7c6 100644
--- a/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/TastyPickler.scala
@@ -31,7 +31,6 @@ class TastyPickler {
sections += ((nameBuffer.nameIndex(name), buf))
def assembleParts(): Array[Byte] = {
- treePkl.compactify()
def lengthWithLength(buf: TastyBuffer) = {
buf.assemble()
buf.length + natSize(buf.length)
diff --git a/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
index 915ae3f21..7fcd7c29e 100644
--- a/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
+++ b/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala
@@ -113,8 +113,8 @@ class TastyPrinter(bytes: Array[Byte])(implicit ctx: Context) {
class PositionSectionUnpickler extends SectionUnpickler[Unit]("Positions") {
def unpickle(reader: TastyReader, tastyName: TastyName.Table): Unit = {
print(s"${reader.endAddr.index - reader.currentAddr.index}")
- val (totalRange, positions) = new PositionUnpickler(reader).unpickle()
- println(s" position bytes in $totalRange:")
+ val positions = new PositionUnpickler(reader).positions
+ println(s" position bytes:")
val sorted = positions.toSeq.sortBy(_._1.index)
for ((addr, pos) <- sorted) println(s"${addr.index}: ${offsetToInt(pos.start)} .. ${pos.end}")
}
diff --git a/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/src/dotty/tools/dotc/core/tasty/TreePickler.scala
index f8f9c993f..8889e8a5c 100644
--- a/src/dotty/tools/dotc/core/tasty/TreePickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/TreePickler.scala
@@ -7,6 +7,7 @@ import ast.Trees._
import TastyFormat._
import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._
import collection.mutable
+import typer.Inliner
import NameOps._
import TastyBuffer._
import TypeApplications._
@@ -76,6 +77,10 @@ class TreePickler(pickler: TastyPickler) {
case Some(label) =>
if (label != NoAddr) writeRef(label) else pickleForwardSymRef(sym)
case None =>
+ // See pos/t1957.scala for an example where this can happen.
+ // I believe it's a bug in typer: the type of an implicit argument refers
+ // to a closure parameter outside the closure itself. TODO: track this down, so that we
+ // can eliminate this case.
ctx.log(i"pickling reference to as yet undefined $sym in ${sym.owner}", sym.pos)
pickleForwardSymRef(sym)
}
@@ -154,7 +159,7 @@ class TreePickler(pickler: TastyPickler) {
case ConstantType(value) =>
pickleConstant(value)
case tpe: TypeRef if tpe.info.isAlias && tpe.symbol.is(Flags.AliasPreferred) =>
- pickleType(tpe.info.bounds.hi)
+ pickleType(tpe.superType)
case tpe: WithFixedSym =>
val sym = tpe.symbol
def pickleRef() =
@@ -207,8 +212,8 @@ class TreePickler(pickler: TastyPickler) {
case tpe: SuperType =>
writeByte(SUPERtype)
withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe)}
- case tpe: RefinedThis =>
- writeByte(REFINEDthis)
+ case tpe: RecThis =>
+ writeByte(RECthis)
val binderAddr = pickledTypes.get(tpe.binder)
assert(binderAddr != null, tpe.binder)
writeRef(binderAddr.asInstanceOf[Addr])
@@ -217,10 +222,13 @@ class TreePickler(pickler: TastyPickler) {
case tpe: RefinedType =>
writeByte(REFINEDtype)
withLength {
- pickleType(tpe.parent)
pickleName(tpe.refinedName)
+ pickleType(tpe.parent)
pickleType(tpe.refinedInfo, richTypes = true)
}
+ case tpe: RecType =>
+ writeByte(RECtype)
+ pickleType(tpe.parent)
case tpe: TypeAlias =>
writeByte(TYPEALIAS)
withLength {
@@ -243,12 +251,14 @@ class TreePickler(pickler: TastyPickler) {
case tpe: ExprType =>
writeByte(BYNAMEtype)
pickleType(tpe.underlying)
+ case tpe: PolyType =>
+ writeByte(LAMBDAtype)
+ val paramNames = tpe.typeParams.map(tparam =>
+ varianceToPrefix(tparam.paramVariance) +: tparam.paramName)
+ pickleMethodic(tpe.resultType, paramNames, tpe.paramBounds)
case tpe: MethodType if richTypes =>
writeByte(METHODtype)
pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramTypes)
- case tpe: PolyType if richTypes =>
- writeByte(POLYtype)
- pickleMethodic(tpe.resultType, tpe.paramNames, tpe.paramBounds)
case tpe: PolyParam =>
if (!pickleParamType(tpe))
// TODO figure out why this case arises in e.g. pickling AbstractFileReader.
@@ -295,7 +305,7 @@ class TreePickler(pickler: TastyPickler) {
if (!tree.isEmpty) pickleTree(tree)
def pickleDef(tag: Int, sym: Symbol, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(implicit ctx: Context) = {
- assert(symRefs(sym) == NoAddr)
+ assert(symRefs(sym) == NoAddr, sym)
registerDef(sym)
writeByte(tag)
withLength {
@@ -380,9 +390,6 @@ class TreePickler(pickler: TastyPickler) {
case New(tpt) =>
writeByte(NEW)
pickleTpt(tpt)
- case Pair(left, right) =>
- writeByte(PAIR)
- withLength { pickleTree(left); pickleTree(right) }
case Typed(expr, tpt) =>
writeByte(TYPED)
withLength { pickleTree(expr); pickleTpt(tpt) }
@@ -421,6 +428,15 @@ class TreePickler(pickler: TastyPickler) {
case SeqLiteral(elems, elemtpt) =>
writeByte(REPEATED)
withLength { pickleTree(elemtpt); elems.foreach(pickleTree) }
+ case tree: Inlined =>
+ // Why drop Inlined info when pickling?
+ // Since we never inline inside an inlined method, we know that
+ // any code that contains an Inlined tree is not inlined itself.
+ // So position information for inline expansion is no longer needed.
+ // The only reason to keep the inline info around would be to have fine-grained
+ // position information in the linker. We should come back to this
+ // point once we know more what we would do with such information.
+ pickleTree(Inliner.dropInlined(tree))
case TypeTree(original) =>
pickleTpt(tree)
case Bind(name, body) =>
@@ -484,7 +500,7 @@ class TreePickler(pickler: TastyPickler) {
withLength {
pickleTree(expr)
selectors foreach {
- case Pair(Ident(from), Ident(to)) =>
+ case Thicket(Ident(from) :: Ident(to) :: Nil) =>
writeByte(RENAMED)
withLength { pickleName(from); pickleName(to) }
case Ident(name) =>
@@ -546,19 +562,19 @@ class TreePickler(pickler: TastyPickler) {
sym.annotations.foreach(pickleAnnotation)
}
- def pickleAnnotation(ann: Annotation)(implicit ctx: Context) = {
- writeByte(ANNOTATION)
- withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
- }
+ def pickleAnnotation(ann: Annotation)(implicit ctx: Context) =
+ if (ann.symbol != defn.BodyAnnot) { // inline bodies are reconstituted automatically when unpickling
+ writeByte(ANNOTATION)
+ withLength { pickleType(ann.symbol.typeRef); pickleTree(ann.tree) }
+ }
def pickle(trees: List[Tree])(implicit ctx: Context) = {
trees.foreach(tree => if (!tree.isEmpty) pickleTree(tree))
- assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, %")
+ assert(forwardSymRefs.isEmpty, i"unresolved symbols: ${forwardSymRefs.keySet.toList}%, % when pickling ${ctx.source}")
}
def compactify() = {
buf.compactify()
- assert(forwardSymRefs.isEmpty, s"unresolved symbols: ${forwardSymRefs.keySet.toList}%, %")
def updateMapWithDeltas[T](mp: collection.mutable.Map[T, Addr]) =
for (key <- mp.keysIterator.toBuffer[T]) mp(key) = adjusted(mp(key))
diff --git a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
index eb3369184..f67159808 100644
--- a/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
+++ b/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala
@@ -9,66 +9,68 @@ import util.Positions._
import ast.{tpd, Trees, untpd}
import Trees._
import Decorators._
-import TastyUnpickler._, TastyBuffer._, PositionPickler._
+import TastyUnpickler._, TastyBuffer._
import scala.annotation.{tailrec, switch}
import scala.collection.mutable.ListBuffer
import scala.collection.{ mutable, immutable }
-import typer.Mode
import config.Printers.pickling
/** Unpickler for typed trees
- * @param reader the reader from which to unpickle
- * @param tastyName the nametable
+ * @param reader the reader from which to unpickle
+ * @param tastyName the nametable
+ * @param posUnpicklerOpt the unpickler for positions, if it exists
*/
-class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
+class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table, posUnpicklerOpt: Option[PositionUnpickler]) {
import TastyFormat._
import TastyName._
+ import TreeUnpickler._
import tpd._
- private var readPositions = false
- private var totalRange = NoPosition
- private var positions: collection.Map[Addr, Position] = _
+ /** A map from addresses of definition entries to the symbols they define */
+ private val symAtAddr = new mutable.HashMap[Addr, Symbol]
- /** Make a subsequent call to `unpickle` return trees with positions
- * @param totalRange the range position enclosing all returned trees,
- * or NoPosition if positions should not be unpickled
- * @param positions a map from tree addresses to their positions relative
- * to positions of parent nodes.
+ /** A temporary map from addresses of definition entries to the trees they define.
+ * Used to remember trees of symbols that are created by a completion. Emptied
+ * once the tree is inlined into a larger tree.
*/
- def usePositions(totalRange: Position, positions: collection.Map[Addr, Position]): Unit = {
- readPositions = true
- this.totalRange = totalRange
- this.positions = positions
- }
-
- private val symAtAddr = new mutable.HashMap[Addr, Symbol]
- private val unpickledSyms = new mutable.HashSet[Symbol]
private val treeAtAddr = new mutable.HashMap[Addr, Tree]
- private val typeAtAddr = new mutable.HashMap[Addr, Type] // currently populated only for types that are known to be SHAREd.
- private var stubs: Set[Symbol] = Set()
+ /** A map from addresses of type entries to the types they define.
+ * Currently only populated for types that might be recursively referenced
+ * from within themselves (i.e. RefinedTypes, PolyTypes, MethodTypes).
+ */
+ private val typeAtAddr = new mutable.HashMap[Addr, Type]
+
+ /** The root symbol denotations that are defined by the Tasty file associated with this
+ * TreeUnpickler. Set by `enterTopLevel`.
+ */
private var roots: Set[SymDenotation] = null
- private def registerSym(addr: Addr, sym: Symbol) = {
+ /** The root symbols that are defined in this Tasty file. This
+ * is a subset of `roots.map(_.symbol)`.
+ */
+ private var seenRoots: Set[Symbol] = Set()
+
+ /** The root owner tree. See `OwnerTree` class definition. Set by `enterTopLevel`. */
+ private var ownerTree: OwnerTree = _
+
+ private def registerSym(addr: Addr, sym: Symbol) =
symAtAddr(addr) = sym
- unpickledSyms += sym
- }
/** Enter all toplevel classes and objects into their scopes
* @param roots a set of SymDenotations that should be overwritten by unpickling
*/
def enterTopLevel(roots: Set[SymDenotation])(implicit ctx: Context): Unit = {
this.roots = roots
- new TreeReader(reader).fork.indexStats(reader.endAddr)
+ var rdr = new TreeReader(reader).fork
+ ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr)
+ rdr.indexStats(reader.endAddr)
}
/** The unpickled trees */
def unpickle()(implicit ctx: Context): List[Tree] = {
assert(roots != null, "unpickle without previous enterTopLevel")
- val stats = new TreeReader(reader)
- .readTopLevel()(ctx.addMode(Mode.AllowDependentFunctions))
- normalizePos(stats, totalRange)
- stats
+ new TreeReader(reader).readTopLevel()(ctx.addMode(Mode.AllowDependentFunctions))
}
def toTermName(tname: TastyName): TermName = tname match {
@@ -85,10 +87,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
def toTermName(ref: NameRef): TermName = toTermName(tastyName(ref))
def toTypeName(ref: NameRef): TypeName = toTermName(ref).toTypeName
- class Completer(reader: TastyReader) extends LazyType {
+ class Completer(owner: Symbol, reader: TastyReader) extends LazyType {
import reader._
def complete(denot: SymDenotation)(implicit ctx: Context): Unit = {
- treeAtAddr(currentAddr) = new TreeReader(reader).readIndexedDef()
+ treeAtAddr(currentAddr) =
+ new TreeReader(reader).readIndexedDef()(
+ ctx.withPhaseNoLater(ctx.picklerPhase).withOwner(owner))
}
}
@@ -108,6 +112,47 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
def skipParams(): Unit =
while (nextByte == PARAMS || nextByte == TYPEPARAM) skipTree()
+ /** Record all directly nested definitions and templates in current tree
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTree(buf: ListBuffer[OwnerTree], mode: MemberDefMode = AllDefs): Unit = {
+ val start = currentAddr
+ val tag = readByte()
+ tag match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | TEMPLATE =>
+ val end = readEnd()
+ for (i <- 0 until numRefs(tag)) readNat()
+ if (tag == TEMPLATE) scanTrees(buf, end, MemberDefsOnly)
+ if (mode != NoMemberDefs) buf += new OwnerTree(start, tag, fork, end)
+ goto(end)
+ case tag =>
+ if (mode == MemberDefsOnly) skipTree(tag)
+ else if (tag >= firstLengthTreeTag) {
+ val end = readEnd()
+ var nrefs = numRefs(tag)
+ if (nrefs < 0) {
+ for (i <- nrefs until 0) scanTree(buf)
+ goto(end)
+ }
+ else {
+ for (i <- 0 until nrefs) readNat()
+ scanTrees(buf, end)
+ }
+ }
+ else if (tag >= firstNatASTTreeTag) { readNat(); scanTree(buf) }
+ else if (tag >= firstASTTreeTag) scanTree(buf)
+ else if (tag >= firstNatTreeTag) readNat()
+ }
+ }
+
+ /** Record all directly nested definitions and templates between current address and `end`
+ * as `OwnerTree`s in `buf`
+ */
+ def scanTrees(buf: ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode = AllDefs): Unit = {
+ while (currentAddr.index < end.index) scanTree(buf, mode)
+ assert(currentAddr.index == end.index)
+ }
+
/** The next tag, following through SHARED tags */
def nextUnsharedTag: Int = {
val tag = nextByte
@@ -134,9 +179,9 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
// ------ Reading types -----------------------------------------------------
/** Read names in an interleaved sequence of (parameter) names and types/bounds */
- def readParamNames[N <: Name](end: Addr): List[N] =
+ def readParamNames(end: Addr): List[Name] =
until(end) {
- val name = readName().asInstanceOf[N]
+ val name = readName()
skipTree()
name
}
@@ -146,19 +191,25 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
until(end) { readNat(); readType().asInstanceOf[T] }
/** Read referece to definition and return symbol created at that definition */
- def readSymRef()(implicit ctx: Context): Symbol = {
- val start = currentAddr
- val addr = readAddr()
- symAtAddr get addr match {
- case Some(sym) => sym
- case None =>
- // Create a stub; owner might be wrong but will be overwritten later.
- forkAt(addr).createSymbol()
- val sym = symAtAddr(addr)
- ctx.log(i"forward reference to $sym")
- stubs += sym
- sym
- }
+ def readSymRef()(implicit ctx: Context): Symbol = symbolAt(readAddr())
+
+ /** The symbol at the given address; create a new one if none exists yet */
+ def symbolAt(addr: Addr)(implicit ctx: Context): Symbol = symAtAddr.get(addr) match {
+ case Some(sym) =>
+ sym
+ case None =>
+ val sym = forkAt(addr).createSymbol()(ctx.withOwner(ownerTree.findOwner(addr)))
+ ctx.log(i"forward reference to $sym")
+ sym
+ }
+
+ /** The symbol defined by current definition */
+ def symbolAtCurrent()(implicit ctx: Context): Symbol = symAtAddr.get(currentAddr) match {
+ case Some(sym) =>
+ assert(ctx.owner == sym.owner, i"owner discrepancy for $sym, expected: ${ctx.owner}, found: ${sym.owner}")
+ sym
+ case None =>
+ createSymbol()
}
/** Read a type */
@@ -175,11 +226,11 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
def readLengthType(): Type = {
val end = readEnd()
- def readNamesSkipParams[N <: Name]: (List[N], TreeReader) = {
+ def readNamesSkipParams: (List[Name], TreeReader) = {
val nameReader = fork
nameReader.skipTree() // skip result
val paramReader = nameReader.fork
- (nameReader.readParamNames[N](end), paramReader)
+ (nameReader.readParamNames(end), paramReader)
}
val result =
@@ -187,11 +238,11 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case SUPERtype =>
SuperType(readType(), readType())
case REFINEDtype =>
- val parent = readType()
var name: Name = readName()
+ val parent = readType()
val ttag = nextUnsharedTag
if (ttag == TYPEBOUNDS || ttag == TYPEALIAS) name = name.toTypeName
- RefinedType(parent, name, rt => registeringType(rt, readType()))
+ RefinedType(parent, name, readType())
// Note that the lambda "rt => ..." is not equivalent to a wildcard closure!
// Eta expansion of the latter puts readType() out of the expression.
case APPLIEDtype =>
@@ -215,16 +266,25 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val sym = ctx.newSymbol(ctx.owner, readName().toTypeName, BindDefinedType, readType())
registerSym(start, sym)
TypeRef.withFixedSym(NoPrefix, sym.name, sym)
+ case LAMBDAtype =>
+ val (rawNames, paramReader) = readNamesSkipParams
+ val (variances, paramNames) = rawNames
+ .map(name => (prefixToVariance(name.head), name.tail.toTypeName)).unzip
+ val result = PolyType(paramNames, variances)(
+ pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
+ pt => readType())
+ goto(end)
+ result
case POLYtype =>
- val (names, paramReader) = readNamesSkipParams[TypeName]
- val result = PolyType(names)(
+ val (names, paramReader) = readNamesSkipParams
+ val result = PolyType(names.map(_.toTypeName))(
pt => registeringType(pt, paramReader.readParamTypes[TypeBounds](end)),
pt => readType())
goto(end)
result
case METHODtype =>
- val (names, paramReader) = readNamesSkipParams[TermName]
- val result = MethodType(names, paramReader.readParamTypes[Type](end))(
+ val (names, paramReader) = readNamesSkipParams
+ val result = MethodType(names.map(_.toTermName), paramReader.readParamTypes[Type](end))(
mt => registeringType(mt, readType()))
goto(end)
result
@@ -261,8 +321,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
case THIS =>
ThisType.raw(readType().asInstanceOf[TypeRef])
- case REFINEDthis =>
- RefinedThis(readTypeRef().asInstanceOf[RefinedType])
+ case RECtype =>
+ RecType(rt => registeringType(rt, readType()))
+ case RECthis =>
+ RecThis(readTypeRef().asInstanceOf[RecType])
case SHARED =>
val ref = readAddr()
typeAtAddr.getOrElseUpdate(ref, forkAt(ref).readType())
@@ -359,10 +421,23 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
/** Create symbol of definition node and enter in symAtAddr map
- * @return the largest subset of {NoInits, PureInterface} that a
- * trait owning this symbol can have as flags.
+ * @return the created symbol
+ */
+ def createSymbol()(implicit ctx: Context): Symbol = nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ createMemberSymbol()
+ case TEMPLATE =>
+ val localDummy = ctx.newLocalDummy(ctx.owner)
+ registerSym(currentAddr, localDummy)
+ localDummy
+ case tag =>
+ throw new Error(s"illegal createSymbol at $currentAddr, tag = $tag")
+ }
+
+ /** Create symbol of member definition or parameter node and enter in symAtAddr map
+ * @return the created symbol
*/
- def createSymbol()(implicit ctx: Context): FlagSet = {
+ def createMemberSymbol()(implicit ctx: Context): Symbol = {
val start = currentAddr
val tag = readByte()
val end = readEnd()
@@ -375,6 +450,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val isClass = ttag == TEMPLATE
val templateStart = currentAddr
skipTree() // tpt
+ val rhsStart = currentAddr
val rhsIsEmpty = noRhs(end)
if (!rhsIsEmpty) skipTree()
val (givenFlags, annots, privateWithin) = readModifiers(end)
@@ -392,29 +468,18 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case Some(rootd) =>
pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}")
rootd.info = adjustIfModule(
- new Completer(subReader(start, end)) with SymbolLoaders.SecondCompleter)
+ new Completer(ctx.owner, subReader(start, end)) with SymbolLoaders.SecondCompleter)
rootd.flags = flags &~ Touched // allow one more completion
rootd.privateWithin = privateWithin
+ seenRoots += rootd.symbol
rootd.symbol
case _ =>
- val completer = adjustIfModule(new Completer(subReader(start, end)))
+ val completer = adjustIfModule(new Completer(ctx.owner, subReader(start, end)))
if (isClass)
- ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer,
- privateWithin, coord = start.index)
- else {
- val sym = symAtAddr.get(start) match {
- case Some(preExisting) =>
- assert(stubs contains preExisting)
- stubs -= preExisting
- preExisting
- case none =>
- ctx.newNakedSymbol(start.index)
- }
- val denot = ctx.SymDenotation(symbol = sym, owner = ctx.owner, name, flags, completer, privateWithin)
- sym.denot = denot
- sym
- }
- } // TODO set position
+ ctx.newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord = start.index)
+ else
+ ctx.newSymbol(ctx.owner, name, flags, completer, privateWithin, coord = start.index)
+ } // TODO set position somehow (but take care not to upset Symbol#isDefinedInCurrentRun)
sym.annotations = annots
ctx.enter(sym)
registerSym(start, sym)
@@ -422,14 +487,18 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
sym.completer.withDecls(newScope)
forkAt(templateStart).indexTemplateParams()(localContext(sym))
}
- if (isClass) NoInits
- else if (sym.isType || sym.isConstructor || flags.is(Deferred)) NoInitsInterface
- else if (tag == VALDEF) EmptyFlags
- else NoInits
+ else if (annots.exists(_.symbol == defn.InlineAnnot))
+ sym.addAnnotation(LazyBodyAnnotation { ctx0 =>
+ implicit val ctx: Context = localContext(sym)(ctx0).addMode(Mode.ReadPositions)
+ // avoids space leaks by not capturing the current context
+ forkAt(rhsStart).readTerm()
+ })
+ goto(start)
+ sym
}
/** Read modifier list into triplet of flags, annotations and a privateWithin
- * boindary symbol.
+ * boundary symbol.
*/
def readModifiers(end: Addr)(implicit ctx: Context): (FlagSet, List[Annotation], Symbol) = {
var flags: FlagSet = EmptyFlags
@@ -492,28 +561,34 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
(flags, annots.toList, privateWithin)
}
- /** Create symbols for a definitions in statement sequence between
+ /** Create symbols for the definitions in the statement sequence between
* current address and `end`.
* @return the largest subset of {NoInits, PureInterface} that a
* trait owning the indexed statements can have as flags.
*/
def indexStats(end: Addr)(implicit ctx: Context): FlagSet = {
- val flagss =
- until(end) {
- nextByte match {
- case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
- createSymbol()
- case IMPORT =>
- skipTree()
- NoInitsInterface
- case PACKAGE =>
- processPackage { (pid, end) => implicit ctx => indexStats(end) }
- case _ =>
- skipTree()
- EmptyFlags
- }
+ var initsFlags = NoInitsInterface
+ while (currentAddr.index < end.index) {
+ nextByte match {
+ case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM =>
+ val sym = symbolAtCurrent()
+ skipTree()
+ if (sym.isTerm && !sym.is(MethodOrLazyOrDeferred))
+ initsFlags = EmptyFlags
+ else if (sym.isClass ||
+ sym.is(Method, butNot = Deferred) && !sym.isConstructor)
+ initsFlags &= NoInits
+ case IMPORT =>
+ skipTree()
+ case PACKAGE =>
+ processPackage { (pid, end) => implicit ctx => indexStats(end) }
+ case _ =>
+ skipTree()
+ initsFlags = EmptyFlags
}
- (NoInitsInterface /: flagss)(_ & _)
+ }
+ assert(currentAddr.index == end.index)
+ initsFlags
}
/** Process package with given operation `op`. The operation takes as arguments
@@ -532,7 +607,10 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
* `tag` starting at current address.
*/
def indexParams(tag: Int)(implicit ctx: Context) =
- while (nextByte == tag) createSymbol()
+ while (nextByte == tag) {
+ symbolAtCurrent()
+ skipTree()
+ }
/** Create symbols for all type and value parameters of template starting
* at current address.
@@ -559,7 +637,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val end = readEnd()
def readParams[T <: MemberDef](tag: Int)(implicit ctx: Context): List[T] = {
- fork.indexParams(tag)
+ fork.indexParams(tag)(localContext(sym))
readIndexedParams(tag)
}
@@ -597,6 +675,11 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR)
val resType = ctx.effectiveResultType(sym, typeParams, tpt.tpe)
sym.info = ctx.methodType(typeParams, valueParamss, resType)
+ if (sym.isSetter && sym.accessedFieldOrGetter.is(ParamAccessor)) {
+ // reconstitute ParamAccessor flag of setters for var parameters, which is not pickled
+ sym.setFlag(ParamAccessor)
+ sym.resetFlag(Deferred)
+ }
DefDef(tparams, vparamss, tpt)
case VALDEF =>
sym.info = readType()
@@ -604,7 +687,14 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case TYPEDEF | TYPEPARAM =>
if (sym.isClass) {
val companion = sym.scalacLinkedClass
- if (companion != NoSymbol && unpickledSyms.contains(companion)) {
+
+ // Is the companion defined in the same Tasty file as `sym`?
+ // The only case to check here is if `sym` is a root. In this case
+ // `companion` might have been entered by the environment but it might
+ // be missing from the Tasty file. So we check explicitly for that.
+ def isCodefined =
+ roots.contains(companion.denot) == seenRoots.contains(companion)
+ if (companion.exists && isCodefined) {
import transform.SymUtils._
if (sym is Flags.ModuleClass) sym.registerCompanionMethod(nme.COMPANION_CLASS_METHOD, companion)
else sym.registerCompanionMethod(nme.COMPANION_MODULE_METHOD, companion)
@@ -628,9 +718,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
}
val mods =
- if (sym.annotations.isEmpty) EmptyModifiers
- else Modifiers(annotations = sym.annotations.map(_.tree))
- tree.withMods(mods) // record annotations in tree so that tree positions can be filled in.
+ if (sym.annotations.isEmpty) untpd.EmptyModifiers
+ else untpd.Modifiers(annotations = sym.annotations.map(_.tree))
+ tree.withMods(mods)
+ // record annotations in tree so that tree positions can be filled in.
+ // Note: Once the inline PR with its changes to positions is in, this should be
+ // no longer necessary.
goto(end)
setPos(start, tree)
}
@@ -640,8 +733,12 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
val cls = ctx.owner.asClass
def setClsInfo(parents: List[TypeRef], selfType: Type) =
cls.info = ClassInfo(cls.owner.thisType, cls, parents, cls.unforcedDecls, selfType)
- setClsInfo(Nil, NoType)
- val localDummy = ctx.newLocalDummy(cls)
+ val assumedSelfType =
+ if (cls.is(Module) && cls.owner.isClass)
+ TermRef.withSig(cls.owner.thisType, cls.name.sourceModuleName, Signature.NotAMethod)
+ else NoType
+ setClsInfo(Nil, assumedSelfType)
+ val localDummy = symbolAtCurrent()
assert(readByte() == TEMPLATE)
val end = readEnd()
val tparams = readIndexedParams[TypeDef](TYPEPARAM)
@@ -653,7 +750,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
}
}
val parentRefs = ctx.normalizeToClassRefs(parents.map(_.tpe), cls, cls.unforcedDecls)
- val self =
+ val self =
if (nextByte == SELFDEF) {
readByte()
untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType)
@@ -697,9 +794,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
@tailrec def read(acc: ListBuffer[Tree]): List[Tree] = nextByte match {
case IMPORT | PACKAGE =>
acc += readIndexedStat(NoSymbol)
- if (!isAtEnd)
- read(acc)
- else acc.toList
+ if (!isAtEnd) read(acc) else acc.toList
case _ => // top-level trees which are not imports or packages are not part of tree
acc.toList
}
@@ -728,7 +823,7 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
case RENAMED =>
readByte()
readEnd()
- untpd.Pair(untpd.Ident(readName()), untpd.Ident(readName())) :: readSelectors()
+ untpd.Thicket(untpd.Ident(readName()), untpd.Ident(readName())) :: readSelectors()
case IMPORTED =>
readByte()
untpd.Ident(readName()) :: readSelectors()
@@ -803,18 +898,24 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
tpd.Super(qual, mixName, ctx.mode.is(Mode.InSuperCall), mixClass)
case APPLY =>
val fn = readTerm()
- val isJava = fn.tpe.isInstanceOf[JavaMethodType]
+ val isJava = fn.symbol.is(JavaDefined)
def readArg() = readTerm() match {
- case SeqLiteral(elems, elemtpt) if isJava => JavaSeqLiteral(elems, elemtpt)
+ case SeqLiteral(elems, elemtpt) if isJava =>
+ JavaSeqLiteral(elems, elemtpt)
case arg => arg
}
tpd.Apply(fn, until(end)(readArg()))
case TYPEAPPLY =>
tpd.TypeApply(readTerm(), until(end)(readTpt()))
- case PAIR =>
- Pair(readTerm(), readTerm())
case TYPED =>
- Typed(readTerm(), readTpt())
+ val expr = readTerm()
+ val tpt = readTpt()
+ val expr1 = expr match {
+ case SeqLiteral(elems, elemtpt) if tpt.tpe.isRef(defn.ArrayClass) =>
+ JavaSeqLiteral(elems, elemtpt)
+ case expr => expr
+ }
+ Typed(expr1, tpt)
case NAMEDARG =>
NamedArg(readName(), readTerm())
case ASSIGN =>
@@ -898,44 +999,88 @@ class TreeUnpickler(reader: TastyReader, tastyName: TastyName.Table) {
new LazyReader(localReader, op)
}
-// ------ Hooks for positions ------------------------------------------------
+// ------ Setting positions ------------------------------------------------
- /** Record address from which tree was created as a temporary position in the tree.
- * The temporary position contains deltas relative to the position of the (as yet unknown)
- * parent node. It is marked as a non-synthetic source position.
- */
- def setPos[T <: Tree](addr: Addr, tree: T): T = {
- if (readPositions)
- tree.setPosUnchecked(positions.getOrElse(addr, Position(0, 0, 0)))
- tree
- }
+ /** Set position of `tree` at given `addr`. */
+ def setPos[T <: Tree](addr: Addr, tree: T)(implicit ctx: Context): tree.type =
+ if (ctx.mode.is(Mode.ReadPositions)) {
+ posUnpicklerOpt match {
+ case Some(posUnpickler) => tree.withPos(posUnpickler.posAt(addr))
+ case _ => tree
+ }
+ }
+ else tree
}
- private def setNormalized(tree: Tree, parentPos: Position): Unit =
- tree.setPosUnchecked(
- if (tree.pos.exists)
- Position(parentPos.start + offsetToInt(tree.pos.start), parentPos.end - tree.pos.end)
- else
- parentPos)
-
- def normalizePos(x: Any, parentPos: Position)(implicit ctx: Context): Unit =
- traverse(x, parentPos, setNormalized)
-
- class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] with DeferredPosition {
+ class LazyReader[T <: AnyRef](reader: TreeReader, op: TreeReader => Context => T) extends Trees.Lazy[T] {
def complete(implicit ctx: Context): T = {
pickling.println(i"starting to read at ${reader.reader.currentAddr}")
- val res = op(reader)(ctx.addMode(Mode.AllowDependentFunctions))
- normalizePos(res, parentPos)
- res
+ op(reader)(ctx.addMode(Mode.AllowDependentFunctions).withPhaseNoLater(ctx.picklerPhase))
}
}
- class LazyAnnotationReader(sym: Symbol, reader: TreeReader)
- extends LazyAnnotation(sym) with DeferredPosition {
+ class LazyAnnotationReader(sym: Symbol, reader: TreeReader) extends LazyAnnotation(sym) {
def complete(implicit ctx: Context) = {
- val res = reader.readTerm()
- normalizePos(res, parentPos)
- res
+ reader.readTerm()(ctx.withPhaseNoLater(ctx.picklerPhase))
+ }
+ }
+
+ /** A lazy datastructure that records how definitions are nested in TASTY data.
+ * The structure is lazy because it needs to be computed only for forward references
+ * to symbols that happen before the referenced symbol is created (see `symbolAt`).
+ * Such forward references are rare.
+ *
+ * @param addr The address of tree representing an owning definition, NoAddr for root tree
+ * @param tag The tag at `addr`. Used to determine which subtrees to scan for children
+ * (i.e. if `tag` is template, don't scan member defs, as these belong already
+ * to enclosing class).
+ * @param reader The reader to be used for scanning for children
+ * @param end The end of the owning definition
+ */
+ class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr) {
+
+ /** All definitions that have the definition at `addr` as closest enclosing definition */
+ lazy val children: List[OwnerTree] = {
+ val buf = new ListBuffer[OwnerTree]
+ reader.scanTrees(buf, end, if (tag == TEMPLATE) NoMemberDefs else AllDefs)
+ buf.toList
+ }
+
+ /** Find the owner of definition at `addr` */
+ def findOwner(addr: Addr)(implicit ctx: Context): Symbol = {
+ def search(cs: List[OwnerTree], current: Symbol): Symbol =
+ try cs match {
+ case ot :: cs1 =>
+ if (ot.addr.index == addr.index)
+ current
+ else if (ot.addr.index < addr.index && addr.index < ot.end.index)
+ search(ot.children, reader.symbolAt(ot.addr))
+ else
+ search(cs1, current)
+ case Nil =>
+ throw new TreeWithoutOwner
+ }
+ catch {
+ case ex: TreeWithoutOwner =>
+ println(i"no owner for $addr among $cs") // DEBUG
+ throw ex
+ }
+ search(children, NoSymbol)
}
+
+    override def toString = s"OwnerTree(${addr.index}, ${end.index})"
}
}
+
+object TreeUnpickler {
+
+ /** An enumeration indicating which subtrees should be added to an OwnerTree. */
+ type MemberDefMode = Int
+ final val MemberDefsOnly = 0 // add only member defs; skip other statements
+ final val NoMemberDefs = 1 // add only statements that are not member defs
+ final val AllDefs = 2 // add everything
+
+ class TreeWithoutOwner extends Exception
+}
+
+
diff --git a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
index 2831de3e0..70148b3e2 100644
--- a/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
+++ b/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala
@@ -12,12 +12,12 @@ import StdNames._, Denotations._, NameOps._, Flags._, Constants._, Annotations._
import dotty.tools.dotc.typer.ProtoTypes.{FunProtoTyped, FunProto}
import util.Positions._
import dotty.tools.dotc.ast.{tpd, Trees, untpd}, ast.tpd._
+import ast.untpd.Modifiers
import printing.Texts._
import printing.Printer
import io.AbstractFile
import util.common._
import typer.Checking.checkNonCyclic
-import typer.Mode
import PickleBuffer._
import scala.reflect.internal.pickling.PickleFormat._
import Decorators._
@@ -32,7 +32,7 @@ object Scala2Unpickler {
/** Exception thrown if classfile is corrupted */
class BadSignature(msg: String) extends RuntimeException(msg)
- case class TempPolyType(tparams: List[Symbol], tpe: Type) extends UncachedGroundType {
+ case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType {
override def fallbackToText(printer: Printer): Text =
"[" ~ printer.dclsText(tparams, ", ") ~ "]" ~ printer.toText(tpe)
}
@@ -83,8 +83,8 @@ object Scala2Unpickler {
paramNames,
paramTypes.init :+ defn.RepeatedParamType.appliedTo(elemtp),
tp.resultType)
- case tp @ PolyType(paramNames) =>
- tp.derivedPolyType(paramNames, tp.paramBounds, arrayToRepeated(tp.resultType))
+ case tp: PolyType =>
+ tp.derivedPolyType(tp.paramNames, tp.paramBounds, arrayToRepeated(tp.resultType))
}
def ensureConstructor(cls: ClassSymbol, scope: Scope)(implicit ctx: Context) =
@@ -107,7 +107,8 @@ object Scala2Unpickler {
// `denot.sourceModule.exists` provision i859.scala crashes in the backend.
denot.owner.thisType select denot.sourceModule
else selfInfo
- denot.info = ClassInfo(denot.owner.thisType, denot.classSymbol, Nil, decls, ost) // first rough info to avoid CyclicReferences
+ val tempInfo = new TempClassInfo(denot.owner.thisType, denot.classSymbol, decls, ost)
+ denot.info = tempInfo // first rough info to avoid CyclicReferences
var parentRefs = ctx.normalizeToClassRefs(parents, cls, decls)
if (parentRefs.isEmpty) parentRefs = defn.ObjectType :: Nil
for (tparam <- tparams) {
@@ -133,9 +134,8 @@ object Scala2Unpickler {
registerCompanionPair(scalacCompanion, denot.classSymbol)
}
- denot.info = ClassInfo( // final info, except possibly for typeparams ordering
- denot.owner.thisType, denot.classSymbol, parentRefs, decls, ost)
- denot.updateTypeParams(tparams)
+ tempInfo.finalize(denot, parentRefs) // install final info, except possibly for typeparams ordering
+ denot.ensureTypeParamsInCorrectOrder()
}
}
@@ -178,9 +178,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
protected def errorBadSignature(msg: String, original: Option[RuntimeException] = None)(implicit ctx: Context) = {
val ex = new BadSignature(
- sm"""error reading Scala signature of $classRoot from $source:
- |error occurred at position $readIndex: $msg""")
- if (ctx.debug) original.getOrElse(ex).printStackTrace()
+ i"""error reading Scala signature of $classRoot from $source:
+ |error occurred at position $readIndex: $msg""")
+ if (ctx.debug || true) original.getOrElse(ex).printStackTrace() // temporarily enable printing of original failure signature to debug failing builds
throw ex
}
@@ -189,8 +189,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case _ => errorBadSignature(s"a runtime exception occurred: $ex", Some(ex))
}
- private var postReadOp: Context => Unit = null
-
def run()(implicit ctx: Context) =
try {
var i = 0
@@ -198,10 +196,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
if (entries(i) == null && isSymbolEntry(i)) {
val savedIndex = readIndex
readIndex = index(i)
- entries(i) = readSymbol()
- if (postReadOp != null) {
- postReadOp(ctx)
- postReadOp = null
+ val sym = readSymbol()
+ entries(i) = sym
+ sym.infoOrCompleter match {
+ case info: ClassUnpickler => info.init()
+ case _ =>
}
readIndex = savedIndex
}
@@ -487,20 +486,20 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
}
ctx.newSymbol(owner, name1, flags1, localMemberUnpickler, coord = start)
case CLASSsym =>
- val infoRef = readNat()
- postReadOp = implicit ctx => atReadPos(index(infoRef), readTypeParams) // force reading type params early, so they get entered in the right order.
+ var infoRef = readNat()
+ if (isSymbolRef(infoRef)) infoRef = readNat()
if (isClassRoot)
completeRoot(
- classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol))
+ classRoot, rootClassUnpickler(start, classRoot.symbol, NoSymbol, infoRef))
else if (isModuleClassRoot)
completeRoot(
- moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule))
+ moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef))
else if (name == tpnme.REFINE_CLASS)
// create a type alias instead
ctx.newSymbol(owner, name, flags, localMemberUnpickler, coord = start)
else {
def completer(cls: Symbol) = {
- val unpickler = new LocalUnpickler() withDecls symScope(cls)
+ val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls)
if (flags is ModuleClass)
unpickler withSourceModule (implicit ctx =>
cls.owner.info.decls.lookup(cls.name.sourceModuleName)
@@ -574,7 +573,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
// println(s"unpickled ${denot.debugString}, info = ${denot.info}") !!! DEBUG
}
atReadPos(startCoord(denot).toIndex,
- () => parseToCompletion(denot)(ctx.addMode(Mode.Scala2Unpickling)))
+ () => parseToCompletion(denot)(
+ ctx.addMode(Mode.Scala2Unpickling).withPhaseNoLater(ctx.picklerPhase)))
} catch {
case ex: RuntimeException => handleRuntimeException(ex)
}
@@ -582,8 +582,27 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
object localMemberUnpickler extends LocalUnpickler
- def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol) =
- (new LocalUnpickler with SymbolLoaders.SecondCompleter {
+ class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter {
+ private def readTypeParams()(implicit ctx: Context): List[TypeSymbol] = {
+ val tag = readByte()
+ val end = readNat() + readIndex
+ if (tag == POLYtpe) {
+ val unusedRestpeRef = readNat()
+ until(end, readSymbolRef).asInstanceOf[List[TypeSymbol]]
+ } else Nil
+ }
+ private def loadTypeParams(implicit ctx: Context) =
+ atReadPos(index(infoRef), readTypeParams)
+
+ /** Force reading type params early, we need them in setClassInfo of subclasses. */
+ def init()(implicit ctx: Context) = loadTypeParams
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] =
+ loadTypeParams
+ }
+
+ def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int) =
+ (new ClassUnpickler(infoRef) with SymbolLoaders.SecondCompleter {
override def startCoord(denot: SymDenotation): Coord = start
}) withDecls symScope(cls) withSourceModule (_ => module)
@@ -620,9 +639,9 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
def removeSingleton(tp: Type): Type =
if (tp isRef defn.SingletonClass) defn.AnyType else tp
def elim(tp: Type): Type = tp match {
- case tp @ RefinedType(parent, name) =>
+ case tp @ RefinedType(parent, name, rinfo) =>
val parent1 = elim(tp.parent)
- tp.refinedInfo match {
+ rinfo match {
case TypeAlias(info: TypeRef) if isBound(info) =>
RefinedType(parent1, name, info.symbol.info)
case info: TypeRef if isBound(info) =>
@@ -632,8 +651,14 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case info =>
tp.derivedRefinedType(parent1, name, info)
}
- case tp @ TypeRef(pre, tpnme.hkApply) =>
- tp.derivedSelect(elim(pre))
+ case tp @ HKApply(tycon, args) =>
+ val tycon1 = tycon.safeDealias
+ def mapArg(arg: Type) = arg match {
+ case arg: TypeRef if isBound(arg) => arg.symbol.info
+ case _ => arg
+ }
+ if (tycon1 ne tycon) elim(tycon1.appliedTo(args))
+ else tp.derivedAppliedType(tycon, args.map(mapArg))
case _ =>
tp
}
@@ -709,7 +734,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
else TypeRef(pre, sym.name.asTypeName)
val args = until(end, readTypeRef)
if (sym == defn.ByNameParamClass2x) ExprType(args.head)
- else if (args.nonEmpty) tycon.safeAppliedTo(etaExpandIfHK(sym.typeParams, args))
+ else if (args.nonEmpty) tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args))
else if (sym.typeParams.nonEmpty) tycon.EtaExpand(sym.typeParams)
else tycon
case TYPEBOUNDStpe =>
@@ -722,13 +747,12 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
val parent = parents.reduceLeft(AndType(_, _))
if (decls.isEmpty) parent
else {
- def addRefinement(tp: Type, sym: Symbol) = {
- def subst(info: Type, rt: RefinedType) =
- if (clazz.isClass) info.substThis(clazz.asClass, RefinedThis(rt))
- else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
- RefinedType(tp, sym.name, subst(sym.info, _))
- }
- (parent /: decls.toList)(addRefinement).asInstanceOf[RefinedType]
+ def subst(info: Type, rt: RecType) =
+ if (clazz.isClass) info.substThis(clazz.asClass, RecThis(rt))
+ else info // turns out some symbols read into `clazz` are not classes, not sure why this is the case.
+ def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info)
+ val refined = (parent /: decls.toList)(addRefinement)
+ RecType.closeOver(rt => subst(refined, rt))
}
case CLASSINFOtpe =>
val clazz = readSymbolRef()
@@ -744,7 +768,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case POLYtpe =>
val restpe = readTypeRef()
val typeParams = until(end, readSymbolRef)
- if (typeParams.nonEmpty) TempPolyType(typeParams, restpe.widenExpr)
+ if (typeParams.nonEmpty) TempPolyType(typeParams.asInstanceOf[List[TypeSymbol]], restpe.widenExpr)
else ExprType(restpe)
case EXISTENTIALtpe =>
val restpe = readTypeRef()
@@ -923,7 +947,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
val start = readIndex
val atp = readTypeRef()
Annotation.deferred(
- atp.typeSymbol, implicit ctx => atReadPos(start, () => readAnnotationContents(end)))
+ atp.typeSymbol, implicit ctx1 =>
+ atReadPos(start, () => readAnnotationContents(end)(ctx1.withPhase(ctx.phase))))
}
/* Read an abstract syntax tree */
@@ -1020,7 +1045,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
val toName = readNameRef()
val from = untpd.Ident(fromName)
val to = untpd.Ident(toName)
- if (toName.isEmpty) from else untpd.Pair(from, untpd.Ident(toName))
+ if (toName.isEmpty) from else untpd.Thicket(from, untpd.Ident(toName))
})
Import(expr, selectors)
@@ -1162,7 +1187,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case ANNOTATEDtree =>
val annot = readTreeRef()
val arg = readTreeRef()
- Annotated(annot, arg)
+ Annotated(arg, annot)
case SINGLETONTYPEtree =>
SingletonTypeTree(readTreeRef())
@@ -1170,7 +1195,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
case SELECTFROMTYPEtree =>
val qualifier = readTreeRef()
val selector = readTypeNameRef()
- SelectFromTypeTree(qualifier, symbol.namedType)
+ Select(qualifier, symbol.namedType)
case COMPOUNDTYPEtree =>
readTemplateRef()
@@ -1212,7 +1237,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas
val pflags = (pflagsHi.toLong << 32) + pflagsLo
val flags = unpickleScalaFlags(pflags, isType)
val privateWithin = readNameRef().asTypeName
- Trees.Modifiers[Type](flags, privateWithin, Nil)
+ Modifiers(flags, privateWithin, Nil)
}
protected def readTemplateRef()(implicit ctx: Context): Template =
diff --git a/src/dotty/tools/dotc/parsing/JavaParsers.scala b/src/dotty/tools/dotc/parsing/JavaParsers.scala
index b4d01a0da..ed7cf9e3f 100644
--- a/src/dotty/tools/dotc/parsing/JavaParsers.scala
+++ b/src/dotty/tools/dotc/parsing/JavaParsers.scala
@@ -10,6 +10,7 @@ import scala.language.implicitConversions
import JavaTokens._
import JavaScanners._
+import Scanners.Offset
import Parsers._
import core._
import Contexts._
@@ -107,9 +108,6 @@ object JavaParsers {
def unimplementedExpr = Ident("???".toTermName)
- def makePackaging(pkg: RefTree, stats: List[Tree]): PackageDef =
- atPos(pkg.pos) { PackageDef(pkg, stats) }
-
def makeTemplate(parents: List[Tree], stats: List[Tree], tparams: List[TypeDef], needsDummyConstr: Boolean) = {
def pullOutFirstConstr(stats: List[Tree]): (Tree, List[Tree]) = stats match {
case (meth: DefDef) :: rest if meth.name == CONSTRUCTOR => (meth, rest)
@@ -175,6 +173,8 @@ object JavaParsers {
/** Consume one token of the specified type, or
* signal an error if it is not there.
+ *
+ * @return The offset at the start of the token to accept
*/
def accept(token: Int): Int = {
val offset = in.offset
@@ -227,7 +227,7 @@ object JavaParsers {
def convertToTypeId(tree: Tree): Tree = convertToTypeName(tree) match {
case Some(t) => t withPos tree.pos
case _ => tree match {
- case AppliedTypeTree(_, _) | SelectFromTypeTree(_, _) =>
+ case AppliedTypeTree(_, _) | Select(_, _) =>
tree
case _ =>
syntaxError("identifier expected", tree.pos)
@@ -248,14 +248,14 @@ object JavaParsers {
var t: RefTree = atPos(in.offset) { Ident(ident()) }
while (in.token == DOT) {
in.nextToken()
- t = atPos(in.offset) { Select(t, ident()) }
+ t = atPos(t.pos.start, in.offset) { Select(t, ident()) }
}
t
}
def optArrayBrackets(tpt: Tree): Tree =
if (in.token == LBRACKET) {
- val tpt1 = atPos(in.offset) { arrayOf(tpt) }
+ val tpt1 = atPos(tpt.pos.start, in.offset) { arrayOf(tpt) }
in.nextToken()
accept(RBRACKET)
optArrayBrackets(tpt1)
@@ -281,18 +281,15 @@ object JavaParsers {
if (in.token == FINAL) in.nextToken()
if (in.token == IDENTIFIER) {
var t = typeArgs(atPos(in.offset)(Ident(ident())))
- // typeSelect generates Select nodes is the lhs is an Ident or Select,
- // SelectFromTypeTree otherwise. See #3567.
- // Select nodes can be later
- // converted in the typechecker to SelectFromTypeTree if the class
- // turns out to be an instance ionner class instead of a static inner class.
+ // typeSelect generates Select nodes if the lhs is an Ident or Select,
+ // For other nodes it always assumes that the selected item is a type.
def typeSelect(t: Tree, name: Name) = t match {
case Ident(_) | Select(_, _) => Select(t, name)
- case _ => SelectFromTypeTree(t, name.toTypeName)
+ case _ => Select(t, name.toTypeName)
}
while (in.token == DOT) {
in.nextToken()
- t = typeArgs(atPos(in.offset)(typeSelect(t, ident())))
+ t = typeArgs(atPos(t.pos.start, in.offset)(typeSelect(t, ident())))
}
convertToTypeId(t)
} else {
@@ -326,7 +323,7 @@ object JavaParsers {
val t1 = convertToTypeId(t)
val args = repsep(typeArg, COMMA)
acceptClosingAngle()
- atPos(t1.pos) {
+ atPos(t1.pos.start) {
AppliedTypeTree(t1, args)
}
} else t
@@ -354,7 +351,11 @@ object JavaParsers {
// assumed true unless we see public/private/protected
var isPackageAccess = true
var annots: List[Tree] = Nil
- def addAnnot(sym: ClassSymbol) = annots :+= New(TypeTree(sym.typeRef)).withPos(Position(in.offset))
+ def addAnnot(sym: ClassSymbol) =
+ annots :+= atPos(in.offset) {
+ in.nextToken()
+ New(TypeTree(sym.typeRef))
+ }
while (true) {
in.token match {
@@ -385,13 +386,10 @@ object JavaParsers {
in.nextToken()
case NATIVE =>
addAnnot(NativeAnnot)
- in.nextToken()
case TRANSIENT =>
addAnnot(TransientAnnot)
- in.nextToken()
case VOLATILE =>
addAnnot(VolatileAnnot)
- in.nextToken()
case SYNCHRONIZED | STRICTFP =>
in.nextToken()
case _ =>
@@ -441,16 +439,19 @@ object JavaParsers {
}
def formalParam(): ValDef = {
+ val start = in.offset
if (in.token == FINAL) in.nextToken()
annotations()
var t = typ()
if (in.token == DOTDOTDOT) {
in.nextToken()
- t = atPos(t.pos) {
+ t = atPos(t.pos.start) {
PostfixOp(t, nme.raw.STAR)
}
}
- varDecl(Position(in.offset), Modifiers(Flags.JavaDefined | Flags.Param), t, ident().toTermName)
+ atPos(start, in.offset) {
+ varDecl(Modifiers(Flags.JavaDefined | Flags.Param), t, ident().toTermName)
+ }
}
def optThrows(): Unit = {
@@ -460,7 +461,7 @@ object JavaParsers {
}
}
- def methodBody(): Tree = {
+ def methodBody(): Tree = atPos(in.offset) {
skipAhead()
accept(RBRACE) // skip block
unimplementedExpr
@@ -468,16 +469,18 @@ object JavaParsers {
def definesInterface(token: Int) = token == INTERFACE || token == AT
- def termDecl(mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = {
+ def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = {
val inInterface = definesInterface(parentToken)
val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List()
val isVoid = in.token == VOID
var rtpt =
- if (isVoid) {
- in.nextToken()
- TypeTree(UnitType) withPos Position(in.offset)
- } else typ()
- var offset = in.offset
+ if (isVoid)
+ atPos(in.offset) {
+ in.nextToken()
+ TypeTree(UnitType)
+ }
+ else typ()
+ var nameOffset = in.offset
val rtptName = rtpt match {
case Ident(name) => name
case _ => nme.EMPTY
@@ -487,14 +490,15 @@ object JavaParsers {
val vparams = formalParams()
optThrows()
List {
- atPos(offset) {
- DefDef(nme.CONSTRUCTOR, parentTParams, List(vparams), TypeTree(), methodBody()).withMods(mods)
+ atPos(start) {
+ DefDef(nme.CONSTRUCTOR, parentTParams,
+ List(vparams), TypeTree(), methodBody()).withMods(mods)
}
}
} else {
var mods1 = mods
if (mods is Flags.Abstract) mods1 = mods &~ Flags.Abstract
- offset = in.offset
+ nameOffset = in.offset
val name = ident()
if (in.token == LPAREN) {
// method declaration
@@ -508,13 +512,14 @@ object JavaParsers {
} else {
if (parentToken == AT && in.token == DEFAULT) {
val annot =
- atPos(offset) {
+ atPos(nameOffset) {
New(Select(scalaDot(nme.runtime), tpnme.AnnotationDefaultATTR), Nil)
}
mods1 = mods1 withAddedAnnotation annot
+ val unimplemented = unimplementedExpr
skipTo(SEMI)
accept(SEMI)
- unimplementedExpr
+ unimplemented
} else {
accept(SEMI)
EmptyTree
@@ -522,13 +527,13 @@ object JavaParsers {
}
//if (inInterface) mods1 |= Flags.Deferred
List {
- atPos(offset) {
+ atPos(start, nameOffset) {
DefDef(name.toTermName, tparams, List(vparams), rtpt, body).withMods(mods1 | Flags.Method)
}
}
} else {
if (inInterface) mods1 |= Flags.Final | Flags.JavaStatic
- val result = fieldDecls(Position(offset), mods1, rtpt, name)
+ val result = fieldDecls(start, nameOffset, mods1, rtpt, name)
accept(SEMI)
result
}
@@ -544,19 +549,21 @@ object JavaParsers {
* Once we have reached the end of the statement, we know whether
* these potential definitions are real or not.
*/
- def fieldDecls(pos: Position, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
- val buf = ListBuffer[Tree](varDecl(pos, mods, tpt, name.toTermName))
+ def fieldDecls(start: Offset, firstNameOffset: Offset, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = {
+ val buf = ListBuffer[Tree](
+ atPos(start, firstNameOffset) { varDecl(mods, tpt, name.toTermName) })
val maybe = new ListBuffer[Tree] // potential variable definitions.
while (in.token == COMMA) {
in.nextToken()
if (in.token == IDENTIFIER) { // if there's an ident after the comma ...
+ val nextNameOffset = in.offset
val name = ident()
if (in.token == EQUALS || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition
buf ++= maybe
- buf += varDecl(Position(in.offset), mods, tpt, name.toTermName)
+ buf += atPos(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) }
maybe.clear()
} else if (in.token == COMMA) { // ... if there's a comma after the ident, it could be a real vardef or not.
- maybe += varDecl(Position(in.offset), mods, tpt, name.toTermName)
+ maybe += atPos(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) }
} else { // ... if there's something else we were still in the initializer of the
// previous var def; skip to next comma or semicolon.
skipTo(COMMA, SEMI)
@@ -574,35 +581,29 @@ object JavaParsers {
buf.toList
}
- def varDecl(pos: Position, mods: Modifiers, tpt: Tree, name: TermName): ValDef = {
+ def varDecl(mods: Modifiers, tpt: Tree, name: TermName): ValDef = {
val tpt1 = optArrayBrackets(tpt)
if (in.token == EQUALS && !(mods is Flags.Param)) skipTo(COMMA, SEMI)
val mods1 = if (mods is Flags.Final) mods else mods | Flags.Mutable
- atPos(pos) {
- ValDef(name, tpt1, if (mods is Flags.Param) EmptyTree else unimplementedExpr).withMods(mods1)
- }
+ ValDef(name, tpt1, if (mods is Flags.Param) EmptyTree else unimplementedExpr).withMods(mods1)
}
- def memberDecl(mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match {
+ def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match {
case CLASS | ENUM | INTERFACE | AT =>
- typeDecl(if (definesInterface(parentToken)) mods | Flags.JavaStatic else mods)
+ typeDecl(start, if (definesInterface(parentToken)) mods | Flags.JavaStatic else mods)
case _ =>
- termDecl(mods, parentToken, parentTParams)
+ termDecl(start, mods, parentToken, parentTParams)
}
def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree =
atPos(cdef.pos) {
+ assert(cdef.pos.exists)
ModuleDef(cdef.name.toTermName,
makeTemplate(List(), statics, List(), false)).withMods((cdef.mods & (Flags.AccessFlags | Flags.JavaDefined)).toTermFlags)
}
- private val wild = Ident(nme.WILDCARD) withPos Position(-1)
- private val wildList = List(wild) // OPT This list is shared for performance.
-
def importCompanionObject(cdef: TypeDef): Tree =
- atPos(cdef.pos) {
- Import(Ident(cdef.name.toTermName), wildList)
- }
+ Import(Ident(cdef.name.toTermName).withPos(NoPosition), Ident(nme.WILDCARD) :: Nil)
// Importing the companion object members cannot be done uncritically: see
// ticket #2377 wherein a class contains two static inner classes, each of which
@@ -631,8 +632,8 @@ object JavaParsers {
}
def importDecl(): List[Tree] = {
+ val start = in.offset
accept(IMPORT)
- val offset = in.offset
val buf = new ListBuffer[Name]
def collectIdents() : Int = {
if (in.token == ASTERISK) {
@@ -655,7 +656,7 @@ object JavaParsers {
accept(SEMI)
val names = buf.toList
if (names.length < 2) {
- syntaxError(offset, "illegal import", skipIt = false)
+ syntaxError(start, "illegal import", skipIt = false)
List()
} else {
val qual = ((Ident(names.head): Tree) /: names.tail.init) (Select(_, _))
@@ -665,7 +666,8 @@ object JavaParsers {
// case nme.WILDCARD => Pair(ident, Ident(null) withPos Position(-1))
// case _ => Pair(ident, ident)
// }
- List(atPos(offset)(Import(qual, List(ident))))
+ val imp = atPos(start) { Import(qual, List(ident)) }
+ imp :: Nil
}
}
@@ -677,9 +679,9 @@ object JavaParsers {
List()
}
- def classDecl(mods: Modifiers): List[Tree] = {
+ def classDecl(start: Offset, mods: Modifiers): List[Tree] = {
accept(CLASS)
- val offset = in.offset
+ val nameOffset = in.offset
val name = identForType()
val tparams = typeParams()
val superclass =
@@ -691,14 +693,15 @@ object JavaParsers {
}
val interfaces = interfacesOpt()
val (statics, body) = typeBody(CLASS, name, tparams)
- addCompanionObject(statics, atPos(offset) {
+ val cls = atPos(start, nameOffset) {
TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, true)).withMods(mods)
- })
+ }
+ addCompanionObject(statics, cls)
}
- def interfaceDecl(mods: Modifiers): List[Tree] = {
+ def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = {
accept(INTERFACE)
- val offset = in.offset
+ val nameOffset = in.offset
val name = identForType()
val tparams = typeParams()
val parents =
@@ -709,11 +712,12 @@ object JavaParsers {
List(javaLangObject())
}
val (statics, body) = typeBody(INTERFACE, name, tparams)
- addCompanionObject(statics, atPos(offset) {
+ val iface = atPos(start, nameOffset) {
TypeDef(
name, tparams,
makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.Trait | Flags.JavaInterface | Flags.Abstract)
- })
+ }
+ addCompanionObject(statics, iface)
}
def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = {
@@ -728,7 +732,8 @@ object JavaParsers {
val statics = new ListBuffer[Tree]
val members = new ListBuffer[Tree]
while (in.token != RBRACE && in.token != EOF) {
- var mods = modifiers(inInterface)
+ val start = in.offset
+ var mods = atPos(start) { modifiers(inInterface) }
if (in.token == LBRACE) {
skipAhead() // skip init block, we just assume we have seen only static
accept(RBRACE)
@@ -736,7 +741,7 @@ object JavaParsers {
in.nextToken()
} else {
if (in.token == ENUM || definesInterface(in.token)) mods |= Flags.JavaStatic
- val decls = memberDecl(mods, parentToken, parentTParams)
+ val decls = memberDecl(start, mods, parentToken, parentTParams)
(if ((mods is Flags.JavaStatic) || inInterface && !(decls exists (_.isInstanceOf[DefDef])))
statics
else
@@ -759,10 +764,10 @@ object JavaParsers {
Select(javaLangDot(nme.annotation), tpnme.Annotation),
scalaAnnotationDot(tpnme.ClassfileAnnotation)
)
- def annotationDecl(mods: Modifiers): List[Tree] = {
+ def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = {
accept(AT)
accept(INTERFACE)
- val offset = in.offset
+ val nameOffset = in.offset
val name = identForType()
val (statics, body) = typeBody(AT, name, List())
val constructorParams = body.collect {
@@ -772,14 +777,15 @@ object JavaParsers {
List(), List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined))
val body1 = body.filterNot(_.isInstanceOf[DefDef])
val templ = makeTemplate(annotationParents, constr :: body1, List(), false)
- addCompanionObject(statics, atPos(offset) {
+ val annot = atPos(start, nameOffset) {
TypeDef(name, templ).withMods(mods | Flags.Abstract)
- })
+ }
+ addCompanionObject(statics, annot)
}
- def enumDecl(mods: Modifiers): List[Tree] = {
+ def enumDecl(start: Offset, mods: Modifiers): List[Tree] = {
accept(ENUM)
- val offset = in.offset
+ val nameOffset = in.offset
val name = identForType()
def enumType = Ident(name)
val interfaces = interfacesOpt()
@@ -822,10 +828,11 @@ object JavaParsers {
val superclazz = Apply(TypeApply(
Select(New(javaLangDot(tpnme.Enum)), nme.CONSTRUCTOR), List(enumType)),
List(Literal(Constant(null)),Literal(Constant(0))))
- addCompanionObject(consts ::: statics ::: predefs, atPos(offset) {
+ val enum = atPos(start, nameOffset) {
TypeDef(name, List(),
makeTemplate(superclazz :: interfaces, body, List(), true)).withMods(mods | Flags.Enum)
- })
+ }
+ addCompanionObject(consts ::: statics ::: predefs, enum)
}
def enumConst(enumType: Tree) = {
@@ -846,22 +853,21 @@ object JavaParsers {
}
}
- def typeDecl(mods: Modifiers): List[Tree] = in.token match {
- case ENUM => enumDecl(mods)
- case INTERFACE => interfaceDecl(mods)
- case AT => annotationDecl(mods)
- case CLASS => classDecl(mods)
+ def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match {
+ case ENUM => enumDecl(start, mods)
+ case INTERFACE => interfaceDecl(start, mods)
+ case AT => annotationDecl(start, mods)
+ case CLASS => classDecl(start, mods)
case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree)
}
/** CompilationUnit ::= [package QualId semi] TopStatSeq
*/
def compilationUnit(): Tree = {
- var offset = in.offset
+ val start = in.offset
val pkg: RefTree =
if (in.token == AT || in.token == PACKAGE) {
annotations()
- offset = in.offset
accept(PACKAGE)
val pkg = qualId()
accept(SEMI)
@@ -878,13 +884,15 @@ object JavaParsers {
buf ++= importDecl()
while (in.token != EOF && in.token != RBRACE) {
while (in.token == SEMI) in.nextToken()
- if (in.token != EOF)
- buf ++= typeDecl(modifiers(inInterface = false))
+ if (in.token != EOF) {
+ val start = in.offset
+ val mods = atPos(start) { modifiers(inInterface = false) }
+ buf ++= typeDecl(start, mods)
+ }
}
+ val unit = atPos(start) { PackageDef(pkg, buf.toList) }
accept(EOF)
- atPos(offset) {
- makePackaging(pkg, buf.toList)
- }
+ unit
}
}
}
diff --git a/src/dotty/tools/dotc/parsing/JavaScanners.scala b/src/dotty/tools/dotc/parsing/JavaScanners.scala
index faac8e163..83e16627c 100644
--- a/src/dotty/tools/dotc/parsing/JavaScanners.scala
+++ b/src/dotty/tools/dotc/parsing/JavaScanners.scala
@@ -27,6 +27,7 @@ object JavaScanners {
def nextToken(): Unit = {
if (next.token == EMPTY) {
+ lastOffset = lastCharOffset
fetchToken()
}
else {
diff --git a/src/dotty/tools/dotc/parsing/Parsers.scala b/src/dotty/tools/dotc/parsing/Parsers.scala
index ea9da8db9..507a2e80c 100644
--- a/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -19,13 +19,16 @@ import StdNames._
import util.Positions._
import Constants._
import ScriptParsers._
-import annotation.switch
+import Comments._
+import scala.annotation.{tailrec, switch}
import util.DotClass
import rewrite.Rewrites.patch
object Parsers {
import ast.untpd._
+ import reporting.diagnostic.Message
+ import reporting.diagnostic.messages._
case class OpInfo(operand: Tree, operator: Name, offset: Offset)
@@ -58,19 +61,30 @@ object Parsers {
/* ------------- POSITIONS ------------------------------------------- */
+ /** Positions tree.
+ * If `t` does not have a position yet, set its position to the given one.
+ */
+ def atPos[T <: Positioned](pos: Position)(t: T): T =
+ if (t.pos.isSourceDerived) t else t.withPos(pos)
+
def atPos[T <: Positioned](start: Offset, point: Offset, end: Offset)(t: T): T =
atPos(Position(start, end, point))(t)
+ /** If the last read offset is strictly greater than `start`, position tree
+ * to position spanning from `start` to last read offset, with given point.
+ * If the last offset is less than or equal to start, the tree `t` did not
+ * consume any source for its construction. In this case, don't position it yet,
+ * but wait for its position to be determined by `setChildPositions` when the
+ * parent node is positioned.
+ */
def atPos[T <: Positioned](start: Offset, point: Offset)(t: T): T =
- atPos(start, point, in.lastOffset max start)(t)
+ if (in.lastOffset > start) atPos(start, point, in.lastOffset)(t) else t
def atPos[T <: Positioned](start: Offset)(t: T): T =
atPos(start, start)(t)
- def atPos[T <: Positioned](pos: Position)(t: T): T =
- if (t.pos.isSourceDerived) t else t.withPos(pos)
-
- def tokenRange = Position(in.offset, in.lastCharOffset, in.offset)
+ def nameStart: Offset =
+ if (in.token == BACKQUOTED_IDENT) in.offset + 1 else in.offset
def sourcePos(off: Int = in.offset): SourcePosition =
source atPos Position(off)
@@ -85,7 +99,7 @@ object Parsers {
/** Issue an error at given offset if beyond last error offset
* and update lastErrorOffset.
*/
- def syntaxError(msg: String, offset: Int = in.offset): Unit =
+ def syntaxError(msg: Message, offset: Int = in.offset): Unit =
if (offset > lastErrorOffset) {
syntaxError(msg, Position(offset))
lastErrorOffset = in.offset
@@ -94,7 +108,7 @@ object Parsers {
/** Unconditionally issue an error at given position, without
* updating lastErrorOffset.
*/
- def syntaxError(msg: String, pos: Position): Unit =
+ def syntaxError(msg: Message, pos: Position): Unit =
ctx.error(msg, source atPos pos)
}
@@ -201,20 +215,23 @@ object Parsers {
}
}
- def warning(msg: String, offset: Int = in.offset) =
+ def warning(msg: Message, sourcePos: SourcePosition) =
+ ctx.warning(msg, sourcePos)
+
+ def warning(msg: Message, offset: Int = in.offset) =
ctx.warning(msg, source atPos Position(offset))
- def deprecationWarning(msg: String, offset: Int = in.offset) =
+ def deprecationWarning(msg: Message, offset: Int = in.offset) =
ctx.deprecationWarning(msg, source atPos Position(offset))
/** Issue an error at current offset taht input is incomplete */
- def incompleteInputError(msg: String) =
+ def incompleteInputError(msg: Message) =
ctx.incompleteInputError(msg, source atPos Position(in.offset))
/** If at end of file, issue an incompleteInputError.
* Otherwise issue a syntax error and skip to next safe point.
*/
- def syntaxErrorOrIncomplete(msg: String) =
+ def syntaxErrorOrIncomplete(msg: Message) =
if (in.token == EOF) incompleteInputError(msg)
else {
syntaxError(msg)
@@ -223,10 +240,14 @@ object Parsers {
} // DEBUG
private def expectedMsg(token: Int): String =
- showToken(token) + " expected but " + showToken(in.token) + " found."
+ expectedMessage(showToken(token))
+ private def expectedMessage(what: String): String =
+ s"$what expected but ${showToken(in.token)} found"
/** Consume one token of the specified type, or
* signal an error if it is not there.
+ *
+ * @return The offset at the start of the token to accept
*/
def accept(token: Int): Int = {
val offset = in.offset
@@ -234,7 +255,7 @@ object Parsers {
syntaxErrorOrIncomplete(expectedMsg(token))
}
if (in.token == token) in.nextToken()
- in.offset
+ offset
}
/** semi = nl {nl} | `;'
@@ -259,21 +280,12 @@ object Parsers {
} finally inFunReturnType = saved
}
- private val isScala2Mode =
- ctx.settings.language.value.contains(nme.Scala2.toString)
-
def migrationWarningOrError(msg: String, offset: Int = in.offset) =
- if (isScala2Mode)
+ if (in.isScala2Mode)
ctx.migrationWarning(msg, source atPos Position(offset))
else
syntaxError(msg, offset)
- /** Cannot use ctx.featureEnabled because accessing the context would force too much */
- private def testScala2Mode(msg: String, pos: Position = Position(in.offset)) = {
- if (isScala2Mode) ctx.migrationWarning(msg, source atPos pos)
- isScala2Mode
- }
-
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
/** Convert tree to formal parameter list
@@ -292,7 +304,7 @@ object Parsers {
case Typed(Ident(name), tpt) =>
makeParameter(name.asTermName, tpt, mods) withPos tree.pos
case _ =>
- syntaxError(s"not a legal $expected (${tree.getClass})", tree.pos)
+ syntaxError(s"not a legal $expected", tree.pos)
makeParameter(nme.ERROR, tree, mods)
}
@@ -308,8 +320,6 @@ object Parsers {
tree
}
- def emptyConstructor() = atPos(in.offset) { ast.untpd.emptyConstructor }
-
/* --------------- PLACEHOLDERS ------------------------------------------- */
/** The implicit parameters introduced by `_` in the current expression.
@@ -596,30 +606,30 @@ object Parsers {
}
private def interpolatedString(inPattern: Boolean = false): Tree = atPos(in.offset) {
- val partsBuf = new ListBuffer[Literal]
- val exprBuf = new ListBuffer[Tree]
+ val segmentBuf = new ListBuffer[Tree]
val interpolator = in.name
in.nextToken()
while (in.token == STRINGPART) {
- partsBuf += literal().asInstanceOf[Literal]
- exprBuf += atPos(in.offset) {
- if (in.token == IDENTIFIER)
- termIdent()
- else if (in.token == THIS) {
- in.nextToken()
- This(tpnme.EMPTY)
- }
- else if (in.token == LBRACE)
- if (inPattern) Block(Nil, inBraces(pattern()))
- else expr()
- else {
- syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected")
- EmptyTree
- }
- }
+ segmentBuf += Thicket(
+ literal(),
+ atPos(in.offset) {
+ if (in.token == IDENTIFIER)
+ termIdent()
+ else if (in.token == THIS) {
+ in.nextToken()
+ This(tpnme.EMPTY)
+ }
+ else if (in.token == LBRACE)
+ if (inPattern) Block(Nil, inBraces(pattern()))
+ else expr()
+ else {
+ syntaxErrorOrIncomplete("error in interpolated string: identifier or block expected")
+ EmptyTree
+ }
+ })
}
- if (in.token == STRINGLIT) partsBuf += literal().asInstanceOf[Literal]
- InterpolatedString(interpolator, partsBuf.toList, exprBuf.toList)
+ if (in.token == STRINGLIT) segmentBuf += literal()
+ InterpolatedString(interpolator, segmentBuf.toList)
}
/* ------------- NEW LINES ------------------------------------------------- */
@@ -644,11 +654,24 @@ object Parsers {
}
/* ------------- TYPES ------------------------------------------------------ */
+ /** Same as [[typ]], but if this results in a wildcard it emits a syntax error and
+ * returns a tree for type `Any` instead.
+ */
+ def toplevelTyp(): Tree = {
+ val t = typ()
+ findWildcardType(t) match {
+ case Some(wildcardPos) =>
+ syntaxError("unbound wildcard type", wildcardPos)
+ scalaAny
+ case None => t
+ }
+ }
- /** Type ::= FunArgTypes `=>' Type
- * | InfixType
+ /** Type ::= FunArgTypes `=>' Type
+ * | HkTypeParamClause `->' Type
+ * | InfixType
* FunArgTypes ::= InfixType
- * | `(' [ FunArgType {`,' FunArgType } ] `)'
+ * | `(' [ FunArgType {`,' FunArgType } ] `)'
*/
def typ(): Tree = {
val start = in.offset
@@ -675,6 +698,13 @@ object Parsers {
}
}
}
+ else if (in.token == LBRACKET) {
+ val start = in.offset
+ val tparams = typeParamClause(ParamOwner.TypeParam)
+ if (in.token == ARROW)
+ atPos(start, in.skipToken())(PolyTypeTree(tparams, typ()))
+ else { accept(ARROW); typ() }
+ }
else infixType()
in.token match {
@@ -707,7 +737,7 @@ object Parsers {
def withTypeRest(t: Tree): Tree =
if (in.token == WITH) {
- deprecationWarning("`with' as a type operator has been deprecated; use `&' instead")
+ deprecationWarning(DeprecatedWithOperator())
in.nextToken()
AndTypeTree(t, withType())
}
@@ -718,7 +748,7 @@ object Parsers {
def annotType(): Tree = annotTypeRest(simpleType())
def annotTypeRest(t: Tree): Tree =
- if (in.token == AT) annotTypeRest(atPos(t.pos.start) { Annotated(annot(), t) })
+ if (in.token == AT) annotTypeRest(atPos(t.pos.start) { Annotated(t, annot()) })
else t
/** SimpleType ::= SimpleType TypeArgs
@@ -726,6 +756,7 @@ object Parsers {
* | StableId
* | Path `.' type
* | `(' ArgTypes `)'
+ * | `_' TypeBounds
* | Refinement
* | Literal
*/
@@ -735,6 +766,10 @@ object Parsers {
else if (in.token == LBRACE)
atPos(in.offset) { RefinedTypeTree(EmptyTree, refinement()) }
else if (isSimpleLiteral) { SingletonTypeTree(literal()) }
+ else if (in.token == USCORE) {
+ val start = in.skipToken()
+ typeBounds().withPos(Position(start, in.lastOffset, start))
+ }
else path(thisOK = false, handleSingletonType) match {
case r @ SingletonTypeTree(_) => r
case r => convertToTypeId(r)
@@ -756,28 +791,19 @@ object Parsers {
private def typeProjection(t: Tree): Tree = {
accept(HASH)
val id = typeIdent()
- atPos(t.pos.start, id.pos.start) { SelectFromTypeTree(t, id.name) }
+ atPos(t.pos.start, id.pos.start) { Select(t, id.name) }
}
- /** ArgType ::= Type | `_' TypeBounds
- */
- val argType = () =>
- if (in.token == USCORE) {
- val start = in.skipToken()
- typeBounds().withPos(Position(start, in.offset, start))
- }
- else typ()
-
- /** NamedTypeArg ::= id `=' ArgType
+ /** NamedTypeArg ::= id `=' Type
*/
val namedTypeArg = () => {
val name = ident()
accept(EQUALS)
- NamedArg(name.toTypeName, argType())
+ NamedArg(name.toTypeName, typ())
}
- /** ArgTypes ::= ArgType {`,' ArgType}
- * NamedTypeArg {`,' NamedTypeArg}
+ /** ArgTypes ::= Type {`,' Type}
+ * | NamedTypeArg {`,' NamedTypeArg}
*/
def argTypes(namedOK: Boolean = false) = {
def otherArgs(first: Tree, arg: () => Tree): List[Tree] = {
@@ -790,22 +816,22 @@ object Parsers {
first :: rest
}
if (namedOK && in.token == IDENTIFIER)
- argType() match {
+ typ() match {
case Ident(name) if in.token == EQUALS =>
in.nextToken()
- otherArgs(NamedArg(name, argType()), namedTypeArg)
+ otherArgs(NamedArg(name, typ()), namedTypeArg)
case firstArg =>
if (in.token == EQUALS) println(s"??? $firstArg")
- otherArgs(firstArg, argType)
+ otherArgs(firstArg, typ)
}
- else commaSeparated(argType)
+ else commaSeparated(typ)
}
- /** FunArgType ::= ArgType | `=>' ArgType
+ /** FunArgType ::= Type | `=>' Type
*/
val funArgType = () =>
- if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(argType()) }
- else argType()
+ if (in.token == ARROW) atPos(in.skipToken()) { ByNameTypeTree(typ()) }
+ else typ()
/** ParamType ::= [`=>'] ParamValueType
*/
@@ -816,14 +842,14 @@ object Parsers {
/** ParamValueType ::= Type [`*']
*/
def paramValueType(): Tree = {
- val t = typ()
+ val t = toplevelTyp()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
atPos(t.pos.start) { PostfixOp(t, nme.raw.STAR) }
} else t
}
- /** TypeArgs ::= `[' ArgType {`,' ArgType} `]'
+ /** TypeArgs ::= `[' Type {`,' Type} `]'
* NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]'
*/
def typeArgs(namedOK: Boolean = false): List[Tree] = inBrackets(argTypes(namedOK))
@@ -838,7 +864,7 @@ object Parsers {
atPos(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) }
private def bound(tok: Int): Tree =
- if (in.token == tok) { in.nextToken(); typ() }
+ if (in.token == tok) { in.nextToken(); toplevelTyp() }
else EmptyTree
/** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type}
@@ -846,25 +872,26 @@ object Parsers {
def typeParamBounds(pname: TypeName): Tree = {
val t = typeBounds()
val cbs = contextBounds(pname)
- if (cbs.isEmpty) t else atPos(t.pos.start) { ContextBounds(t, cbs) }
+ if (cbs.isEmpty) t
+ else atPos((t.pos union cbs.head.pos).start) { ContextBounds(t, cbs) }
}
def contextBounds(pname: TypeName): List[Tree] = in.token match {
case COLON =>
atPos(in.skipToken) {
- AppliedTypeTree(typ(), Ident(pname))
+ AppliedTypeTree(toplevelTyp(), Ident(pname))
} :: contextBounds(pname)
case VIEWBOUND =>
deprecationWarning("view bounds `<%' are deprecated, use a context bound `:' instead")
atPos(in.skipToken) {
- Function(Ident(pname) :: Nil, typ())
+ Function(Ident(pname) :: Nil, toplevelTyp())
} :: contextBounds(pname)
case _ =>
Nil
}
def typedOpt(): Tree =
- if (in.token == COLON) { in.nextToken(); typ() }
+ if (in.token == COLON) { in.nextToken(); toplevelTyp() }
else TypeTree()
def typeDependingOn(location: Location.Value): Tree =
@@ -872,6 +899,17 @@ object Parsers {
else if (location == Location.InPattern) refinedType()
else infixType()
+ /** Checks whether `t` is a wildcard type.
+ * If it is, returns the [[Position]] where the wildcard occurs.
+ */
+ @tailrec
+ private final def findWildcardType(t: Tree): Option[Position] = t match {
+ case TypeBoundsTree(_, _) => Some(t.pos)
+ case Parens(t1) => findWildcardType(t1)
+ case Annotated(t1, _) => findWildcardType(t1)
+ case _ => None
+ }
+
/* ----------- EXPRESSIONS ------------------------------------------------ */
/** EqualsExpr ::= `=' Expr
@@ -941,7 +979,7 @@ object Parsers {
else
try
if (placeholderParams.isEmpty) t
- else Function(placeholderParams.reverse, t)
+ else new WildcardFunction(placeholderParams.reverse, t)
finally placeholderParams = saved
}
@@ -971,16 +1009,35 @@ object Parsers {
DoWhile(body, cond)
}
case TRY =>
+ val tryOffset = in.offset
atPos(in.skipToken()) {
val body = expr()
- val handler =
+ val (handler, handlerStart) =
if (in.token == CATCH) {
+ val pos = in.offset
in.nextToken()
- expr()
- } else EmptyTree
+ (expr(), pos)
+ } else (EmptyTree, -1)
+
+ handler match {
+ case Block(Nil, EmptyTree) =>
+ assert(handlerStart != -1)
+ syntaxError(
+ new EmptyCatchBlock(body),
+ Position(handlerStart, handler.pos.end)
+ )
+ case _ =>
+ }
+
val finalizer =
- if (handler.isEmpty || in.token == FINALLY) { accept(FINALLY); expr() }
- else EmptyTree
+ if (in.token == FINALLY) { accept(FINALLY); expr() }
+ else {
+ if (handler.isEmpty) warning(
+ EmptyCatchAndFinallyBlock(body),
+ source atPos Position(tryOffset, body.pos.end)
+ )
+ EmptyTree
+ }
ParsedTry(body, handler, finalizer)
}
case THROW =>
@@ -1019,18 +1076,19 @@ object Parsers {
val uscoreStart = in.skipToken()
if (isIdent(nme.raw.STAR)) {
in.nextToken()
- if (in.token != RPAREN) syntaxError("`_*' can be used only for last argument")
+ if (in.token != RPAREN) syntaxError("`_*' can be used only for last argument", uscoreStart)
Typed(t, atPos(uscoreStart) { Ident(tpnme.WILDCARD_STAR) })
} else {
syntaxErrorOrIncomplete("`*' expected"); t
}
case AT if location != Location.InPattern =>
- (t /: annotations()) ((t, annot) => Annotated(annot, t))
+ (t /: annotations())(Annotated)
case _ =>
val tpt = typeDependingOn(location)
if (isWildcard(t) && location != Location.InPattern) {
val vd :: rest = placeholderParams
- placeholderParams = cpy.ValDef(vd)(tpt = tpt) :: rest
+ placeholderParams =
+ cpy.ValDef(vd)(tpt = tpt).withPos(vd.pos union tpt.pos) :: rest
}
Typed(t, tpt)
}
@@ -1109,12 +1167,12 @@ object Parsers {
case NEW =>
canApply = false
val start = in.skipToken()
- val (impl, missingBody) = template(emptyConstructor())
+ val (impl, missingBody) = template(emptyConstructor)
impl.parents match {
case parent :: Nil if missingBody =>
if (parent.isType) ensureApplied(wrapNew(parent)) else parent
case _ =>
- New(impl)
+ New(impl.withPos(Position(start, in.lastOffset)))
}
case _ =>
if (isLiteral) literal()
@@ -1317,15 +1375,15 @@ object Parsers {
*/
val pattern2 = () => infixPattern() match {
case p @ Ident(name) if isVarPattern(p) && in.token == AT =>
- val pos = in.skipToken()
+ val offset = in.skipToken()
// compatibility for Scala2 `x @ _*` syntax
infixPattern() match {
case pt @ Ident(tpnme.WILDCARD_STAR) =>
migrationWarningOrError("The syntax `x @ _*' is no longer supported; use `x : _*' instead", p.pos.start)
- atPos(p.pos.start, pos) { Typed(p, pt) }
+ atPos(p.pos.start, offset) { Typed(p, pt) }
case p =>
- atPos(p.pos.start, pos) { Bind(name, p) }
+ atPos(p.pos.start, offset) { Bind(name, p) }
}
case p @ Ident(tpnme.WILDCARD_STAR) =>
// compatibility for Scala2 `_*` syntax
@@ -1410,6 +1468,7 @@ object Parsers {
case ABSTRACT => Abstract
case FINAL => Final
case IMPLICIT => ImplicitCommon
+ case INLINE => Inline
case LAZY => Lazy
case OVERRIDE => Override
case PRIVATE => Private
@@ -1513,7 +1572,10 @@ object Parsers {
/** Annotation ::= `@' SimpleType {ParArgumentExprs}
*/
def annot() =
- adjustStart(accept(AT)) { ensureApplied(parArgumentExprss(wrapNew(simpleType()))) }
+ adjustStart(accept(AT)) {
+ if (in.token == INLINE) in.token = BACKQUOTED_IDENT // allow for now
+ ensureApplied(parArgumentExprss(wrapNew(simpleType())))
+ }
def annotations(skipNewLines: Boolean = false): List[Tree] = {
if (skipNewLines) newLineOptWhenFollowedBy(AT)
@@ -1540,17 +1602,17 @@ object Parsers {
* TypTypeParam ::= {Annotation} Id [HkTypePamClause] TypeBounds
*
* HkTypeParamClause ::= `[' HkTypeParam {`,' HkTypeParam} `]'
- * HkTypeParam ::= {Annotation} ['+' | `-'] (Id | _') TypeBounds
+ * HkTypeParam ::= {Annotation} ['+' | `-'] (Id[HkTypePamClause] | _') TypeBounds
*/
def typeParamClause(ownerKind: ParamOwner.Value): List[TypeDef] = inBrackets {
def typeParam(): TypeDef = {
val isConcreteOwner = ownerKind == ParamOwner.Class || ownerKind == ParamOwner.Def
- val modStart = in.offset
+ val start = in.offset
var mods = annotsAsMods()
if (ownerKind == ParamOwner.Class) {
mods = modifiers(start = mods)
mods =
- atPos(modStart, in.offset) {
+ atPos(start, in.offset) {
if (in.token == TYPE) {
in.nextToken()
mods | Param | ParamAccessor
@@ -1560,22 +1622,20 @@ object Parsers {
}
}
}
- else mods = atPos(modStart) (mods | Param)
+ else mods = atPos(start) (mods | Param)
if (ownerKind != ParamOwner.Def) {
if (isIdent(nme.raw.PLUS)) mods |= Covariant
else if (isIdent(nme.raw.MINUS)) mods |= Contravariant
if (mods is VarianceFlags) in.nextToken()
}
- atPos(tokenRange) {
+ atPos(start, nameStart) {
val name =
if (isConcreteOwner || in.token != USCORE) ident().toTypeName
else {
in.nextToken()
ctx.freshName(nme.USCORE_PARAM_PREFIX).toTypeName
}
- val hkparams =
- if (ownerKind == ParamOwner.TypeParam) Nil
- else typeParamClauseOpt(ParamOwner.TypeParam)
+ val hkparams = typeParamClauseOpt(ParamOwner.TypeParam)
val bounds =
if (isConcreteOwner) typeParamBounds(name)
else typeBounds()
@@ -1591,23 +1651,24 @@ object Parsers {
/** ClsParamClauses ::= {ClsParamClause} [[nl] `(' `implicit' ClsParams `)']
* ClsParamClause ::= [nl] `(' [ClsParams] ')'
* ClsParams ::= ClsParam {`' ClsParam}
- * ClsParam ::= {Annotation} [{Modifier} (`val' | `var')] id `:' ParamType [`=' Expr]
+ * ClsParam ::= {Annotation} [{Modifier} (`val' | `var') | `inline'] Param
* DefParamClauses ::= {DefParamClause} [[nl] `(' `implicit' DefParams `)']
* DefParamClause ::= [nl] `(' [DefParams] ')'
* DefParams ::= DefParam {`,' DefParam}
- * DefParam ::= {Annotation} id `:' ParamType [`=' Expr]
- */
+ * DefParam ::= {Annotation} [`inline'] Param
+ * Param ::= id `:' ParamType [`=' Expr]
+ */
def paramClauses(owner: Name, ofCaseClass: Boolean = false): List[List[ValDef]] = {
var implicitFlag = EmptyFlags
var firstClauseOfCaseClass = ofCaseClass
var implicitOffset = -1 // use once
def param(): ValDef = {
- val modStart = in.offset
+ val start = in.offset
var mods = annotsAsMods()
if (owner.isTypeName) {
mods = modifiers(start = mods) | ParamAccessor
mods =
- atPos(modStart, in.offset) {
+ atPos(start, in.offset) {
if (in.token == VAL) {
in.nextToken()
mods
@@ -1615,13 +1676,17 @@ object Parsers {
in.nextToken()
addFlag(mods, Mutable)
} else {
- if (!(mods.flags &~ ParamAccessor).isEmpty) syntaxError("`val' or `var' expected")
+ if (!(mods.flags &~ (ParamAccessor | Inline)).isEmpty)
+ syntaxError("`val' or `var' expected")
if (firstClauseOfCaseClass) mods else mods | PrivateLocal
}
}
}
- else mods = atPos(modStart) { mods | Param }
- atPos(tokenRange) {
+ else {
+ if (in.token == INLINE) mods = addModifier(mods)
+ mods = atPos(start) { mods | Param }
+ }
+ atPos(start, nameStart) {
val name = ident()
val tpt =
if (ctx.settings.YmethodInfer.value && owner.isTermName && in.token != COLON) {
@@ -1640,7 +1705,7 @@ object Parsers {
if (in.token == EQUALS) { in.nextToken(); expr() }
else EmptyTree
if (implicitOffset >= 0) {
- mods = mods.withPos(mods.pos.withStart(implicitOffset))
+ mods = mods.withPos(mods.pos.union(Position(implicitOffset, implicitOffset)))
implicitOffset = -1
}
ValDef(name, tpt, default).withMods(addFlag(mods, implicitFlag))
@@ -1731,12 +1796,16 @@ object Parsers {
val from = termIdentOrWildcard()
if (from.name != nme.WILDCARD && in.token == ARROW)
atPos(from.pos.start, in.skipToken()) {
- Pair(from, termIdentOrWildcard())
+ Thicket(from, termIdentOrWildcard())
}
else from
}
- def posMods(start: Int, mods: Modifiers) = atPos(start, in.skipToken())(mods)
+ def posMods(start: Int, mods: Modifiers) = {
+ val mods1 = atPos(start)(mods)
+ in.nextToken()
+ mods1
+ }
/** Def ::= val PatDef
* | var VarDef
@@ -1750,13 +1819,13 @@ object Parsers {
*/
def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match {
case VAL =>
- patDefOrDcl(posMods(start, mods))
+ patDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
case VAR =>
- patDefOrDcl(posMods(start, addFlag(mods, Mutable)))
+ patDefOrDcl(start, posMods(start, addFlag(mods, Mutable)), in.getDocComment(start))
case DEF =>
- defDefOrDcl(posMods(start, mods))
+ defDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
case TYPE =>
- typeDefOrDcl(posMods(start, mods))
+ typeDefOrDcl(start, posMods(start, mods), in.getDocComment(start))
case _ =>
tmplDef(start, mods)
}
@@ -1766,7 +1835,7 @@ object Parsers {
* ValDcl ::= Id {`,' Id} `:' Type
* VarDcl ::= Id {`,' Id} `:' Type
*/
- def patDefOrDcl(mods: Modifiers): Tree = {
+ def patDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
val lhs = commaSeparated(pattern2)
val tpt = typedOpt()
val rhs =
@@ -1780,8 +1849,10 @@ object Parsers {
}
} else EmptyTree
lhs match {
- case (id @ Ident(name: TermName)) :: Nil => cpy.ValDef(id)(name, tpt, rhs).withMods(mods)
- case _ => PatDef(mods, lhs, tpt, rhs)
+ case (id @ Ident(name: TermName)) :: Nil => {
+ ValDef(name, tpt, rhs).withMods(mods).setComment(docstring)
+ } case _ =>
+ PatDef(mods, lhs, tpt, rhs)
}
}
@@ -1790,12 +1861,12 @@ object Parsers {
* DefDcl ::= DefSig `:' Type
* DefSig ::= id [DefTypeParamClause] ParamClauses
*/
- def defDefOrDcl(mods: Modifiers): Tree = atPos(tokenRange) {
+ def defDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = atPos(start, nameStart) {
def scala2ProcedureSyntax(resultTypeStr: String) = {
val toInsert =
if (in.token == LBRACE) s"$resultTypeStr ="
else ": Unit " // trailing space ensures that `def f()def g()` works.
- testScala2Mode(s"Procedure syntax no longer supported; `$toInsert' should be inserted here") && {
+ in.testScala2Mode(s"Procedure syntax no longer supported; `$toInsert' should be inserted here") && {
patch(source, Position(in.lastOffset), toInsert)
true
}
@@ -1831,7 +1902,7 @@ object Parsers {
accept(EQUALS)
expr()
}
- DefDef(name, tparams, vparamss, tpt, rhs).withMods(mods1)
+ DefDef(name, tparams, vparamss, tpt, rhs).withMods(mods1).setComment(docstring)
}
}
@@ -1865,17 +1936,17 @@ object Parsers {
/** TypeDef ::= type Id [TypeParamClause] `=' Type
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
*/
- def typeDefOrDcl(mods: Modifiers): Tree = {
+ def typeDefOrDcl(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): Tree = {
newLinesOpt()
- atPos(tokenRange) {
+ atPos(start, nameStart) {
val name = ident().toTypeName
val tparams = typeParamClauseOpt(ParamOwner.Type)
in.token match {
case EQUALS =>
in.nextToken()
- TypeDef(name, tparams, typ()).withMods(mods)
+ TypeDef(name, tparams, typ()).withMods(mods).setComment(docstring)
case SUPERTYPE | SUBTYPE | SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | EOF =>
- TypeDef(name, tparams, typeBounds()).withMods(mods)
+ TypeDef(name, tparams, typeBounds()).withMods(mods).setComment(docstring)
case _ =>
syntaxErrorOrIncomplete("`=', `>:', or `<:' expected")
EmptyTree
@@ -1886,35 +1957,40 @@ object Parsers {
/** TmplDef ::= ([`case'] `class' | `trait') ClassDef
* | [`case'] `object' ObjectDef
*/
- def tmplDef(start: Int, mods: Modifiers): Tree = in.token match {
- case TRAIT =>
- classDef(posMods(start, addFlag(mods, Trait)))
- case CLASS =>
- classDef(posMods(start, mods))
- case CASECLASS =>
- classDef(posMods(start, mods | Case))
- case OBJECT =>
- objectDef(posMods(start, mods | Module))
- case CASEOBJECT =>
- objectDef(posMods(start, mods | Case | Module))
- case _ =>
- syntaxErrorOrIncomplete("expected start of definition")
- EmptyTree
+ def tmplDef(start: Int, mods: Modifiers): Tree = {
+ val docstring = in.getDocComment(start)
+ in.token match {
+ case TRAIT =>
+ classDef(start, posMods(start, addFlag(mods, Trait)), docstring)
+ case CLASS =>
+ classDef(start, posMods(start, mods), docstring)
+ case CASECLASS =>
+ classDef(start, posMods(start, mods | Case), docstring)
+ case OBJECT =>
+ objectDef(start, posMods(start, mods | Module), docstring)
+ case CASEOBJECT =>
+ objectDef(start, posMods(start, mods | Case | Module), docstring)
+ case _ =>
+ syntaxErrorOrIncomplete("expected start of definition")
+ EmptyTree
+ }
}
/** ClassDef ::= Id [ClsTypeParamClause]
* [ConstrMods] ClsParamClauses TemplateOpt
*/
- def classDef(mods: Modifiers): TypeDef = atPos(tokenRange) {
+ def classDef(start: Offset, mods: Modifiers, docstring: Option[Comment]): TypeDef = atPos(start, nameStart) {
val name = ident().toTypeName
- val constr = atPos(in.offset) {
+ val constr = atPos(in.lastOffset) {
val tparams = typeParamClauseOpt(ParamOwner.Class)
val cmods = constrModsOpt()
val vparamss = paramClauses(name, mods is Case)
+
makeConstructor(tparams, vparamss).withMods(cmods)
}
val templ = templateOpt(constr)
- TypeDef(name, templ).withMods(mods)
+
+ TypeDef(name, templ).withMods(mods).setComment(docstring)
}
/** ConstrMods ::= AccessModifier
@@ -1930,10 +2006,11 @@ object Parsers {
/** ObjectDef ::= Id TemplateOpt
*/
- def objectDef(mods: Modifiers): ModuleDef = {
+ def objectDef(start: Offset, mods: Modifiers, docstring: Option[Comment] = None): ModuleDef = atPos(start, nameStart) {
val name = ident()
- val template = templateOpt(emptyConstructor())
- ModuleDef(name, template).withMods(mods)
+ val template = templateOpt(emptyConstructor)
+
+ ModuleDef(name, template).withMods(mods).setComment(docstring)
}
/* -------- TEMPLATES ------------------------------------------- */
@@ -1970,12 +2047,12 @@ object Parsers {
else {
newLineOptWhenFollowedBy(LBRACE)
if (in.token == LBRACE) template(constr)._1
- else Template(constr, Nil, EmptyValDef, Nil).withPos(constr.pos.toSynthetic)
+ else Template(constr, Nil, EmptyValDef, Nil)
}
/** TemplateBody ::= [nl] `{' TemplateStatSeq `}'
*/
- def templateBodyOpt(constr: DefDef, parents: List[Tree]) = atPos(constr.pos.start) {
+ def templateBodyOpt(constr: DefDef, parents: List[Tree]) = {
val (self, stats) =
if (in.token == LBRACE) templateBody() else (EmptyValDef, Nil)
Template(constr, parents, self, stats)
@@ -1986,7 +2063,7 @@ object Parsers {
if (in.token == WITH) {
syntaxError("early definitions are not supported; use trait parameters instead")
in.nextToken()
- template(emptyConstructor())
+ template(emptyConstructor)
}
r
}
@@ -2021,7 +2098,7 @@ object Parsers {
if (in.token == PACKAGE) {
val start = in.skipToken()
if (in.token == OBJECT)
- stats += objectDef(atPos(start, in.skipToken()) { Modifiers(Package) })
+ stats += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) })
else stats += packaging(start)
}
else if (in.token == IMPORT)
@@ -2029,7 +2106,10 @@ object Parsers {
else if (in.token == AT || isTemplateIntro || isModifier)
stats += tmplDef(in.offset, defAnnotsMods(modifierTokens))
else if (!isStatSep) {
- syntaxErrorOrIncomplete("expected class or object definition")
+ if (in.token == CASE)
+ syntaxErrorOrIncomplete("only `case class` or `case object` allowed")
+ else
+ syntaxErrorOrIncomplete("expected class or object definition")
if (mustStartStat) // do parse all definitions even if they are probably local (i.e. a "}" has been forgotten)
defOrDcl(in.offset, defAnnotsMods(modifierTokens))
}
@@ -2119,17 +2199,10 @@ object Parsers {
var exitOnError = false
while (!isStatSeqEnd && in.token != CASE && !exitOnError) {
setLastStatOffset()
- if (in.token == IMPORT) {
+ if (in.token == IMPORT)
stats ++= importClause()
- }
- else if (isExprIntro) {
- val t = expr(Location.InBlock)
- stats += t
- t match {
- case _: Function => return stats.toList
- case _ =>
- }
- }
+ else if (isExprIntro)
+ stats += expr(Location.InBlock)
else if (isDefIntro(localModifierTokens))
if (in.token == IMPLICIT) {
val start = in.skipToken()
@@ -2158,7 +2231,8 @@ object Parsers {
if (in.token == PACKAGE) {
in.nextToken()
if (in.token == OBJECT) {
- ts += objectDef(atPos(start, in.skipToken()) { Modifiers(Package) })
+ val docstring = in.getDocComment(start)
+ ts += objectDef(start, atPos(start, in.skipToken()) { Modifiers(Package) }, docstring)
if (in.token != EOF) {
acceptStatSep()
ts ++= topStatSeq()
diff --git a/src/dotty/tools/dotc/parsing/Scanners.scala b/src/dotty/tools/dotc/parsing/Scanners.scala
index 489038f1e..60003d098 100644
--- a/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -3,7 +3,7 @@ package dotc
package parsing
import core.Names._, core.Contexts._, core.Decorators._, util.Positions._
-import core.StdNames._
+import core.StdNames._, core.Comments._
import util.SourceFile
import java.lang.Character.isDigit
import scala.reflect.internal.Chars._
@@ -12,7 +12,7 @@ import scala.annotation.{ switch, tailrec }
import scala.collection.mutable
import mutable.ListBuffer
import Utility.isNameStart
-
+import rewrite.Rewrites.patch
object Scanners {
@@ -22,10 +22,6 @@ object Scanners {
/** An undefined offset */
val NoOffset: Offset = -1
- case class Comment(pos: Position, chrs: String) {
- def isDocComment = chrs.startsWith("/**")
- }
-
type Token = Int
trait TokenData {
@@ -108,6 +104,7 @@ object Scanners {
target.token = toToken(idx)
}
}
+
def toToken(idx: Int): Token
/** Clear buffer and set string */
@@ -175,18 +172,59 @@ object Scanners {
}
class Scanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) {
- var keepComments = false
+ val keepComments = ctx.settings.YkeepComments.value
- /** All comments in the reverse order of their position in the source.
- * set only when `keepComments` is true.
+ /** All doc comments as encountered, each list contains doc comments from
+ * the same block level. Starting with the deepest level and going upward
*/
- var revComments: List[Comment] = Nil
+ private[this] var docsPerBlockStack: List[List[Comment]] = List(Nil)
+
+ /** Adds level of nesting to docstrings */
+ def enterBlock(): Unit =
+ docsPerBlockStack = List(Nil) ::: docsPerBlockStack
+
+ /** Removes level of nesting for docstrings */
+ def exitBlock(): Unit = docsPerBlockStack = docsPerBlockStack match {
+ case x :: Nil => List(Nil)
+ case _ => docsPerBlockStack.tail
+ }
+
+ /** Returns the closest docstring preceding the position supplied */
+ def getDocComment(pos: Int): Option[Comment] = {
+ def closest(c: Comment, docstrings: List[Comment]): Comment = docstrings match {
+ case x :: xs if (c.pos.end < x.pos.end && x.pos.end <= pos) => closest(x, xs)
+ case Nil => c
+ }
+
+ docsPerBlockStack match {
+ case (list @ (x :: xs)) :: _ => {
+ val c = closest(x, xs)
+ docsPerBlockStack = list.dropWhile(_ != c).tail :: docsPerBlockStack.tail
+ Some(c)
+ }
+ case _ => None
+ }
+ }
/** A buffer for comments */
val commentBuf = new StringBuilder
+ private def handleMigration(keyword: Token): Token =
+ if (!isScala2Mode) keyword
+ else if (keyword == INLINE) treatAsIdent()
+ else keyword
+
+
+ private def treatAsIdent() = {
+ testScala2Mode(i"$name is now a keyword, write `$name` instead of $name to keep it as an identifier")
+ patch(source, Position(offset), "`")
+ patch(source, Position(offset + name.length), "`")
+ IDENTIFIER
+ }
+
def toToken(idx: Int): Token =
- if (idx >= 0 && idx <= lastKeywordStart) kwArray(idx) else IDENTIFIER
+ if (idx >= 0 && idx <= lastKeywordStart) handleMigration(kwArray(idx))
+ else IDENTIFIER
private class TokenData0 extends TokenData
@@ -208,6 +246,16 @@ object Scanners {
*/
var sepRegions: List[Token] = List()
+// Scala 2 compatibility
+
+ val isScala2Mode = ctx.settings.language.value.contains(nme.Scala2.toString)
+
+ /** Cannot use ctx.featureEnabled because accessing the context would force too much */
+ def testScala2Mode(msg: String, pos: Position = Position(offset)) = {
+ if (isScala2Mode) ctx.migrationWarning(msg, source atPos pos)
+ isScala2Mode
+ }
+
// Get next token ------------------------------------------------------------
/** Are we directly in a string interpolation expression?
@@ -487,13 +535,13 @@ object Scanners {
case ',' =>
nextChar(); token = COMMA
case '(' =>
- nextChar(); token = LPAREN
+ enterBlock(); nextChar(); token = LPAREN
case '{' =>
- nextChar(); token = LBRACE
+ enterBlock(); nextChar(); token = LBRACE
case ')' =>
- nextChar(); token = RPAREN
+ exitBlock(); nextChar(); token = RPAREN
case '}' =>
- nextChar(); token = RBRACE
+ exitBlock(); nextChar(); token = RBRACE
case '[' =>
nextChar(); token = LBRACKET
case ']' =>
@@ -558,9 +606,12 @@ object Scanners {
def finishComment(): Boolean = {
if (keepComments) {
val pos = Position(start, charOffset, start)
- nextChar()
- revComments = Comment(pos, flushBuf(commentBuf)) :: revComments
+ val comment = Comment(pos, flushBuf(commentBuf))
+
+ if (comment.isDocComment)
+ docsPerBlockStack = (docsPerBlockStack.head :+ comment) :: docsPerBlockStack.tail
}
+
true
}
nextChar()
@@ -892,8 +943,8 @@ object Scanners {
getFraction()
}
} else (ch: @switch) match {
- case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' if base == 10 =>
- getFraction()
+ case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' =>
+ if (base == 10) getFraction()
case 'l' | 'L' =>
nextChar()
token = LONGLIT
diff --git a/src/dotty/tools/dotc/parsing/Tokens.scala b/src/dotty/tools/dotc/parsing/Tokens.scala
index b490cd133..5324207db 100644
--- a/src/dotty/tools/dotc/parsing/Tokens.scala
+++ b/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -91,6 +91,7 @@ abstract class TokensCommon {
//final val LAZY = 59; enter(LAZY, "lazy")
//final val THEN = 60; enter(THEN, "then")
//final val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
+ //final val INLINE = 62; enter(INLINE, "inline")
/** special symbols */
final val COMMA = 70; enter(COMMA, "','")
@@ -171,6 +172,7 @@ object Tokens extends TokensCommon {
final val LAZY = 59; enter(LAZY, "lazy")
final val THEN = 60; enter(THEN, "then")
final val FORSOME = 61; enter(FORSOME, "forSome") // TODO: deprecate
+ final val INLINE = 62; enter(INLINE, "inline")
/** special symbols */
final val NEWLINE = 78; enter(NEWLINE, "end of statement", "new line")
@@ -188,7 +190,7 @@ object Tokens extends TokensCommon {
/** XML mode */
final val XMLSTART = 96; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate
- final val alphaKeywords = tokenRange(IF, FORSOME)
+ final val alphaKeywords = tokenRange(IF, INLINE)
final val symbolicKeywords = tokenRange(USCORE, VIEWBOUND)
final val symbolicTokens = tokenRange(COMMA, VIEWBOUND)
final val keywords = alphaKeywords | symbolicKeywords
@@ -214,7 +216,7 @@ object Tokens extends TokensCommon {
final val defIntroTokens = templateIntroTokens | dclIntroTokens
final val localModifierTokens = BitSet(
- ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY)
+ ABSTRACT, FINAL, SEALED, IMPLICIT, INLINE, LAZY)
final val accessModifierTokens = BitSet(
PRIVATE, PROTECTED)
diff --git a/src/dotty/tools/dotc/printing/Disambiguation.scala b/src/dotty/tools/dotc/printing/Disambiguation.scala
deleted file mode 100644
index aa3fae2de..000000000
--- a/src/dotty/tools/dotc/printing/Disambiguation.scala
+++ /dev/null
@@ -1,86 +0,0 @@
-package dotty.tools.dotc
-package printing
-
-import core._
-import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Contexts._
-import collection.mutable
-import scala.annotation.switch
-
-object Disambiguation {
-
- private class State {
- var hasConflicts = false
- val symString = new mutable.HashMap[Symbol, String]
- val variants = new mutable.HashMap[String, mutable.ListBuffer[Symbol]]
- }
-
- def newPrinter: Context => RefinedPrinter = {
- val state = new State
- new Printer(state)(_)
- }
-
- private class Printer(state: State)(_ctx: Context) extends RefinedPrinter(_ctx) {
- import state._
-
- override def simpleNameString(sym: Symbol): String = {
- if ((sym is ModuleClass) && sym.sourceModule.exists) simpleNameString(sym.sourceModule)
- else symString.getOrElse(sym, recordedNameString(sym))
- }
-
- private def rawNameString(sym: Symbol) = super.simpleNameString(sym)
-
- private def recordedNameString(sym: Symbol): String = {
- val str = rawNameString(sym)
- val existing = variants.getOrElse(str, new mutable.ListBuffer[Symbol])
- // Dotty deviation: without a type parameter on ListBuffer, inference
- // will compute ListBuffer[Symbol] | ListBuffer[Nothing] as the type of "existing"
- // and then the assignment to variants below will fail.
- // We need to find a way to avoid such useless inferred types.
- if (!(existing contains sym)) {
- hasConflicts |= existing.nonEmpty
- variants(str) = (existing += sym)
- }
- str
- }
-
- def disambiguated(): Boolean = {
- val res = hasConflicts
- while (hasConflicts) disambiguate()
- res
- }
-
- private def qualifiers: Stream[String] =
- Stream("", "(some other)", "(some 3rd)") ++ (Stream.from(4) map (n => s"(some ${n}th)"))
-
- private def disambiguate(): Unit = {
- def update(sym: Symbol, str: String) = if (!(symString contains sym)) symString(sym) = str
- def disambiguated(sym: Symbol, owner: Symbol) = s"${rawNameString(sym)}(in ${simpleNameString(owner)})"
- hasConflicts = false
- for ((name, vs) <- variants.toList)
- if (vs.tail.nonEmpty) {
- for ((owner, syms) <- vs.groupBy(_.effectiveOwner)) {
- if (syms.tail.isEmpty) update(syms.head, disambiguated(syms.head, owner))
- else
- for {
- (kind, syms1) <- syms.groupBy(kindString)
- (sym, qual) <- syms1 zip qualifiers
- } {
- update(sym, s"$qual$kind ${disambiguated(sym, owner)}")
- }
- }
- }
- }
- }
-
- def disambiguated(op: Context => String)(implicit ctx: Context): String = {
- val dctx = ctx.printer match {
- case dp: Printer => ctx
- case _ => ctx.fresh.setPrinterFn(newPrinter)
- }
- val res = op(dctx)
- dctx.printer match {
- case dp: Printer if dp.disambiguated() => op(dctx)
- case _ => res
- }
- }
-}
diff --git a/src/dotty/tools/dotc/printing/Formatting.scala b/src/dotty/tools/dotc/printing/Formatting.scala
new file mode 100644
index 000000000..e7968b14a
--- /dev/null
+++ b/src/dotty/tools/dotc/printing/Formatting.scala
@@ -0,0 +1,258 @@
+package dotty.tools.dotc
+package printing
+
+import core._
+import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Contexts._
+import collection.mutable
+import collection.Map
+import Decorators._
+import scala.annotation.switch
+import scala.util.control.NonFatal
+import reporting.diagnostic.MessageContainer
+import util.DiffUtil
+import Highlighting._
+import SyntaxHighlighting._
+
+object Formatting {
+
+ /** General purpose string formatter, with the following features:
+ *
+ * 1) On all Showables, `show` is called instead of `toString`
+ * 2) Exceptions raised by a `show` are handled by falling back to `toString`.
+ * 3) Sequences can be formatted using the desired separator between two `%` signs,
+ * eg `i"myList = (${myList}%, %)"`
+ * 4) Safe handling of multi-line margins. Left margins are skipped on the parts
+ * of the string context *before* inserting the arguments. That way, we guard
+ * against accidentally treating an interpolated value as a margin.
+ */
+ class StringFormatter(protected val sc: StringContext) {
+
+ protected def showArg(arg: Any)(implicit ctx: Context): String = arg match {
+ case arg: Showable =>
+ try arg.show(ctx.addMode(Mode.FutureDefsOK))
+ catch {
+ case NonFatal(ex) => s"[cannot display due to $ex, raw string = $toString]"
+ }
+ case _ => arg.toString
+ }
+
+ private def treatArg(arg: Any, suffix: String)(implicit ctx: Context): (Any, String) = arg match {
+ case arg: Seq[_] if suffix.nonEmpty && suffix.head == '%' =>
+ val (rawsep, rest) = suffix.tail.span(_ != '%')
+ val sep = StringContext.treatEscapes(rawsep)
+ if (rest.nonEmpty) (arg.map(showArg).mkString(sep), rest.tail)
+ else (arg, suffix)
+ case _ =>
+ (showArg(arg), suffix)
+ }
+
+ def assemble(args: Seq[Any])(implicit ctx: Context): String = {
+ def isLineBreak(c: Char) = c == '\n' || c == '\f' // compatible with StringLike#isLineBreak
+ def stripTrailingPart(s: String) = {
+ val (pre, post) = s.span(c => !isLineBreak(c))
+ pre ++ post.stripMargin
+ }
+ val (prefix, suffixes) = sc.parts.toList match {
+ case head :: tail => (head.stripMargin, tail map stripTrailingPart)
+ case Nil => ("", Nil)
+ }
+ val (args1, suffixes1) = (args, suffixes).zipped.map(treatArg(_, _)).unzip
+ new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*)
+ }
+ }
+
+ /** The `em` string interpolator works like the `i` string interpolator, but marks nonsensical errors
+ * using `<nonsensical>...</nonsensical>` tags.
+ * Note: Instead of these tags, it would be nicer to return a data structure containing the message string
+ * and a boolean indicating whether the message is sensical, but then we cannot use string operations
+ * like concatenation, stripMargin etc on the values returned by em"...", and in the current error
+ * message composition methods, this is crucial.
+ */
+ class ErrorMessageFormatter(sc: StringContext) extends StringFormatter(sc) {
+ override protected def showArg(arg: Any)(implicit ctx: Context): String =
+ wrapNonSensical(arg, super.showArg(arg))
+ }
+
+ class SyntaxFormatter(sc: StringContext) extends StringFormatter(sc) {
+ override protected def showArg(arg: Any)(implicit ctx: Context): String =
+ arg match {
+ case arg: Showable if ctx.settings.color.value != "never" =>
+ val highlighted =
+ SyntaxHighlighting(wrapNonSensical(arg, super.showArg(arg)))
+ new String(highlighted.toArray)
+ case hl: Highlight =>
+ hl.show
+ case hb: HighlightBuffer =>
+ hb.toString
+ case str: String if ctx.settings.color.value != "never" =>
+ new String(SyntaxHighlighting(str).toArray)
+ case _ => super.showArg(arg)
+ }
+ }
+
+ private def wrapNonSensical(arg: Any /* Type | Symbol */, str: String)(implicit ctx: Context): String = {
+ import MessageContainer._
+ def isSensical(arg: Any): Boolean = arg match {
+ case tpe: Type =>
+ tpe.exists && !tpe.isErroneous
+ case sym: Symbol if sym.isCompleted =>
+ sym.info != ErrorType && sym.info != TypeAlias(ErrorType) && sym.info.exists
+ case _ => true
+ }
+
+ if (isSensical(arg)) str
+ else nonSensicalStartTag + str + nonSensicalEndTag
+ }
+
+ private type Recorded = AnyRef /*Symbol | PolyParam*/
+
+ private class Seen extends mutable.HashMap[String, List[Recorded]] {
+
+ override def default(key: String) = Nil
+
+ def record(str: String, entry: Recorded)(implicit ctx: Context): String = {
+ def followAlias(e1: Recorded): Recorded = e1 match {
+ case e1: Symbol if e1.isAliasType =>
+ val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol
+ if (underlying.name == e1.name) underlying else e1
+ case _ => e1
+ }
+ lazy val dealiased = followAlias(entry)
+ var alts = apply(str).dropWhile(alt => dealiased ne followAlias(alt))
+ if (alts.isEmpty) {
+ alts = entry :: apply(str)
+ update(str, alts)
+ }
+ str + "'" * (alts.length - 1)
+ }
+ }
+
+ private class ExplainingPrinter(seen: Seen)(_ctx: Context) extends RefinedPrinter(_ctx) {
+ override def simpleNameString(sym: Symbol): String =
+ if ((sym is ModuleClass) && sym.sourceModule.exists) simpleNameString(sym.sourceModule)
+ else seen.record(super.simpleNameString(sym), sym)
+
+ override def polyParamNameString(param: PolyParam): String =
+ seen.record(super.polyParamNameString(param), param)
+ }
+
+ /** Create explanation for single `Recorded` type or symbol */
+ def explanation(entry: AnyRef)(implicit ctx: Context): String = {
+ def boundStr(bound: Type, default: ClassSymbol, cmp: String) =
+ if (bound.isRef(default)) "" else i"$cmp $bound"
+
+ def boundsStr(bounds: TypeBounds): String = {
+ val lo = boundStr(bounds.lo, defn.NothingClass, ">:")
+ val hi = boundStr(bounds.hi, defn.AnyClass, "<:")
+ if (lo.isEmpty) hi
+ else if (hi.isEmpty) lo
+ else s"$lo and $hi"
+ }
+
+ def addendum(cat: String, info: Type): String = info match {
+ case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty =>
+ if (lo eq hi) i" which is an alias of $lo"
+ else i" with $cat ${boundsStr(bounds)}"
+ case _ =>
+ ""
+ }
+
+ entry match {
+ case param: PolyParam =>
+ s"is a type variable${addendum("constraint", ctx.typeComparer.bounds(param))}"
+ case sym: Symbol =>
+ s"is a ${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", sym.info)}"
+ }
+ }
+
+ /** Turns a `Seen` into a `String` to produce an explanation for types on the
+ * form `where: T is...`
+ *
+ * @return string disambiguating types
+ */
+ private def explanations(seen: Seen)(implicit ctx: Context): String = {
+ def needsExplanation(entry: Recorded) = entry match {
+ case param: PolyParam => ctx.typerState.constraint.contains(param)
+ case _ => false
+ }
+
+ val toExplain: List[(String, Recorded)] = seen.toList.flatMap {
+ case (str, entry :: Nil) =>
+ if (needsExplanation(entry)) (str, entry) :: Nil else Nil
+ case (str, entries) =>
+ entries.map(alt => (seen.record(str, alt), alt))
+ }.sortBy(_._1)
+
+ def columnar(parts: List[(String, String)]): List[String] = {
+ lazy val maxLen = parts.map(_._1.length).max
+ parts.map {
+ case (leader, trailer) =>
+ val variable = hl"$leader"
+ s"""$variable${" " * (maxLen - leader.length)} $trailer"""
+ }
+ }
+
+ val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) }
+ val explainLines = columnar(explainParts)
+ if (explainLines.isEmpty) "" else i"where: $explainLines%\n %\n"
+ }
+
+ /** Context with correct printer set for explanations */
+ private def explainCtx(seen: Seen)(implicit ctx: Context): Context = ctx.printer match {
+ case dp: ExplainingPrinter =>
+ ctx // re-use outer printer and defer explanation to it
+ case _ => ctx.fresh.setPrinterFn(ctx => new ExplainingPrinter(seen)(ctx))
+ }
+
+ /** Entrypoint for explanation string interpolator:
+ *
+ * ```
+ * ex"disambiguate $tpe1 and $tpe2"
+ * ```
+ */
+ def explained2(op: Context => String)(implicit ctx: Context): String = {
+ val seen = new Seen
+ op(explainCtx(seen)) ++ explanations(seen)
+ }
+
+ /** When getting a type mismatch it is useful to disambiguate placeholders like:
+ *
+ * ```
+ * found: List[Int]
+ * required: List[T]
+ * where: T is a type in the initializer of value s which is an alias of
+ * String
+ * ```
+ *
+ * @return the `where` section as well as the printing context for the
+ * placeholders - `("T is a...", printCtx)`
+ */
+ def disambiguateTypes(args: Type*)(implicit ctx: Context): (String, Context) = {
+ val seen = new Seen
+ val printCtx = explainCtx(seen)
+ args.foreach(_.show(printCtx)) // showing each member will put it into `seen`
+ (explanations(seen), printCtx)
+ }
+
+ /** This method will produce a colored type diff from the given arguments.
+ * The idea is to do this for known cases that are useful and then fall back
+ * on regular syntax highlighting for the cases which are unhandled.
+ *
+ * Please note that if used in combination with `disambiguateTypes` the
+ * correct `Context` for printing should also be passed when calling the
+ * method.
+ *
+ * @return the (found, expected, changePercentage) with coloring to
+ * highlight the difference
+ */
+ def typeDiff(found: Type, expected: Type)(implicit ctx: Context): (String, String) = {
+ val fnd = wrapNonSensical(found, found.show)
+ val exp = wrapNonSensical(expected, expected.show)
+
+ DiffUtil.mkColoredTypeDiff(fnd, exp) match {
+ case _ if ctx.settings.color.value == "never" => (fnd, exp)
+ case (fnd, exp, change) if change < 0.5 => (fnd, exp)
+ case _ => (fnd, exp)
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/printing/Highlighting.scala b/src/dotty/tools/dotc/printing/Highlighting.scala
new file mode 100644
index 000000000..3bda7fb7a
--- /dev/null
+++ b/src/dotty/tools/dotc/printing/Highlighting.scala
@@ -0,0 +1,77 @@
+package dotty.tools
+package dotc
+package printing
+
+import scala.collection.mutable
+import core.Contexts.Context
+
+object Highlighting {
+
+ implicit def highlightShow(h: Highlight)(implicit ctx: Context): String =
+ h.show
+
+ abstract class Highlight(private val highlight: String) {
+ def text: String
+
+ def show(implicit ctx: Context) =
+ if (ctx.settings.color.value == "never") text
+ else highlight + text + Console.RESET
+
+ override def toString =
+ highlight + text + Console.RESET
+
+ def +(other: Highlight)(implicit ctx: Context): HighlightBuffer =
+ new HighlightBuffer(this) + other
+
+ def +(other: String)(implicit ctx: Context): HighlightBuffer =
+ new HighlightBuffer(this) + other
+ }
+
+ abstract class Modifier(private val mod: String, text: String) extends Highlight(Console.RESET) {
+ override def show(implicit ctx: Context) =
+ if (ctx.settings.color.value == "never") ""
+ else mod + super.show
+ }
+
+ case class HighlightBuffer(hl: Highlight)(implicit ctx: Context) {
+ val buffer = new mutable.ListBuffer[String]
+
+ buffer += hl.show
+
+ def +(other: Highlight): HighlightBuffer = {
+ buffer += other.show
+ this
+ }
+
+ def +(other: String): HighlightBuffer = {
+ buffer += other
+ this
+ }
+
+ override def toString =
+ buffer.mkString
+ }
+
+ case class NoColor(text: String) extends Highlight(Console.RESET)
+
+ case class Red(text: String) extends Highlight(Console.RED)
+ case class Blue(text: String) extends Highlight(Console.BLUE)
+ case class Cyan(text: String) extends Highlight(Console.CYAN)
+ case class Black(text: String) extends Highlight(Console.BLACK)
+ case class Green(text: String) extends Highlight(Console.GREEN)
+ case class White(text: String) extends Highlight(Console.WHITE)
+ case class Yellow(text: String) extends Highlight(Console.YELLOW)
+ case class Magenta(text: String) extends Highlight(Console.MAGENTA)
+
+ case class RedB(text: String) extends Highlight(Console.RED_B)
+ case class BlueB(text: String) extends Highlight(Console.BLUE_B)
+ case class CyanB(text: String) extends Highlight(Console.CYAN_B)
+ case class BlackB(text: String) extends Highlight(Console.BLACK_B)
+ case class GreenB(text: String) extends Highlight(Console.GREEN_B)
+ case class WhiteB(text: String) extends Highlight(Console.WHITE_B)
+ case class YellowB(text: String) extends Highlight(Console.YELLOW_B)
+ case class MagentaB(text: String) extends Highlight(Console.MAGENTA_B)
+
+ case class Bold(text: String) extends Modifier(Console.BOLD, text)
+ case class Underlined(text: String) extends Modifier(Console.UNDERLINED, text)
+}
diff --git a/src/dotty/tools/dotc/printing/PlainPrinter.scala b/src/dotty/tools/dotc/printing/PlainPrinter.scala
index 6d026dde7..785f57897 100644
--- a/src/dotty/tools/dotc/printing/PlainPrinter.scala
+++ b/src/dotty/tools/dotc/printing/PlainPrinter.scala
@@ -6,14 +6,16 @@ import Texts._, Types._, Flags._, Names._, Symbols._, NameOps._, Constants._, De
import Contexts.Context, Scopes.Scope, Denotations.Denotation, Annotations.Annotation
import StdNames.{nme, tpnme}
import ast.Trees._, ast._
+import config.Config
import java.lang.Integer.toOctalString
import config.Config.summarizeDepth
-import typer.Mode
import scala.annotation.switch
class PlainPrinter(_ctx: Context) extends Printer {
protected[this] implicit def ctx: Context = _ctx.addMode(Mode.Printing)
+ private var openRecs: List[RecType] = Nil
+
protected def maxToTextRecursions = 100
protected final def controlled(op: => Text): Text =
@@ -49,14 +51,17 @@ class PlainPrinter(_ctx: Context) extends Printer {
homogenize(tp1) & homogenize(tp2)
case OrType(tp1, tp2) =>
homogenize(tp1) | homogenize(tp2)
- case tp @ TypeRef(_, tpnme.hkApply) =>
- val tp1 = tp.reduceProjection
- if (tp1 eq tp) tp else homogenize(tp1)
+ case tp: SkolemType =>
+ homogenize(tp.info)
+ case tp: LazyRef =>
+ homogenize(tp.ref)
case _ =>
tp
}
else tp
+ private def selfRecName(n: Int) = s"z$n"
+
/** Render elements alternating with `sep` string */
protected def toText(elems: Traversable[Showable], sep: String) =
Text(elems map (_ toText this), sep)
@@ -104,12 +109,17 @@ class PlainPrinter(_ctx: Context) extends Printer {
protected def toTextRefinement(rt: RefinedType) =
(refinementNameString(rt) ~ toTextRHS(rt.refinedInfo)).close
+ protected def argText(arg: Type): Text = arg match {
+ case arg: TypeBounds => "_" ~ toTextGlobal(arg)
+ case _ => toTextGlobal(arg)
+ }
+
/** The longest sequence of refinement types, starting at given type
* and following parents.
*/
private def refinementChain(tp: Type): List[Type] =
tp :: (tp match {
- case RefinedType(parent, _) => refinementChain(parent.stripTypeVar)
+ case tp: RefinedType => refinementChain(tp.parent.stripTypeVar)
case _ => Nil
})
@@ -129,6 +139,12 @@ class PlainPrinter(_ctx: Context) extends Printer {
val parent :: (refined: List[RefinedType @unchecked]) =
refinementChain(tp).reverse
toTextLocal(parent) ~ "{" ~ Text(refined map toTextRefinement, "; ").close ~ "}"
+ case tp: RecType =>
+ try {
+ openRecs = tp :: openRecs
+ "{" ~ selfRecName(openRecs.length) ~ " => " ~ toTextGlobal(tp.parent) ~ "}"
+ }
+ finally openRecs = openRecs.tail
case AndType(tp1, tp2) =>
changePrec(AndPrec) { toText(tp1) ~ " & " ~ toText(tp2) }
case OrType(tp1, tp2) =>
@@ -151,20 +167,21 @@ class PlainPrinter(_ctx: Context) extends Printer {
case tp: ExprType =>
changePrec(GlobalPrec) { "=> " ~ toText(tp.resultType) }
case tp: PolyType =>
- def paramText(name: TypeName, bounds: TypeBounds) =
- toText(polyParamName(name)) ~ polyHash(tp) ~ toText(bounds)
+ def paramText(variance: Int, name: Name, bounds: TypeBounds): Text =
+ varianceString(variance) ~ name.toString ~ toText(bounds)
changePrec(GlobalPrec) {
- "[" ~
- Text((tp.paramNames, tp.paramBounds).zipped map paramText, ", ") ~
- "]" ~ toText(tp.resultType)
+ "[" ~ Text((tp.variances, tp.paramNames, tp.paramBounds).zipped.map(paramText), ", ") ~
+ "] => " ~ toTextGlobal(tp.resultType)
}
- case PolyParam(pt, n) =>
- toText(polyParamName(pt.paramNames(n))) ~ polyHash(pt)
+ case tp: PolyParam =>
+ polyParamNameString(tp) ~ polyHash(tp.binder)
case AnnotatedType(tpe, annot) =>
toTextLocal(tpe) ~ " " ~ toText(annot)
+ case HKApply(tycon, args) =>
+ toTextLocal(tycon) ~ "[" ~ Text(args.map(argText), ", ") ~ "]"
case tp: TypeVar =>
if (tp.isInstantiated)
- toTextLocal(tp.instanceOpt) ~ "'" // debug for now, so that we can see where the TypeVars are.
+ toTextLocal(tp.instanceOpt) ~ "^" // debug for now, so that we can see where the TypeVars are.
else {
val constr = ctx.typerState.constraint
val bounds =
@@ -174,13 +191,15 @@ class PlainPrinter(_ctx: Context) extends Printer {
else toText(tp.origin)
}
case tp: LazyRef =>
- "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")"
+ "LazyRef(" ~ toTextGlobal(tp.ref) ~ ")" // TODO: only print this during debug mode?
case _ =>
tp.fallbackToText(this)
}
}.close
- protected def polyParamName(name: TypeName): TypeName = name
+ protected def polyParamNameString(name: TypeName): String = name.toString
+
+ protected def polyParamNameString(param: PolyParam): String = polyParamNameString(param.binder.paramNames(param.paramNum))
/** The name of the symbol without a unique id. Under refined printing,
* the decoded original name.
@@ -189,7 +208,7 @@ class PlainPrinter(_ctx: Context) extends Printer {
/** If -uniqid is set, the hashcode of the polytype, after a # */
protected def polyHash(pt: PolyType): Text =
- "#" + pt.hashCode provided ctx.settings.uniqid.value
+ if (ctx.settings.uniqid.value) "#" + pt.hashCode else ""
/** If -uniqid is set, the unique id of symbol, after a # */
protected def idString(sym: Symbol): String =
@@ -231,11 +250,12 @@ class PlainPrinter(_ctx: Context) extends Printer {
toText(value)
case MethodParam(mt, idx) =>
nameString(mt.paramNames(idx))
- case tp: RefinedThis =>
- s"${nameString(tp.binder.typeSymbol)}{...}.this"
+ case tp: RecThis =>
+ val idx = openRecs.reverse.indexOf(tp.binder)
+ if (idx >= 0) selfRecName(idx + 1)
+ else "{...}.this" // TODO move underlying type to an addendum, e.g. ... z3 ... where z3: ...
case tp: SkolemType =>
- if (homogenizedView) toText(tp.info)
- else "<unknown instance of type " ~ toTextGlobal(tp.info) ~ ">"
+ if (homogenizedView) toText(tp.info) else tp.repr
}
}
@@ -376,13 +396,33 @@ class PlainPrinter(_ctx: Context) extends Printer {
def locationText(sym: Symbol): Text =
if (!sym.exists) ""
else {
- val owns = sym.effectiveOwner
- if (owns.isClass && !isEmptyPrefix(owns)) " in " ~ toText(owns) else Text()
- }
+ val ownr = sym.effectiveOwner
+ if (ownr.isClass && !isEmptyPrefix(ownr)) " in " ~ toText(ownr) else Text()
+ }
def locatedText(sym: Symbol): Text =
(toText(sym) ~ locationText(sym)).close
+ def extendedLocationText(sym: Symbol): Text =
+ if (!sym.exists) ""
+ else {
+ def recur(ownr: Symbol, innerLocation: String): Text = {
+ def nextOuter(innerKind: String): Text =
+ recur(ownr.effectiveOwner,
+ if (!innerLocation.isEmpty) innerLocation
+ else s" in an anonymous $innerKind")
+ def showLocation(ownr: Symbol, where: String): Text =
+ innerLocation ~ " " ~ where ~ " " ~ toText(ownr)
+ if (ownr.isAnonymousClass) nextOuter("class")
+ else if (ownr.isAnonymousFunction) nextOuter("function")
+ else if (isEmptyPrefix(ownr)) ""
+ else if (ownr.isLocalDummy) showLocation(ownr.owner, "locally defined in")
+ else if (ownr.isTerm && !ownr.is(Module | Method)) showLocation(ownr, "in the initalizer of")
+ else showLocation(ownr, "in")
+ }
+ recur(sym.owner, "")
+ }
+
def toText(denot: Denotation): Text = toText(denot.symbol) ~ "/D"
@switch private def escapedChar(ch: Char): String = ch match {
diff --git a/src/dotty/tools/dotc/printing/Printer.scala b/src/dotty/tools/dotc/printing/Printer.scala
index 360874522..14b63012e 100644
--- a/src/dotty/tools/dotc/printing/Printer.scala
+++ b/src/dotty/tools/dotc/printing/Printer.scala
@@ -68,6 +68,9 @@ abstract class Printer {
/** Textual representation of symbol and its location */
def locatedText(sym: Symbol): Text
+ /** A description of sym's location */
+ def extendedLocationText(sym: Symbol): Text
+
/** Textual representation of denotation */
def toText(denot: Denotation): Text
diff --git a/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/src/dotty/tools/dotc/printing/RefinedPrinter.scala
index e21f12410..6315cfabc 100644
--- a/src/dotty/tools/dotc/printing/RefinedPrinter.scala
+++ b/src/dotty/tools/dotc/printing/RefinedPrinter.scala
@@ -7,11 +7,12 @@ import TypeErasure.ErasedValueType
import Contexts.Context, Scopes.Scope, Denotations._, SymDenotations._, Annotations.Annotation
import StdNames.{nme, tpnme}
import ast.{Trees, untpd, tpd}
-import typer.Namer
+import typer.{Namer, Inliner}
import typer.ProtoTypes.{SelectionProto, ViewProto, FunProto, IgnoredProto, dummyTreeOfType}
import Trees._
import TypeApplications._
import Decorators._
+import config.Config
import scala.annotation.switch
import language.implicitConversions
@@ -19,7 +20,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
/** A stack of enclosing DefDef, TypeDef, or ClassDef, or ModuleDefs nodes */
private var enclosingDef: untpd.Tree = untpd.EmptyTree
- private var lambdaNestingLevel: Int = 0
private var myCtx: Context = _ctx
override protected[this] implicit def ctx: Context = myCtx
@@ -52,6 +52,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
nameString(if (sym is ExpandedTypeParam) name.asTypeName.unexpandedName else name)
}
+ override def fullNameString(sym: Symbol): String =
+ if (isEmptyPrefix(sym.maybeOwner)) nameString(sym)
+ else super.fullNameString(sym)
+
override protected def fullNameOwner(sym: Symbol) = {
val owner = super.fullNameOwner(sym)
if (owner is ModuleClass) owner.sourceModule else owner
@@ -94,20 +98,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
override def toText(tp: Type): Text = controlled {
- def argText(arg: Type): Text = arg match {
- case arg: TypeBounds => "_" ~ toTextGlobal(arg)
- case _ => toTextGlobal(arg)
- }
def toTextTuple(args: List[Type]): Text =
- "(" ~ toTextGlobal(args, ", ") ~ ")"
+ "(" ~ Text(args.map(argText), ", ") ~ ")"
def toTextFunction(args: List[Type]): Text =
changePrec(GlobalPrec) {
val argStr: Text =
if (args.length == 2 && !defn.isTupleType(args.head))
- atPrec(InfixPrec) { toText(args.head) }
+ atPrec(InfixPrec) { argText(args.head) }
else
toTextTuple(args.init)
- argStr ~ " => " ~ toText(args.last)
+ argStr ~ " => " ~ argText(args.last)
}
homogenize(tp) match {
case AppliedType(tycon, args) =>
@@ -116,35 +116,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
if (defn.isFunctionClass(cls)) return toTextFunction(args)
if (defn.isTupleClass(cls)) return toTextTuple(args)
return (toTextLocal(tycon) ~ "[" ~ Text(args map argText, ", ") ~ "]").close
- case tp @ TypeLambda(variances, argBoundss, body) =>
- val prefix = ((('X' - 'A') + lambdaNestingLevel) % 26 + 'A').toChar
- val paramNames = variances.indices.toList.map(prefix.toString + _)
- val instantiate = new TypeMap {
- def contains(tp1: Type, tp2: Type): Boolean =
- tp1.eq(tp2) || {
- tp1.stripTypeVar match {
- case RefinedType(parent, _) => contains(parent, tp2)
- case _ => false
- }
- }
- def apply(t: Type): Type = t match {
- case TypeRef(RefinedThis(rt), name) if name.isHkArgName && contains(tp, rt) =>
- // Make up a name that prints as "Xi". Need to be careful we do not
- // accidentally unique-hash to something else. That's why we can't
- // use prefix = NoPrefix or a WithFixedSym instance.
- TypeRef.withSymAndName(
- defn.EmptyPackageClass.thisType, defn.AnyClass,
- paramNames(name.hkArgIndex).toTypeName)
- case _ =>
- mapOver(t)
- }
- }
- val instArgs = argBoundss.map(instantiate).asInstanceOf[List[TypeBounds]]
- val instBody = instantiate(body).dropAlias
- lambdaNestingLevel += 1
- try
- return typeLambdaText(paramNames, variances, instArgs, instBody)
- finally lambdaNestingLevel -=1
case tp: TypeRef =>
val hideType = tp.symbol is AliasPreferred
if (hideType && !ctx.phase.erasedTypes && !tp.symbol.isCompleting) {
@@ -157,8 +128,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
return toText(tp.info)
case ExprType(result) =>
return "=> " ~ toText(result)
- case ErasedValueType(clazz, underlying) =>
- return "ErasedValueType(" ~ toText(clazz.typeRef) ~ ", " ~ toText(underlying) ~ ")"
+ case ErasedValueType(tycon, underlying) =>
+ return "ErasedValueType(" ~ toText(tycon) ~ ", " ~ toText(underlying) ~ ")"
case tp: ClassInfo =>
return toTextParents(tp.parentsWithArgs) ~ "{...}"
case JavaArrayType(elemtp) =>
@@ -182,34 +153,22 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
def blockText[T >: Untyped](trees: List[Tree[T]]): Text =
- "{" ~ toText(trees, "\n") ~ "}"
-
- /** The text for a TypeLambda
- *
- * [v_1 p_1: B_1, ..., v_n p_n: B_n] -> T
- *
- * where
- * @param paramNames = p_1, ..., p_n
- * @param variances = v_1, ..., v_n
- * @param argBoundss = B_1, ..., B_n
- * @param body = T
- */
- def typeLambdaText(paramNames: List[String], variances: List[Int], argBoundss: List[TypeBounds], body: Type): Text = {
- def lambdaParamText(variance: Int, name: String, bounds: TypeBounds): Text =
- varianceString(variance) ~ name ~ toText(bounds)
- changePrec(GlobalPrec) {
- "[" ~ Text((variances, paramNames, argBoundss).zipped.map(lambdaParamText), ", ") ~
- "] -> " ~ toTextGlobal(body)
- }
- }
+ ("{" ~ toText(trees, "\n") ~ "}").close
override def toText[T >: Untyped](tree: Tree[T]): Text = controlled {
import untpd.{modsDeco => _, _}
- /** Print modifiers form symbols if tree has type, overriding the untpd behavior. */
- implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDeco =
- tpd.modsDeco(mdef.asInstanceOf[tpd.MemberDef]).asInstanceOf[untpd.ModsDeco]
+ /** Print modifiers from symbols if tree has type, overriding the untpd behavior. */
+ implicit def modsDeco(mdef: untpd.MemberDef)(implicit ctx: Context): untpd.ModsDecorator =
+ new untpd.ModsDecorator {
+ def mods = if (mdef.hasType) Modifiers(mdef.symbol) else mdef.rawMods
+ }
+
+ def Modifiers(sym: Symbol)(implicit ctx: Context): Modifiers = untpd.Modifiers(
+ sym.flags & (if (sym.isType) ModifierFlags | VarianceFlags else ModifierFlags),
+ if (sym.privateWithin.exists) sym.privateWithin.asType.name else tpnme.EMPTY,
+ sym.annotations map (_.tree))
def isLocalThis(tree: Tree) = tree.typeOpt match {
case tp: ThisType => tp.cls == ctx.owner.enclosingClass
@@ -264,6 +223,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (kw provided !suppressKw)
}
+ def varianceText(mods: untpd.Modifiers) =
+ if (mods is Covariant) "+"
+ else if (mods is Contravariant) "-"
+ else ""
+
def argText(arg: Tree): Text = arg match {
case arg: TypeBoundsTree => "_" ~ toTextGlobal(arg)
case arg: TypeTree =>
@@ -295,6 +259,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
if (vparamss.isEmpty || primaryConstrs.nonEmpty) tparamsTxt
else {
var modsText = modText(constr.mods, "")
+ if (!modsText.isEmpty) modsText = " " ~ modsText
if (constr.mods.hasAnnotations && !constr.mods.hasFlags) modsText = modsText ~~ " this"
withEnclosingDef(constr) { addVparamssText(tparamsTxt ~~ modsText, vparamss) }
}
@@ -308,10 +273,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
def toTextPackageId(pid: Tree): Text =
- if (homogenizedView) toTextLocal(pid.tpe)
+ if (homogenizedView && pid.hasType) toTextLocal(pid.tpe)
else toTextLocal(pid)
- var txt: Text = tree match {
+ def toTextCore(tree: Tree): Text = tree match {
case id: Trees.BackquotedIdent[_] if !homogenizedView =>
"`" ~ toText(id.name) ~ "`"
case Ident(name) =>
@@ -322,7 +287,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
case _ => toText(name)
}
case tree @ Select(qual, name) =>
- toTextLocal(qual) ~ ("." ~ nameIdText(tree) provided name != nme.CONSTRUCTOR)
+ if (qual.isType) toTextLocal(qual) ~ "#" ~ toText(name)
+ else toTextLocal(qual) ~ ("." ~ nameIdText(tree) provided name != nme.CONSTRUCTOR)
case tree: This =>
optDotPrefix(tree) ~ "this" ~ idText(tree)
case Super(qual: This, mix) =>
@@ -345,11 +311,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
"new " ~ {
tpt match {
case tpt: Template => toTextTemplate(tpt, ofNew = true)
- case _ => toTextLocal(tpt)
+ case _ =>
+ if (tpt.hasType)
+ toTextLocal(tpt.typeOpt.underlyingClassRef(refinementOK = false))
+ else
+ toTextLocal(tpt)
}
}
- case Pair(l, r) =>
- "(" ~ toTextGlobal(l) ~ ", " ~ toTextGlobal(r) ~ ")"
case Typed(expr, tpt) =>
changePrec(InfixPrec) { toText(expr) ~ ": " ~ toText(tpt) }
case NamedArg(name, arg) =>
@@ -382,14 +350,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
case SeqLiteral(elems, elemtpt) =>
"[" ~ toTextGlobal(elems, ",") ~ " : " ~ toText(elemtpt) ~ "]"
+ case tree @ Inlined(call, bindings, body) =>
+ if (homogenizedView) toTextCore(Inliner.dropInlined(tree.asInstanceOf[tpd.Inlined]))
+ else "/* inlined from " ~ toText(call) ~ "*/ " ~ blockText(bindings :+ body)
case tpt: untpd.DerivedTypeTree =>
"<derived typetree watching " ~ summarized(toText(tpt.watched)) ~ ">"
case TypeTree(orig) =>
if (tree.hasType) toText(tree.typeOpt) else toText(orig)
case SingletonTypeTree(ref) =>
toTextLocal(ref) ~ ".type"
- case SelectFromTypeTree(qual, name) =>
- toTextLocal(qual) ~ "#" ~ toText(name)
case AndTypeTree(l, r) =>
changePrec(AndPrec) { toText(l) ~ " & " ~ toText(r) }
case OrTypeTree(l, r) =>
@@ -398,6 +367,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
toTextLocal(tpt) ~ " " ~ blockText(refines)
case AppliedTypeTree(tpt, args) =>
toTextLocal(tpt) ~ "[" ~ Text(args map argText, ", ") ~ "]"
+ case PolyTypeTree(tparams, body) =>
+ changePrec(GlobalPrec) {
+ tparamsText(tparams) ~ " -> " ~ toText(body)
+ }
case ByNameTypeTree(tpt) =>
"=> " ~ toTextLocal(tpt)
case TypeBoundsTree(lo, hi) =>
@@ -431,7 +404,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
case tree @ TypeDef(name, rhs) =>
def typeDefText(rhsText: Text) =
dclTextOr {
- modText(tree.mods, "type") ~~ nameIdText(tree) ~
+ modText(tree.mods, "type") ~~ (varianceText(tree.mods) ~ nameIdText(tree)) ~
withEnclosingDef(tree) {
val rhsText1 = if (tree.hasType) toText(tree.symbol.info) else rhsText
tparamsText(tree.tparams) ~ rhsText1
@@ -441,7 +414,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
case impl: Template =>
modText(tree.mods, if ((tree).mods is Trait) "trait" else "class") ~~
nameIdText(tree) ~ withEnclosingDef(tree) { toTextTemplate(impl) } ~
- (if (tree.hasType && ctx.settings.verbose.value) s"[decls = ${tree.symbol.info.decls}]" else "")
+ (if (tree.hasType && ctx.settings.verbose.value) i"[decls = ${tree.symbol.info.decls}]" else "")
case rhs: TypeBoundsTree =>
typeDefText(toText(rhs))
case _ =>
@@ -449,7 +422,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
case Import(expr, selectors) =>
def selectorText(sel: Tree): Text = sel match {
- case Pair(l, r) => toTextGlobal(l) ~ " => " ~ toTextGlobal(r)
+ case Thicket(l :: r :: Nil) => toTextGlobal(l) ~ " => " ~ toTextGlobal(r)
case _ => toTextGlobal(sel)
}
val selectorsText: Text = selectors match {
@@ -467,7 +440,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
"package " ~ toTextPackageId(pid) ~ bodyText
case tree: Template =>
toTextTemplate(tree)
- case Annotated(annot, arg) =>
+ case Annotated(arg, annot) =>
toTextLocal(arg) ~~ annotText(annot)
case EmptyTree =>
"<empty>"
@@ -479,15 +452,13 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
}
case SymbolLit(str) =>
"'" + str
- case InterpolatedString(id, strings, elems) =>
- def interleave(strs: List[Text], elems: List[Text]): Text = ((strs, elems): @unchecked) match {
- case (Nil, Nil) => ""
- case (str :: Nil, Nil) => str
- case (str :: strs1, elem :: elems1) => str ~ elem ~ interleave(strs1, elems1)
+ case InterpolatedString(id, segments) =>
+ def strText(str: Literal) = Str(escapedString(str.const.stringValue))
+ def segmentText(segment: Tree) = segment match {
+ case Thicket(List(str: Literal, expr)) => strText(str) ~ "{" ~ toTextGlobal(expr) ~ "}"
+ case str: Literal => strText(str)
}
- val strTexts = strings map (str => Str(escapedString(str.const.stringValue)))
- val elemsTexts = elems map (elem => "{" ~ toTextGlobal(elem) ~ "}")
- toText(id) ~ "\"" ~ interleave(strTexts, elemsTexts) ~ "\""
+ toText(id) ~ "\"" ~ Text(segments map segmentText, "") ~ "\""
case Function(args, body) =>
var implicitSeen: Boolean = false
def argToText(arg: Tree) = arg match {
@@ -545,13 +516,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
case _ =>
tree.fallbackToText(this)
}
+ var txt = toTextCore(tree)
if (ctx.settings.printtypes.value && tree.hasType) {
val tp = tree.typeOpt match {
case tp: TermRef if tree.isInstanceOf[RefTree] && !tp.denot.isOverloaded => tp.underlying
case tp => tp
}
if (tree.isType) txt = toText(tp)
- else if (!tree.isDef) txt = "<" ~ txt ~ ":" ~ toText(tp) ~ ">"
+ else if (!tree.isDef) txt = ("<" ~ txt ~ ":" ~ toText(tp) ~ ">").close
}
if (ctx.settings.Yprintpos.value && !tree.isInstanceOf[WithoutTypeOrPos[_]])
txt = txt ~ "@" ~ tree.pos.toString
@@ -570,8 +542,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
def optText[T >: Untyped](tree: List[Tree[T]])(encl: Text => Text): Text =
if (tree.exists(!_.isEmpty)) encl(blockText(tree)) else ""
- override protected def polyParamName(name: TypeName): TypeName =
- name.unexpandedName
+ override protected def polyParamNameString(name: TypeName): String =
+ name.unexpandedName.toString
override protected def treatAsTypeParam(sym: Symbol): Boolean = sym is TypeParam
@@ -580,7 +552,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
(sym.allOverriddenSymbols exists (_ is TypeParam))
override def toText(sym: Symbol): Text = {
- if (sym.name == nme.IMPORT) {
+ if (sym.isImport) {
def importString(tree: untpd.Tree) = s"import ${tree.show}"
sym.infoOrCompleter match {
case info: Namer#Completer => return importString(info.original)
@@ -588,7 +560,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) {
case _ =>
}
}
- super.toText(sym)
+ if (sym.is(ModuleClass))
+ kindString(sym) ~~ (nameString(sym.name.stripModuleClassSuffix) + idString(sym))
+ else
+ super.toText(sym)
}
override def kindString(sym: Symbol) = {
diff --git a/src/dotty/tools/dotc/printing/Showable.scala b/src/dotty/tools/dotc/printing/Showable.scala
index 37de053cb..efddb26f7 100644
--- a/src/dotty/tools/dotc/printing/Showable.scala
+++ b/src/dotty/tools/dotc/printing/Showable.scala
@@ -21,11 +21,7 @@ trait Showable extends Any {
def fallbackToText(printer: Printer): Text = toString
/** The string representation of this showable element. */
- def show(implicit ctx: Context): String =
- try toText(ctx.printer).show
- catch {
- case NonFatal(ex) => s"[cannot display due to $ex, raw string = $toString]"
- }
+ def show(implicit ctx: Context): String = toText(ctx.printer).show
/** The summarized string representation of this showable element.
* Recursion depth is limited to some smallish value. Default is
diff --git a/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
new file mode 100644
index 000000000..86f34e64d
--- /dev/null
+++ b/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala
@@ -0,0 +1,304 @@
+package dotty.tools
+package dotc
+package printing
+
+import parsing.Tokens._
+import scala.annotation.switch
+import scala.collection.mutable.StringBuilder
+import core.Contexts.Context
+import Highlighting.{Highlight, HighlightBuffer}
+
+/** This object provides functions for syntax highlighting in the REPL */
+object SyntaxHighlighting {
+
+ val NoColor = Console.RESET
+ val CommentColor = Console.BLUE
+ val KeywordColor = Console.YELLOW
+ val ValDefColor = Console.CYAN
+ val LiteralColor = Console.RED
+ val TypeColor = Console.MAGENTA
+ val AnnotationColor = Console.MAGENTA
+
+ private def none(str: String) = str
+ private def keyword(str: String) = KeywordColor + str + NoColor
+ private def typeDef(str: String) = TypeColor + str + NoColor
+ private def literal(str: String) = LiteralColor + str + NoColor
+ private def valDef(str: String) = ValDefColor + str + NoColor
+ private def operator(str: String) = TypeColor + str + NoColor
+ private def annotation(str: String) =
+ if (str.trim == "@") str else AnnotationColor + str + NoColor
+ private val tripleQs = Console.RED_B + "???" + NoColor
+
+ private val keywords: Seq[String] = for {
+ index <- IF to INLINE // All alpha keywords
+ } yield tokenString(index)
+
+ private val interpolationPrefixes =
+ 'A' :: 'B' :: 'C' :: 'D' :: 'E' :: 'F' :: 'G' :: 'H' :: 'I' :: 'J' :: 'K' ::
+ 'L' :: 'M' :: 'N' :: 'O' :: 'P' :: 'Q' :: 'R' :: 'S' :: 'T' :: 'U' :: 'V' ::
+ 'W' :: 'X' :: 'Y' :: 'Z' :: '$' :: '_' :: 'a' :: 'b' :: 'c' :: 'd' :: 'e' ::
+ 'f' :: 'g' :: 'h' :: 'i' :: 'j' :: 'k' :: 'l' :: 'm' :: 'n' :: 'o' :: 'p' ::
+ 'q' :: 'r' :: 's' :: 't' :: 'u' :: 'v' :: 'w' :: 'x' :: 'y' :: 'z' :: Nil
+
+ private val typeEnders =
+ '{' :: '}' :: ')' :: '(' :: '[' :: ']' :: '=' :: ' ' :: ',' :: '.' ::
+ '\n' :: Nil
+
+ def apply(chars: Iterable[Char]): Iterable[Char] = {
+ var prev: Char = 0
+ var remaining = chars.toStream
+ val newBuf = new StringBuilder
+ var lastToken = ""
+
+ @inline def keywordStart =
+ prev == 0 || prev == ' ' || prev == '{' || prev == '(' ||
+ prev == '\n' || prev == '[' || prev == ','
+
+ @inline def numberStart(c: Char) =
+ c.isDigit && (!prev.isLetter || prev == '.' || prev == ' ' || prev == '(' || prev == '\u0000')
+
+ def takeChar(): Char = takeChars(1).head
+ def takeChars(x: Int): Seq[Char] = {
+ val taken = remaining.take(x)
+ remaining = remaining.drop(x)
+ taken
+ }
+
+ while (remaining.nonEmpty) {
+ val n = takeChar()
+ if (interpolationPrefixes.contains(n)) {
+ // Interpolation prefixes are a superset of the keyword start chars
+ val next = remaining.take(3).mkString
+ if (next.startsWith("\"")) {
+ newBuf += n
+ prev = n
+ if (remaining.nonEmpty) takeChar() // drop 1 for appendLiteral
+ appendLiteral('"', next == "\"\"\"")
+ } else {
+ if (n.isUpper && keywordStart) {
+ appendWhile(n, !typeEnders.contains(_), typeDef)
+ } else if (keywordStart) {
+ append(n, keywords.contains(_), { kw =>
+ if (kw == "new") typeDef(kw) else keyword(kw)
+ })
+ } else {
+ newBuf += n
+ prev = n
+ }
+ }
+ } else {
+ (n: @switch) match {
+ case '/' =>
+ if (remaining.nonEmpty) {
+ remaining.head match {
+ case '/' =>
+ takeChar()
+ eolComment()
+ case '*' =>
+ takeChar()
+ blockComment()
+ case x =>
+ newBuf += '/'
+ }
+ } else newBuf += '/'
+ case '=' =>
+ append('=', _ == "=>", operator)
+ case '<' =>
+ append('<', { x => x == "<-" || x == "<:" || x == "<%" }, operator)
+ case '>' =>
+ append('>', { x => x == ">:" }, operator)
+ case '#' =>
+ if (prev != ' ' && prev != '.') newBuf append operator("#")
+ else newBuf += n
+ prev = '#'
+ case '@' =>
+ appendWhile('@', !typeEnders.contains(_), annotation)
+ case '\"' =>
+ appendLiteral('\"', multiline = remaining.take(2).mkString == "\"\"")
+ case '\'' =>
+ appendLiteral('\'')
+ case '`' =>
+ appendTo('`', _ == '`', none)
+ case _ => {
+ if (n == '?' && remaining.take(2).mkString == "??") {
+ takeChars(2)
+ newBuf append tripleQs
+ prev = '?'
+ } else if (n.isUpper && keywordStart)
+ appendWhile(n, !typeEnders.contains(_), typeDef)
+ else if (numberStart(n))
+ appendWhile(n, { x => x.isDigit || x == '.' || x == '\u0000'}, literal)
+ else
+ newBuf += n; prev = n
+ }
+ }
+ }
+ }
+
+ def eolComment() = {
+ newBuf append (CommentColor + "//")
+ var curr = '/'
+ while (curr != '\n' && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+ }
+ prev = curr
+ newBuf append NoColor
+ }
+
+ def blockComment() = {
+ newBuf append (CommentColor + "/*")
+ var curr = '*'
+ var open = 1
+ while (open > 0 && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+
+ if (curr == '*' && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+ if (curr == '/') open -= 1
+ } else if (curr == '/' && remaining.nonEmpty) {
+ curr = takeChar()
+ newBuf += curr
+ if (curr == '*') open += 1
+ }
+ }
+ prev = curr
+ newBuf append NoColor
+ }
+
+ def appendLiteral(delim: Char, multiline: Boolean = false) = {
+ var curr: Char = 0
+ var continue = true
+ var closing = 0
+ val inInterpolation = interpolationPrefixes.contains(prev)
+ newBuf append (LiteralColor + delim)
+
+ def shouldInterpolate =
+ inInterpolation && curr == '$' && prev != '$' && remaining.nonEmpty
+
+ def interpolate() = {
+ val next = takeChar()
+ if (next == '$') {
+ newBuf += curr
+ newBuf += next
+ prev = '$'
+ } else if (next == '{') {
+ var open = 1 // keep track of open blocks
+ newBuf append (ValDefColor + curr)
+ newBuf += next
+ while (remaining.nonEmpty && open > 0) {
+ var c = takeChar()
+ newBuf += c
+ if (c == '}') open -= 1
+ else if (c == '{') open += 1
+ }
+ newBuf append LiteralColor
+ } else {
+ newBuf append (ValDefColor + curr)
+ newBuf += next
+ var c: Char = 'a'
+ while (c.isLetterOrDigit && remaining.nonEmpty) {
+ c = takeChar()
+ if (c != '"') newBuf += c
+ }
+ newBuf append LiteralColor
+ if (c == '"') {
+ newBuf += c
+ continue = false
+ }
+ }
+ closing = 0
+ }
+
+ while (continue && remaining.nonEmpty) {
+ curr = takeChar()
+ if (curr == '\\' && remaining.nonEmpty) {
+ val next = takeChar()
+ newBuf append (KeywordColor + curr)
+ if (next == 'u') {
+ val code = "u" + takeChars(4).mkString
+ newBuf append code
+ } else newBuf += next
+ newBuf append LiteralColor
+ closing = 0
+ } else if (shouldInterpolate) {
+ interpolate()
+ } else if (curr == delim && multiline) {
+ closing += 1
+ if (closing == 3) continue = false
+ newBuf += curr
+ } else if (curr == delim) {
+ continue = false
+ newBuf += curr
+ } else {
+ newBuf += curr
+ closing = 0
+ }
+ }
+ newBuf append NoColor
+ prev = curr
+ }
+
+ def append(c: Char, shouldHL: String => Boolean, highlight: String => String) = {
+ var curr: Char = 0
+ val sb = new StringBuilder(s"$c")
+
+ def delim(c: Char) = (c: @switch) match {
+ case ' ' => true
+ case '\n' => true
+ case '(' => true
+ case '[' => true
+ case ':' => true
+ case '@' => true
+ case _ => false
+ }
+
+ while (remaining.nonEmpty && !delim(curr)) {
+ curr = takeChar()
+ if (!delim(curr)) sb += curr
+ }
+
+ val str = sb.toString
+ val toAdd =
+ if (shouldHL(str))
+ highlight(str)
+ else if (("var" :: "val" :: "def" :: "case" :: Nil).contains(lastToken))
+ valDef(str)
+ else str
+ val suffix = if (delim(curr)) s"$curr" else ""
+ newBuf append (toAdd + suffix)
+ lastToken = str
+ prev = curr
+ }
+
+ def appendWhile(c: Char, pred: Char => Boolean, highlight: String => String) = {
+ var curr: Char = 0
+ val sb = new StringBuilder(s"$c")
+ while (remaining.nonEmpty && pred(curr)) {
+ curr = takeChar()
+ if (pred(curr)) sb += curr
+ }
+
+ val str = sb.toString
+ val suffix = if (!pred(curr)) s"$curr" else ""
+ newBuf append (highlight(str) + suffix)
+ prev = curr
+ }
+
+ def appendTo(c: Char, pred: Char => Boolean, highlight: String => String) = {
+ var curr: Char = 0
+ val sb = new StringBuilder(s"$c")
+ while (remaining.nonEmpty && !pred(curr)) {
+ curr = takeChar()
+ sb += curr
+ }
+
+ newBuf append highlight(sb.toString)
+ prev = curr
+ }
+
+ newBuf.toIterable
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/AmmoniteReader.scala b/src/dotty/tools/dotc/repl/AmmoniteReader.scala
new file mode 100644
index 000000000..f3b68e4b0
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/AmmoniteReader.scala
@@ -0,0 +1,82 @@
+package dotty.tools
+package dotc
+package repl
+
+import core.Contexts._
+import ammonite.terminal._
+import LazyList._
+import Ansi.Color
+import filters._
+import BasicFilters._
+import GUILikeFilters._
+import util.SourceFile
+import printing.SyntaxHighlighting
+
+class AmmoniteReader(val interpreter: Interpreter)(implicit ctx: Context) extends InteractiveReader {
+ val interactive = true
+
+ def incompleteInput(str: String): Boolean =
+ interpreter.delayOutputDuring(interpreter.interpret(str)) match {
+ case Interpreter.Incomplete => true
+ case _ => false
+ }
+
+ val reader = new java.io.InputStreamReader(System.in)
+ val writer = new java.io.OutputStreamWriter(System.out)
+ val cutPasteFilter = ReadlineFilters.CutPasteFilter()
+ var history = List.empty[String]
+ val selectionFilter = GUILikeFilters.SelectionFilter(indent = 2)
+ val multilineFilter: Filter = Filter("multilineFilter") {
+ case TermState(lb ~: rest, b, c, _)
+ if (lb == 10 || lb == 13) && incompleteInput(b.mkString) =>
+ BasicFilters.injectNewLine(b, c, rest, indent = 2)
+ }
+
+ def readLine(prompt: String): String = {
+ val historyFilter = new HistoryFilter(
+ () => history.toVector,
+ Console.BLUE,
+ AnsiNav.resetForegroundColor
+ )
+
+ val allFilters = Filter.merge(
+ UndoFilter(),
+ historyFilter,
+ selectionFilter,
+ GUILikeFilters.altFilter,
+ GUILikeFilters.fnFilter,
+ ReadlineFilters.navFilter,
+ cutPasteFilter,
+ multilineFilter,
+ BasicFilters.all
+ )
+
+ Terminal.readLine(
+ Console.BLUE + prompt + Console.RESET,
+ reader,
+ writer,
+ allFilters,
+ displayTransform = (buffer, cursor) => {
+ val coloredBuffer =
+ if (ctx.useColors) SyntaxHighlighting(buffer)
+ else buffer
+
+ val ansiBuffer = Ansi.Str.parse(coloredBuffer.toVector)
+ val (newBuffer, cursorOffset) = SelectionFilter.mangleBuffer(
+ selectionFilter, ansiBuffer, cursor, Ansi.Reversed.On
+ )
+ val newNewBuffer = HistoryFilter.mangleBuffer(
+ historyFilter, newBuffer, cursor,
+ Ansi.Color.Green
+ )
+
+ (newNewBuffer, cursorOffset)
+ }
+ ) match {
+ case Some(res) =>
+ history = res :: history;
+ res
+ case None => ":q"
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/CompilingInterpreter.scala b/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
index 7d1da1419..5b3669d5e 100644
--- a/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
+++ b/src/dotty/tools/dotc/repl/CompilingInterpreter.scala
@@ -2,7 +2,10 @@ package dotty.tools
package dotc
package repl
-import java.io.{File, PrintWriter, StringWriter, Writer}
+import java.io.{
+ File, PrintWriter, PrintStream, StringWriter, Writer, OutputStream,
+ ByteArrayOutputStream => ByteOutputStream
+}
import java.lang.{Class, ClassLoader}
import java.net.{URL, URLClassLoader}
@@ -24,6 +27,7 @@ import dotty.tools.backend.jvm.GenBCode
import Symbols._, Types._, Contexts._, StdNames._, Names._, NameOps._
import Decorators._
import scala.util.control.NonFatal
+import printing.SyntaxHighlighting
/** An interpreter for Scala code which is based on the `dotc` compiler.
*
@@ -56,10 +60,16 @@ import scala.util.control.NonFatal
* @param ictx The context to use for initialization of the interpreter,
* needed to access the current classpath.
*/
-class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler with Interpreter {
+class CompilingInterpreter(
+ out: PrintWriter,
+ ictx: Context,
+ parentClassLoader: Option[ClassLoader]
+) extends Compiler with Interpreter {
import ast.untpd._
import CompilingInterpreter._
+ ictx.base.initialize()(ictx)
+
/** directory to save .class files to */
val virtualDirectory =
if (ictx.settings.d.isDefault(ictx)) new VirtualDirectory("(memory)", None)
@@ -76,6 +86,25 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
/** whether to print out result lines */
private var printResults: Boolean = true
+ private var delayOutput: Boolean = false
+
+ val previousOutput = ListBuffer.empty[String]
+
+ override def lastOutput() = {
+ val prev = previousOutput.toList
+ previousOutput.clear()
+ prev
+ }
+
+ override def delayOutputDuring[T](operation: => T): T = {
+ val old = delayOutput
+ try {
+ delayOutput = true
+ operation
+ } finally {
+ delayOutput = old
+ }
+ }
/** Temporarily be quiet */
override def beQuietDuring[T](operation: => T): T = {
@@ -88,18 +117,22 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
}
}
- private def newReporter = new ConsoleReporter(Console.in, out) {
- override def printMessage(msg: String) = {
- out.print(/*clean*/(msg) + "\n")
- // Suppress clean for now for compiler messages
- // Otherwise we will completely delete all references to
- // line$object$ module classes. The previous interpreter did not
- // have the project because the module class was written without the final `$'
- // and therefore escaped the purge. We can turn this back on once
- // we drop the final `$' from module classes.
- out.flush()
+ private def newReporter =
+ new ConsoleReporter(Console.in, out) {
+ override def printMessage(msg: String) =
+ if (!delayOutput) {
+ out.print(/*clean*/(msg) + "\n")
+ // Suppress clean for now for compiler messages
+ // Otherwise we will completely delete all references to
+ // line$object$ module classes. The previous interpreter did not
+ // have the problem because the module class was written without the final `$'
+ // and therefore escaped the purge. We can turn this back on once
+ // we drop the final `$' from module classes.
+ out.flush()
+ } else {
+ previousOutput += (/*clean*/(msg) + "\n")
+ }
}
- }
/** the previous requests this interpreter has processed */
private val prevRequests = new ArrayBuffer[Request]()
@@ -107,8 +140,6 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
/** the compiler's classpath, as URL's */
val compilerClasspath: List[URL] = ictx.platform.classPath(ictx).asURLs
- protected def parentClassLoader: ClassLoader = classOf[Interpreter].getClassLoader
-
/* A single class loader is used for all commands interpreted by this Interpreter.
It would also be possible to create a new class loader for each command
to interpret. The advantages of the current approach are:
@@ -124,8 +155,10 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
*/
/** class loader used to load compiled code */
val classLoader: ClassLoader = {
- val parent = new URLClassLoader(compilerClasspath.toArray, parentClassLoader)
- new AbstractFileClassLoader(virtualDirectory, parent)
+ lazy val parent = new URLClassLoader(compilerClasspath.toArray,
+ classOf[Interpreter].getClassLoader)
+
+ new AbstractFileClassLoader(virtualDirectory, parentClassLoader.getOrElse(parent))
}
// Set the current Java "context" class loader to this interpreter's class loader
@@ -175,19 +208,23 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
// if (prevRequests.isEmpty)
// new Run(this) // initialize the compiler // (not sure this is needed)
// parse
- parse(indentCode(line)) match {
+ parse(line) match {
case None => Interpreter.Incomplete
case Some(Nil) => Interpreter.Error // parse error or empty input
case Some(tree :: Nil) if tree.isTerm && !tree.isInstanceOf[Assign] =>
+ previousOutput.clear() // clear previous error reporting
interpret(s"val $newVarName =\n$line")
case Some(trees) =>
+ previousOutput.clear() // clear previous error reporting
val req = new Request(line, newLineName)
if (!req.compile())
Interpreter.Error // an error happened during compilation, e.g. a type error
else {
- val (interpreterResultString, succeeded) = req.loadAndRun()
- if (printResults || !succeeded)
- out.print(clean(interpreterResultString))
+ val (resultStrings, succeeded) = req.loadAndRun()
+ if (delayOutput)
+ previousOutput ++= resultStrings.map(clean)
+ else if (printResults || !succeeded)
+ resultStrings.foreach(x => out.print(clean(x)))
if (succeeded) {
prevRequests += req
Interpreter.Success
@@ -197,6 +234,65 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
}
}
+ private def loadAndSetValue(objectName: String, value: AnyRef) = {
+ /** This terrible string is the wrapped class's full name inside the
+ * classloader:
+ * lineX$object$$iw$$iw$list$object
+ */
+ val objName: String = List(
+ currentLineName + INTERPRETER_WRAPPER_SUFFIX,
+ INTERPRETER_IMPORT_WRAPPER,
+ INTERPRETER_IMPORT_WRAPPER,
+ objectName
+ ).mkString("$")
+
+ try {
+ val resObj: Class[_] = Class.forName(objName, true, classLoader)
+ val setMethod = resObj.getDeclaredMethods.find(_.getName == "set")
+
+ setMethod.fold(false) { method =>
+ method.invoke(resObj, value) == null
+ }
+ } catch {
+ case NonFatal(_) =>
+ // Unable to set value on object due to exception during reflection
+ false
+ }
+ }
+
+ /** This bind is implemented by creating an object with a set method and a
+ * field `value`. The value is then set via Java reflection.
+ *
+ * Example: We want to bind a value `List(1,2,3)` to identifier `list` from
+ * sbt. The bind method accomplishes this by creating the following:
+ * {{{
+ * object ContainerObjectWithUniqueID {
+ * var value: List[Int] = _
+ * def set(x: Any) = value = x.asInstanceOf[List[Int]]
+ * }
+ * val list = ContainerObjectWithUniqueID.value
+ * }}}
+ *
+ * Between the object being created and the value being assigned, the value
+ * inside the object is set via reflection.
+ */
+ override def bind(id: String, boundType: String, value: AnyRef)(implicit ctx: Context): Interpreter.Result =
+ interpret(
+ """
+ |object %s {
+ | var value: %s = _
+ | def set(x: Any) = value = x.asInstanceOf[%s]
+ |}
+ """.stripMargin.format(id + INTERPRETER_WRAPPER_SUFFIX, boundType, boundType)
+ ) match {
+ case Interpreter.Success if loadAndSetValue(id + INTERPRETER_WRAPPER_SUFFIX, value) =>
+ val line = "val %s = %s.value".format(id, id + INTERPRETER_WRAPPER_SUFFIX)
+ interpret(line)
+ case Interpreter.Error | Interpreter.Incomplete =>
+ out.println("Set failed in bind(%s, %s, %s)".format(id, boundType, value))
+ Interpreter.Error
+ }
+
/** Trait collecting info about one of the statements of an interpreter request */
private trait StatementInfo {
/** The statement */
@@ -220,9 +316,13 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
/** One line of code submitted by the user for interpretation */
private class Request(val line: String, val lineName: String)(implicit ctx: Context) {
- private val trees = parse(line) match {
- case Some(ts) => ts
- case None => Nil
+ private val trees = {
+ val parsed = parse(line)
+ previousOutput.clear() // clear previous error reporting
+ parsed match {
+ case Some(ts) => ts
+ case None => Nil
+ }
}
/** name to use for the object that will compute "line" */
@@ -234,6 +334,7 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
private def chooseHandler(stat: Tree): StatementHandler = stat match {
case stat: DefDef => new DefHandler(stat)
case stat: ValDef => new ValHandler(stat)
+ case stat: PatDef => new PatHandler(stat)
case stat @ Assign(Ident(_), _) => new AssignHandler(stat)
case stat: ModuleDef => new ModuleHandler(stat)
case stat: TypeDef if stat.isClassDef => new ClassHandler(stat)
@@ -269,9 +370,9 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
private def objectSourceCode: String =
stringFrom { code =>
// header for the wrapper object
- code.println("object " + objectName + " {")
+ code.println(s"object $objectName{")
code.print(importsPreamble)
- code.println(indentCode(toCompute))
+ code.println(toCompute)
handlers.foreach(_.extraCodeToEvaluate(this,code))
code.println(importsTrailer)
//end the wrapper object
@@ -286,9 +387,9 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
from objectSourceCode */
private def resultObjectSourceCode: String =
stringFrom(code => {
- code.println("object " + resultObjectName)
+ code.println(s"object $resultObjectName")
code.println("{ val result: String = {")
- code.println(objectName + accessPath + ";") // evaluate the object, to make sure its constructor is run
+ code.println(s"$objectName$accessPath;") // evaluate the object, to make sure its constructor is run
code.print("(\"\"") // print an initial empty string, so later code can
// uniformly be: + morestuff
handlers.foreach(_.resultExtractionCode(this, code))
@@ -356,24 +457,53 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
names1 ++ names2
}
+ /** Sets both System.{out,err} and Console.{out,err} to supplied
+ * `os: OutputStream`
+ */
+ private def withOutput[T](os: ByteOutputStream)(op: ByteOutputStream => T) = {
+ val ps = new PrintStream(os)
+ val oldOut = System.out
+ val oldErr = System.err
+ System.setOut(ps)
+ System.setErr(ps)
+
+ try {
+ Console.withOut(os)(Console.withErr(os)(op(os)))
+ } finally {
+ System.setOut(oldOut)
+ System.setErr(oldErr)
+ }
+ }
+
/** load and run the code using reflection.
- * @return A pair consisting of the run's result as a string, and
+ * @return A pair consisting of the run's result as a `List[String]`, and
* a boolean indicating whether the run succeeded without throwing
* an exception.
*/
- def loadAndRun(): (String, Boolean) = {
+ def loadAndRun(): (List[String], Boolean) = {
val interpreterResultObject: Class[_] =
Class.forName(resultObjectName, true, classLoader)
- val resultValMethod: java.lang.reflect.Method =
+ val valMethodRes: java.lang.reflect.Method =
interpreterResultObject.getMethod("result")
try {
- (resultValMethod.invoke(interpreterResultObject).toString, true)
+ withOutput(new ByteOutputStream) { ps =>
+ val rawRes = valMethodRes.invoke(interpreterResultObject).toString
+ val res =
+ if (ictx.useColors) new String(SyntaxHighlighting(rawRes).toArray)
+ else rawRes
+ val prints = ps.toString("utf-8")
+ val printList = if (prints != "") prints :: Nil else Nil
+
+ if (!delayOutput) out.print(prints)
+
+ (printList :+ res, true)
+ }
} catch {
case NonFatal(ex) =>
def cause(ex: Throwable): Throwable =
if (ex.getCause eq null) ex else cause(ex.getCause)
val orig = cause(ex)
- (stringFrom(str => orig.printStackTrace(str)), false)
+ (stringFrom(str => orig.printStackTrace(str)) :: Nil, false)
}
}
@@ -477,7 +607,7 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
addWrapper()
if (handler.statement.isInstanceOf[Import])
- preamble.append(handler.statement.toString + ";\n")
+ preamble.append(handler.statement.show + ";\n")
// give wildcard imports a import wrapper all to their own
if (handler.importsWildcard)
@@ -539,29 +669,65 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
private class GenericHandler(statement: Tree) extends StatementHandler(statement)
- private class ValHandler(statement: ValDef) extends StatementHandler(statement) {
- override val boundNames = List(statement.name)
+ private abstract class ValOrPatHandler(statement: Tree)
+ extends StatementHandler(statement) {
+ override val boundNames: List[Name] = _boundNames
override def valAndVarNames = boundNames
override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
- val vname = statement.name
- if (!statement.mods.is(Flags.AccessFlags) &&
- !(isGeneratedVarName(vname.toString) &&
- req.typeOf(vname.encode) == "Unit")) {
- val prettyName = vname.decode
- code.print(" + \"" + prettyName + ": " +
- string2code(req.typeOf(vname)) +
- " = \" + " +
- " (if(" +
- req.fullPath(vname) +
- ".asInstanceOf[AnyRef] != null) " +
- " ((if(" +
- req.fullPath(vname) +
- ".toString().contains('\\n')) " +
- " \"\\n\" else \"\") + " +
- req.fullPath(vname) + ".toString() + \"\\n\") else \"null\\n\") ")
- }
+ if (!shouldShowResult(req)) return
+ val resultExtractors = boundNames.map(name => resultExtractor(req, name))
+ code.print(resultExtractors.mkString(""))
+ }
+
+ private def resultExtractor(req: Request, varName: Name): String = {
+ val prettyName = varName.decode
+ val varType = string2code(req.typeOf(varName))
+ val fullPath = req.fullPath(varName)
+
+ s""" + "$prettyName: $varType = " + {
+ | if ($fullPath.asInstanceOf[AnyRef] != null) {
+ | (if ($fullPath.toString().contains('\\n')) "\\n" else "") +
+ | $fullPath.toString() + "\\n"
+ | } else {
+ | "null\\n"
+ | }
+ |}""".stripMargin
+ }
+
+ protected def _boundNames: List[Name]
+ protected def shouldShowResult(req: Request): Boolean
+ }
+
+ private class ValHandler(statement: ValDef) extends ValOrPatHandler(statement) {
+ override def _boundNames = List(statement.name)
+
+ override def shouldShowResult(req: Request): Boolean =
+ !statement.mods.is(Flags.AccessFlags) &&
+ !(isGeneratedVarName(statement.name.toString) &&
+ req.typeOf(statement.name.encode) == "Unit")
+ }
+
+
+ private class PatHandler(statement: PatDef) extends ValOrPatHandler(statement) {
+ override def _boundNames = statement.pats.flatMap(findVariableNames)
+
+ override def shouldShowResult(req: Request): Boolean =
+ !statement.mods.is(Flags.AccessFlags)
+
+ private def findVariableNames(tree: Tree): List[Name] = tree match {
+ case Ident(name) if name.toString != "_" => List(name)
+ case _ => VariableNameFinder(Nil, tree).reverse
}
+
+ private object VariableNameFinder extends UntypedDeepFolder[List[Name]](
+ (acc: List[Name], t: Tree) => t match {
+ case _: BackquotedIdent => acc
+ case Ident(name) if name.isVariableName && name.toString != "_" => name :: acc
+ case Bind(name, _) if name.isVariableName => name :: acc
+ case _ => acc
+ }
+ )
}
private class DefHandler(defDef: DefDef) extends StatementHandler(defDef) {
@@ -582,12 +748,12 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
override val valAndVarNames = List(helperName)
override def extraCodeToEvaluate(req: Request, code: PrintWriter): Unit = {
- code.println("val " + helperName + " = " + statement.lhs + ";")
+ code.println(i"val $helperName = ${statement.lhs};")
}
/** Print out lhs instead of the generated varName */
override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
- code.print(" + \"" + lhs + ": " +
+ code.print(" + \"" + lhs.show + ": " +
string2code(req.typeOf(helperName.encode)) +
" = \" + " +
string2code(req.fullPath(helperName))
@@ -645,7 +811,7 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
private class ImportHandler(imp: Import) extends StatementHandler(imp) {
override def resultExtractionCode(req: Request, code: PrintWriter): Unit = {
- code.println("+ \"" + imp.toString + "\\n\"")
+ code.println("+ \"" + imp.show + "\\n\"")
}
def isWildcardSelector(tree: Tree) = tree match {
@@ -678,6 +844,9 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
INTERPRETER_LINE_PREFIX + num
}
+ private def currentLineName =
+ INTERPRETER_LINE_PREFIX + (nextLineNo - 1)
+
/** next result variable number to use */
private var nextVarNameNo = 0
@@ -710,7 +879,7 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
val stringWriter = new StringWriter()
val stream = new NewLinePrintWriter(stringWriter)
writer(stream)
- stream.close
+ stream.close()
stringWriter.toString
}
@@ -725,29 +894,15 @@ class CompilingInterpreter(out: PrintWriter, ictx: Context) extends Compiler wit
return str
val trailer = "..."
- if (maxpr >= trailer.length+1)
- return str.substring(0, maxpr-3) + trailer
-
- str.substring(0, maxpr)
+ if (maxpr >= trailer.length-1)
+ str.substring(0, maxpr-3) + trailer + "\n"
+ else
+ str.substring(0, maxpr-1)
}
/** Clean up a string for output */
private def clean(str: String)(implicit ctx: Context) =
truncPrintString(stripWrapperGunk(str))
-
- /** Indent some code by the width of the scala> prompt.
- * This way, compiler error messages read better.
- */
- def indentCode(code: String) = {
- val spaces = " "
-
- stringFrom(str =>
- for (line <- code.lines) {
- str.print(spaces)
- str.print(line + "\n")
- str.flush()
- })
- }
}
/** Utility methods for the Interpreter. */
diff --git a/src/dotty/tools/dotc/repl/InteractiveReader.scala b/src/dotty/tools/dotc/repl/InteractiveReader.scala
index 96c55ebd0..07ce23717 100644
--- a/src/dotty/tools/dotc/repl/InteractiveReader.scala
+++ b/src/dotty/tools/dotc/repl/InteractiveReader.scala
@@ -2,30 +2,19 @@ package dotty.tools
package dotc
package repl
+import dotc.core.Contexts.Context
+
/** Reads lines from an input stream */
trait InteractiveReader {
def readLine(prompt: String): String
val interactive: Boolean
}
-/** TODO Enable jline support.
- * The current Scala REPL know how to do this flexibly.
+/** The current Scala REPL knows how to do this flexibly.
*/
object InteractiveReader {
- /** Create an interactive reader. Uses JLine if the
- * library is available, but otherwise uses a
- * SimpleReader. */
- def createDefault(): InteractiveReader = new SimpleReader()
- /*
- {
- try {
- new JLineReader
- } catch {
- case e =>
- //out.println("jline is not available: " + e) //debug
- new SimpleReader()
- }
+ /** Create an interactive reader */
+ def createDefault(in: Interpreter)(implicit ctx: Context): InteractiveReader = {
+ new AmmoniteReader(in)
}
-*/
-
}
diff --git a/src/dotty/tools/dotc/repl/Interpreter.scala b/src/dotty/tools/dotc/repl/Interpreter.scala
index ea587a097..edcc5b153 100644
--- a/src/dotty/tools/dotc/repl/Interpreter.scala
+++ b/src/dotty/tools/dotc/repl/Interpreter.scala
@@ -25,12 +25,21 @@ object Interpreter {
trait Interpreter {
import Interpreter._
- /** Interpret one line of input. All feedback, including parse errors
- * and evaluation results, are printed via the context's reporter.
- * reporter. Values defined are available for future interpreted strings.
- */
+ /** Interpret one line of input. All feedback, including parse errors and
+ * evaluation results, are printed via the context's reporter. Values
+ * defined are available for future interpreted strings.
+ */
def interpret(line: String)(implicit ctx: Context): Result
+ /** Tries to bind an id to a value, returns the outcome of trying to bind */
+ def bind(id: String, boundType: String, value: AnyRef)(implicit ctx: Context): Result
+
/** Suppress output during evaluation of `operation`. */
def beQuietDuring[T](operation: => T): T
+
+ /** Suppresses output and saves it for `lastOutput` to collect */
+ def delayOutputDuring[T](operation: => T): T
+
+ /** Gets the last output not printed immediately */
+ def lastOutput(): Seq[String]
}
diff --git a/src/dotty/tools/dotc/repl/InterpreterLoop.scala b/src/dotty/tools/dotc/repl/InterpreterLoop.scala
index eedec3c82..8b1000f2e 100644
--- a/src/dotty/tools/dotc/repl/InterpreterLoop.scala
+++ b/src/dotty/tools/dotc/repl/InterpreterLoop.scala
@@ -18,16 +18,16 @@ import scala.concurrent.ExecutionContext.Implicits.global
* After instantiation, clients should call the `run` method.
*
* @author Moez A. Abdel-Gawad
- * @author Lex Spoon
+ * @author Lex Spoon
* @author Martin Odersky
*/
-class InterpreterLoop(
- compiler: Compiler,
- private var in: InteractiveReader,
- out: PrintWriter)(implicit ctx: Context) {
+class InterpreterLoop(compiler: Compiler, config: REPL.Config)(implicit ctx: Context) {
+ import config._
val interpreter = compiler.asInstanceOf[Interpreter]
+ private var in = input(interpreter)
+
/** The context class loader at the time this object was created */
protected val originalClassLoader =
Thread.currentThread.getContextClassLoader
@@ -52,38 +52,22 @@ class InterpreterLoop(
/** print a friendly help message */
def printHelp(): Unit = {
printWelcome()
- out.println("Type :load followed by a filename to load a Scala file.")
- out.println("Type :replay to reset execution and replay all previous commands.")
- out.println("Type :quit to exit the interpreter.")
+ output.println("Type :load followed by a filename to load a Scala file.")
+ output.println("Type :replay to reset execution and replay all previous commands.")
+ output.println("Type :quit to exit the interpreter.")
}
/** Print a welcome message */
def printWelcome(): Unit = {
- out.println(s"Welcome to Scala$version " + " (" +
+ output.println(s"Welcome to Scala$version " + " (" +
System.getProperty("java.vm.name") + ", Java " + System.getProperty("java.version") + ")." )
- out.println("Type in expressions to have them evaluated.")
- out.println("Type :help for more information.")
- out.flush()
+ output.println("Type in expressions to have them evaluated.")
+ output.println("Type :help for more information.")
+ output.flush()
}
- /** Prompt to print when awaiting input */
- val prompt = "scala> "
- val continuationPrompt = " | "
-
val version = ".next (pre-alpha)"
- /** The first interpreted command always takes a couple of seconds
- * due to classloading. To bridge the gap, we warm up the interpreter
- * by letting it interpret a dummy line while waiting for the first
- * line of input to be entered.
- */
- def firstLine(): String = {
- val futLine = Future(in.readLine(prompt))
- interpreter.beQuietDuring(
- interpreter.interpret("val theAnswerToLifeInTheUniverseAndEverything = 21 * 2"))
- Await.result(futLine, Duration.Inf)
- }
-
/** The main read-eval-print loop for the interpreter. It calls
* `command()` for each line of input.
*/
@@ -92,42 +76,33 @@ class InterpreterLoop(
val (keepGoing, finalLineOpt) = command(line)
if (keepGoing) {
finalLineOpt.foreach(addReplay)
- out.flush()
+ output.flush()
repl()
}
}
/** interpret all lines from a specified file */
def interpretAllFrom(filename: String): Unit = {
- val fileIn = try {
- new FileReader(filename)
+ import java.nio.file.{Files, Paths}
+ import scala.collection.JavaConversions._
+ try {
+ val lines = Files.readAllLines(Paths.get(filename)).mkString("\n")
+ output.println("Loading " + filename + "...")
+ output.flush
+ interpreter.interpret(lines)
} catch {
case _: IOException =>
- out.println("Error opening file: " + filename)
- return
- }
- val oldIn = in
- val oldReplay = replayCommandsRev
- try {
- val inFile = new BufferedReader(fileIn)
- in = new SimpleReader(inFile, out, false)
- out.println("Loading " + filename + "...")
- out.flush
- repl()
- } finally {
- in = oldIn
- replayCommandsRev = oldReplay
- fileIn.close
+ output.println("Error opening file: " + filename)
}
}
/** create a new interpreter and replay all commands so far */
def replay(): Unit = {
for (cmd <- replayCommands) {
- out.println("Replaying: " + cmd)
- out.flush() // because maybe cmd will have its own output
+ output.println("Replaying: " + cmd)
+ output.flush() // because maybe cmd will have its own output
command(cmd)
- out.println
+ output.println
}
}
@@ -138,12 +113,12 @@ class InterpreterLoop(
def withFile(command: String)(action: String => Unit): Unit = {
val spaceIdx = command.indexOf(' ')
if (spaceIdx <= 0) {
- out.println("That command requires a filename to be specified.")
+ output.println("That command requires a filename to be specified.")
return
}
val filename = command.substring(spaceIdx).trim
if (!new File(filename).exists) {
- out.println("That file does not exist")
+ output.println("That file does not exist")
return
}
action(filename)
@@ -153,6 +128,7 @@ class InterpreterLoop(
val quitRegexp = ":q(u(i(t)?)?)?"
val loadRegexp = ":l(o(a(d)?)?)?.*"
val replayRegexp = ":r(e(p(l(a(y)?)?)?)?)?.*"
+ val lastOutput = interpreter.lastOutput()
var shouldReplay: Option[String] = None
@@ -169,36 +145,38 @@ class InterpreterLoop(
else if (line matches replayRegexp)
replay()
else if (line startsWith ":")
- out.println("Unknown command. Type :help for help.")
+ output.println("Unknown command. Type :help for help.")
else
- shouldReplay = interpretStartingWith(line)
+ shouldReplay = lastOutput match { // don't interpret twice
+ case Nil => interpretStartingWith(line)
+ case oldRes =>
+ oldRes foreach output.print
+ Some(line)
+ }
(true, shouldReplay)
}
+ def silentlyRun(cmds: List[String]): Unit = cmds.foreach { cmd =>
+ interpreter.beQuietDuring(interpreter.interpret(cmd))
+ }
+
+ def silentlyBind(values: Array[(String, Any)]): Unit = values.foreach { case (id, value) =>
+ interpreter.beQuietDuring(
+ interpreter.bind(id, value.asInstanceOf[AnyRef].getClass.getName, value.asInstanceOf[AnyRef]))
+ }
+
/** Interpret expressions starting with the first line.
* Read lines until a complete compilation unit is available
* or until a syntax error has been seen. If a full unit is
* read, go ahead and interpret it. Return the full string
* to be recorded for replay, if any.
*/
- def interpretStartingWith(code: String): Option[String] = {
+ def interpretStartingWith(code: String): Option[String] =
interpreter.interpret(code) match {
case Interpreter.Success => Some(code)
- case Interpreter.Error => None
- case Interpreter.Incomplete =>
- if (in.interactive && code.endsWith("\n\n")) {
- out.println("You typed two blank lines. Starting a new command.")
- None
- } else {
- val nextLine = in.readLine(continuationPrompt)
- if (nextLine == null)
- None // end of file
- else
- interpretStartingWith(code + "\n" + nextLine)
- }
+ case _ => None
}
- }
/*
def loadFiles(settings: Settings) {
settings match {
@@ -207,7 +185,7 @@ class InterpreterLoop(
val cmd = ":load " + filename
command(cmd)
replayCommandsRev = cmd :: replayCommandsRev
- out.println()
+ output.println()
}
case _ =>
}
@@ -218,7 +196,10 @@ class InterpreterLoop(
try {
if (!ctx.reporter.hasErrors) { // if there are already errors, no sense to continue
printWelcome()
- repl(firstLine())
+ silentlyRun(config.initialCommands)
+ silentlyBind(config.boundValues)
+ repl(in.readLine(prompt))
+ silentlyRun(config.cleanupCommands)
}
} finally {
closeInterpreter()
diff --git a/src/dotty/tools/dotc/repl/Main.scala b/src/dotty/tools/dotc/repl/Main.scala
index b2b92299e..48ed3e788 100644
--- a/src/dotty/tools/dotc/repl/Main.scala
+++ b/src/dotty/tools/dotc/repl/Main.scala
@@ -13,7 +13,6 @@ package repl
*
* There are a number of TODOs:
*
- * - re-enable jline support (urgent, easy, see TODO in InteractiveReader.scala)
* - figure out why we can launch REPL only with `java`, not with `scala`.
* - make a doti command (urgent, easy)
* - create or port REPL tests (urgent, intermediate)
@@ -26,4 +25,4 @@ package repl
* - integrate with presentation compiler for command completion (not urgent, hard)
*/
/** The main entry point of the REPL */
-object Main extends REPL \ No newline at end of file
+object Main extends REPL
diff --git a/src/dotty/tools/dotc/repl/REPL.scala b/src/dotty/tools/dotc/repl/REPL.scala
index 2d6a3c742..211e3c931 100644
--- a/src/dotty/tools/dotc/repl/REPL.scala
+++ b/src/dotty/tools/dotc/repl/REPL.scala
@@ -4,7 +4,10 @@ package repl
import core.Contexts.Context
import reporting.Reporter
-import java.io.{BufferedReader, File, FileReader, PrintWriter}
+import io.{AbstractFile, PlainFile, VirtualDirectory}
+import scala.reflect.io.{PlainDirectory, Directory}
+import java.io.{BufferedReader, File => JFile, FileReader, PrintWriter}
+import java.net.{URL, URLClassLoader}
/** A compiler which stays resident between runs.
* Usage:
@@ -23,27 +26,75 @@ import java.io.{BufferedReader, File, FileReader, PrintWriter}
*/
class REPL extends Driver {
- /** The default input reader */
- def input(implicit ctx: Context): InteractiveReader = {
- val emacsShell = System.getProperty("env.emacs", "") != ""
- //println("emacsShell="+emacsShell) //debug
- if (ctx.settings.Xnojline.value || emacsShell) new SimpleReader()
- else InteractiveReader.createDefault()
- }
+ lazy val config = new REPL.Config
- /** The defult output writer */
- def output: PrintWriter = new NewLinePrintWriter(new ConsoleWriter, true)
+ override def setup(args: Array[String], rootCtx: Context): (List[String], Context) = {
+ val (strs, ctx) = super.setup(args, rootCtx)
+ (strs, config.context(ctx))
+ }
override def newCompiler(implicit ctx: Context): Compiler =
- new repl.CompilingInterpreter(output, ctx)
+ new repl.CompilingInterpreter(config.output, ctx, config.classLoader)
override def sourcesRequired = false
override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Reporter = {
if (fileNames.isEmpty)
- new InterpreterLoop(compiler, input, output).run()
+ new InterpreterLoop(compiler, config).run()
else
ctx.error(s"don't now what to do with $fileNames%, %")
ctx.reporter
}
}
+
+object REPL {
+ class Config {
+ val prompt = "scala> "
+ val continuationPrompt = " "
+ val version = ".next (pre-alpha)"
+
+ def context(ctx: Context): Context = ctx
+
+ /** The first interpreted commands always take a couple of seconds due to
+ * classloading. To bridge the gap, we warm up the interpreter by letting
+ * it interpret at least a dummy line while waiting for the first line of
+ * input to be entered.
+ */
+ val initialCommands: List[String] =
+ "val theAnswerToLifeInTheUniverseAndEverything = 21 * 2" :: Nil
+
+ /** Before exiting, the interpreter will also run the cleanup commands
+ * issued in the variable below. This is useful if your REPL creates
+ * things during its run that should be dealt with before shutdown.
+ */
+ val cleanupCommands: List[String] = Nil
+
+ /** Initial values in the REPL can also be bound from runtime. Override
+ * this variable in the following manner to bind a variable at the start
+ * of the REPL session:
+ *
+ * {{{
+ * override val boundValues = Array("exampleList" -> List(1, 1, 2, 3, 5))
+ * }}}
+ *
+ * This is useful if you've integrated the REPL as part of your project
+ * and already have objects available during runtime that you'd like to
+ * inspect.
+ */
+ val boundValues: Array[(String, Any)] = Array.empty[(String, Any)]
+
+ /** To pass a custom ClassLoader to the Dotty REPL, override this value */
+ val classLoader: Option[ClassLoader] = None
+
+ /** The default input reader */
+ def input(in: Interpreter)(implicit ctx: Context): InteractiveReader = {
+ val emacsShell = System.getProperty("env.emacs", "") != ""
+ //println("emacsShell="+emacsShell) //debug
+ if (emacsShell) new SimpleReader()
+ else InteractiveReader.createDefault(in)
+ }
+
+ /** The default output writer */
+ def output: PrintWriter = new NewLinePrintWriter(new ConsoleWriter, true)
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/SimpleReader.scala b/src/dotty/tools/dotc/repl/SimpleReader.scala
index 9fd563382..5fab47bbe 100644
--- a/src/dotty/tools/dotc/repl/SimpleReader.scala
+++ b/src/dotty/tools/dotc/repl/SimpleReader.scala
@@ -3,6 +3,7 @@ package dotc
package repl
import java.io.{BufferedReader, PrintWriter}
+import dotc.core.Contexts.Context
/** Reads using standard JDK API */
diff --git a/src/dotty/tools/dotc/repl/ammonite/Ansi.scala b/src/dotty/tools/dotc/repl/ammonite/Ansi.scala
new file mode 100644
index 000000000..37c4de7b5
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/Ansi.scala
@@ -0,0 +1,256 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+object Ansi {
+
+ /**
+ * Represents a single, atomic ANSI escape sequence that results in a
+ * color, background or decoration being added to the output.
+ *
+ * @param escape the actual ANSI escape sequence corresponding to this Attr
+ */
+ case class Attr private[Ansi](escape: Option[String], resetMask: Int, applyMask: Int) {
+ override def toString = escape.getOrElse("") + Console.RESET
+ def transform(state: Short) = ((state & ~resetMask) | applyMask).toShort
+
+ def matches(state: Short) = (state & resetMask) == applyMask
+ def apply(s: Ansi.Str) = s.overlay(this, 0, s.length)
+ }
+
+ object Attr {
+ val Reset = new Attr(Some(Console.RESET), Short.MaxValue, 0)
+
+ /**
+ * Quickly convert string-colors into [[Ansi.Attr]]s
+ */
+ val ParseMap = {
+ val pairs = for {
+ cat <- categories
+ color <- cat.all
+ str <- color.escape
+ } yield (str, color)
+ (pairs :+ (Console.RESET -> Reset)).toMap
+ }
+ }
+
+ /**
+ * Represents a set of [[Ansi.Attr]]s all occupying the same bit-space
+ * in the state `Short`
+ */
+ sealed abstract class Category() {
+ val mask: Int
+ val all: Seq[Attr]
+ lazy val bitsMap = all.map{ m => m.applyMask -> m}.toMap
+ def makeAttr(s: Option[String], applyMask: Int) = {
+ new Attr(s, mask, applyMask)
+ }
+ }
+
+ object Color extends Category {
+
+ val mask = 15 << 7
+ val Reset = makeAttr(Some("\u001b[39m"), 0 << 7)
+ val Black = makeAttr(Some(Console.BLACK), 1 << 7)
+ val Red = makeAttr(Some(Console.RED), 2 << 7)
+ val Green = makeAttr(Some(Console.GREEN), 3 << 7)
+ val Yellow = makeAttr(Some(Console.YELLOW), 4 << 7)
+ val Blue = makeAttr(Some(Console.BLUE), 5 << 7)
+ val Magenta = makeAttr(Some(Console.MAGENTA), 6 << 7)
+ val Cyan = makeAttr(Some(Console.CYAN), 7 << 7)
+ val White = makeAttr(Some(Console.WHITE), 8 << 7)
+
+ val all = Vector(
+ Reset, Black, Red, Green, Yellow,
+ Blue, Magenta, Cyan, White
+ )
+ }
+
+ object Back extends Category {
+ val mask = 15 << 3
+
+ val Reset = makeAttr(Some("\u001b[49m"), 0 << 3)
+ val Black = makeAttr(Some(Console.BLACK_B), 1 << 3)
+ val Red = makeAttr(Some(Console.RED_B), 2 << 3)
+ val Green = makeAttr(Some(Console.GREEN_B), 3 << 3)
+ val Yellow = makeAttr(Some(Console.YELLOW_B), 4 << 3)
+ val Blue = makeAttr(Some(Console.BLUE_B), 5 << 3)
+ val Magenta = makeAttr(Some(Console.MAGENTA_B), 6 << 3)
+ val Cyan = makeAttr(Some(Console.CYAN_B), 7 << 3)
+ val White = makeAttr(Some(Console.WHITE_B), 8 << 3)
+
+ val all = Seq(
+ Reset, Black, Red, Green, Yellow,
+ Blue, Magenta, Cyan, White
+ )
+ }
+
+ object Bold extends Category {
+ val mask = 1 << 0
+ val On = makeAttr(Some(Console.BOLD), 1 << 0)
+ val Off = makeAttr(None , 0 << 0)
+ val all = Seq(On, Off)
+ }
+
+ object Underlined extends Category {
+ val mask = 1 << 1
+ val On = makeAttr(Some(Console.UNDERLINED), 1 << 1)
+ val Off = makeAttr(None, 0 << 1)
+ val all = Seq(On, Off)
+ }
+
+ object Reversed extends Category {
+ val mask = 1 << 2
+ val On = makeAttr(Some(Console.REVERSED), 1 << 2)
+ val Off = makeAttr(None, 0 << 2)
+ val all = Seq(On, Off)
+ }
+
+ val hardOffMask = Bold.mask | Underlined.mask | Reversed.mask
+ val categories = List(Color, Back, Bold, Underlined, Reversed)
+
+ object Str {
+ @sharable lazy val ansiRegex = "\u001B\\[[;\\d]*m".r
+
+ implicit def parse(raw: CharSequence): Str = {
+ val chars = new Array[Char](raw.length)
+ val colors = new Array[Short](raw.length)
+ var currentIndex = 0
+ var currentColor = 0.toShort
+
+ val matches = ansiRegex.findAllMatchIn(raw)
+ val indices = Seq(0) ++ matches.flatMap { m => Seq(m.start, m.end) } ++ Seq(raw.length)
+
+ for {
+ Seq(start, end) <- indices.sliding(2).toSeq
+ if start != end
+ } {
+ val frag = raw.subSequence(start, end).toString
+ if (frag.charAt(0) == '\u001b' && Attr.ParseMap.contains(frag)) {
+ currentColor = Attr.ParseMap(frag).transform(currentColor)
+ } else {
+ var i = 0
+ while(i < frag.length){
+ chars(currentIndex) = frag(i)
+ colors(currentIndex) = currentColor
+ i += 1
+ currentIndex += 1
+ }
+ }
+ }
+
+ Str(chars.take(currentIndex), colors.take(currentIndex))
+ }
+ }
+
+ /**
+ * An [[Ansi.Str]]'s `color`s array is filled with shorts, each representing
+ * the ANSI state of one character encoded in its bits. Each [[Attr]] belongs
+ * to a [[Category]] that occupies a range of bits within each short:
+ *
+ * 15 14 13 12 11 10 9 8 7 6 5 4 3 2 1 0
+ * |-----------| |--------| |--------| | | |bold
+ * | | | | |reversed
+ * | | | |underlined
+ * | | |foreground-color
+ * | |background-color
+ * |unused
+ *
+ *
+ * The `0000 0000 0000 0000` short corresponds to plain text with no decoration
+ *
+ */
+ type State = Short
+
+ /**
+ * Encapsulates a string with associated ANSI colors and text decorations.
+ *
+ * Contains some basic string methods, as well as some ansi methods to e.g.
+ * apply particular colors or other decorations to particular sections of
+ * the [[Ansi.Str]]. [[render]] flattens it out into a `java.lang.String`
+ * with all the colors present as ANSI escapes.
+ *
+ */
+ case class Str private(chars: Array[Char], colors: Array[State]) {
+ require(chars.length == colors.length)
+
+ def ++(other: Str) = Str(chars ++ other.chars, colors ++ other.colors)
+ def splitAt(index: Int) = {
+ val (leftChars, rightChars) = chars.splitAt(index)
+ val (leftColors, rightColors) = colors.splitAt(index)
+ (new Str(leftChars, leftColors), new Str(rightChars, rightColors))
+ }
+
+ def length = chars.length
+ override def toString = render
+
+ def plainText = new String(chars.toArray)
+ def render = {
+ // Pre-size StringBuilder with approximate size (ansi colors tend
+ // to be about 5 chars long) to avoid re-allocations during growth
+ val output = new StringBuilder(chars.length + colors.length * 5)
+
+
+ var currentState = 0.toShort
+ /**
+ * Emit the ansi escapes necessary to transition
+ * between two states, if necessary.
+ */
+ def emitDiff(nextState: Short) = if (currentState != nextState){
+ // Any of these transitions from 1 to 0 within the hardOffMask
+ // categories cannot be done with a single ansi escape, and need
+ // you to emit a RESET followed by re-building whatever ansi state
+ // you previous had from scratch
+ if ((currentState & ~nextState & hardOffMask) != 0){
+ output.append(Console.RESET)
+ currentState = 0
+ }
+
+ var categoryIndex = 0
+ while(categoryIndex < categories.length){
+ val cat = categories(categoryIndex)
+ if ((cat.mask & currentState) != (cat.mask & nextState)){
+ val attr = cat.bitsMap(nextState & cat.mask)
+
+ if (attr.escape.isDefined) {
+ output.append(attr.escape.get)
+ }
+ }
+ categoryIndex += 1
+ }
+ }
+
+ var i = 0
+ while(i < colors.length){
+ // Emit ANSI escapes to change colors where necessary
+ emitDiff(colors(i))
+ currentState = colors(i)
+ output.append(chars(i))
+ i += 1
+ }
+
+ // Cap off the end of the rendered string with any ansi escape
+ // codes necessary to reset the state to 0
+ emitDiff(0)
+ output.toString
+ }
+
+ /**
+ * Overlays the desired color over the [start, end) range of the [[Ansi.Str]].
+ */
+ def overlay(overlayColor: Attr, start: Int, end: Int) = {
+ require(end >= start,
+ s"end:$end must be greater than or equal to start:$start in AnsiStr#overlay call"
+ )
+ val colorsOut = new Array[Short](colors.length)
+ var i = 0
+ while(i < colors.length){
+ if (i >= start && i < end) colorsOut(i) = overlayColor.transform(colors(i))
+ else colorsOut(i) = colors(i)
+ i += 1
+ }
+ new Str(chars, colorsOut)
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/Filter.scala b/src/dotty/tools/dotc/repl/ammonite/Filter.scala
new file mode 100644
index 000000000..9d34bb0f2
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/Filter.scala
@@ -0,0 +1,61 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+object Filter {
+ def apply(id: String)(f: PartialFunction[TermInfo, TermAction]): Filter =
+ new Filter {
+ val op = f.lift
+ def identifier = id
+ }
+
+ def wrap(id: String)(f: TermInfo => Option[TermAction]): Filter =
+ new Filter {
+ val op = f
+ def identifier = id
+ }
+
+ /** Merges multiple [[Filter]]s into one. */
+ def merge(pfs: Filter*) = new Filter {
+ val op = (v1: TermInfo) => pfs.iterator.map(_.op(v1)).find(_.isDefined).flatten
+ def identifier = pfs.iterator.map(_.identifier).mkString(":")
+ }
+
+ val empty = Filter.merge()
+}
+
+/**
+ * The way you configure your terminal behavior; a trivial wrapper around a
+ * function, though you should provide a good `.toString` method to make
+ * debugging easier. The [[TermInfo]] and [[TermAction]] types are its
+ * interface to the terminal.
+ *
+ * [[Filter]]s are composed sequentially: if a filter returns `None` the next
+ * filter is tried, while if a filter returns `Some` that ends the cascade.
+ * While your `op` function interacts with the terminal purely through
+ * immutable case classes, the Filter itself is free to maintain its own state
+ * and mutate it whenever, even when returning `None` to continue the cascade.
+ */
+trait Filter {
+ val op: TermInfo => Option[TermAction]
+
+ /**
+ * the `.toString` of this object, except by making it separate we force
+ * the implementer to provide something and stop them from accidentally
+ * leaving it as the meaningless default.
+ */
+ def identifier: String
+ override def toString = identifier
+}
+
+/**
+ * A filter as an abstract class, letting you provide a [[filter]] instead of
+ * an `op`, automatically providing a good `.toString` for debugging, and
+ * providing a reasonable "place" inside the inheriting class/object to put
+ * state or helpers or other logic associated with the filter.
+ */
+abstract class DelegateFilter() extends Filter {
+ def filter: Filter
+ lazy val op = filter.op // lazy: `filter` may be a subclass val not yet initialized during construction
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala b/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala
new file mode 100644
index 000000000..c18b6a927
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/FilterTools.scala
@@ -0,0 +1,80 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+/**
+ * A collection of helpers to simplify the common case of building filters
+ */
+object FilterTools {
+ val ansiRegex = "\u001B\\[[;\\d]*."
+
+ def offsetIndex(buffer: Vector[Char], in: Int) = {
+ var splitIndex = 0
+ var length = 0
+
+ while(length < in) {
+ ansiRegex.r.findPrefixOf(buffer.drop(splitIndex)) match {
+ case None =>
+ splitIndex += 1
+ length += 1
+ case Some(s) =>
+ splitIndex += s.length
+ }
+ }
+ splitIndex
+ }
+
+ /**
+ * Shorthand to construct a filter in the common case where you're
+ * switching on the prefix of the input stream and want to run some
+ * transformation on the buffer/cursor
+ */
+ def Case(s: String)
+ (f: (Vector[Char], Int, TermInfo) => (Vector[Char], Int)) = new Filter {
+ val op = new PartialFunction[TermInfo, TermAction] {
+ def isDefinedAt(x: TermInfo) = {
+
+ def rec(i: Int, c: LazyList[Int]): Boolean = {
+ if (i >= s.length) true
+ else if (c.head == s(i)) rec(i + 1, c.tail)
+ else false
+ }
+ rec(0, x.ts.inputs)
+ }
+
+ def apply(v1: TermInfo) = {
+ val (buffer1, cursor1) = f(v1.ts.buffer, v1.ts.cursor, v1)
+ TermState(
+ v1.ts.inputs.dropPrefix(s.map(_.toInt)).get,
+ buffer1,
+ cursor1
+ )
+ }
+
+ }.lift
+ def identifier = "Case"
+ }
+
+ /** Shorthand for pattern matching on [[TermState]] */
+ val TS = TermState
+
+ def findChunks(b: Vector[Char], c: Int) = {
+ val chunks = Terminal.splitBuffer(b)
+ // The index of the first character in each chunk
+ val chunkStarts = chunks.inits.map(x => x.length + x.sum).toStream.reverse
+ // Index of the current chunk that contains the cursor
+ val chunkIndex = chunkStarts.indexWhere(_ > c) match {
+ case -1 => chunks.length-1
+ case x => x - 1
+ }
+ (chunks, chunkStarts, chunkIndex)
+ }
+
+ def firstRow(cursor: Int, buffer: Vector[Char], width: Int) =
+ cursor < width && (buffer.indexOf('\n') >= cursor || buffer.indexOf('\n') == -1)
+
+ def lastRow(cursor: Int, buffer: Vector[Char], width: Int) =
+ (buffer.length - cursor) < width &&
+ (buffer.lastIndexOf('\n') < cursor || buffer.lastIndexOf('\n') == -1)
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/LICENSE b/src/dotty/tools/dotc/repl/ammonite/LICENSE
new file mode 100644
index 000000000..b15103580
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/LICENSE
@@ -0,0 +1,25 @@
+License
+=======
+
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Li Haoyi (haoyi.sg@gmail.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE. \ No newline at end of file
diff --git a/src/dotty/tools/dotc/repl/ammonite/Protocol.scala b/src/dotty/tools/dotc/repl/ammonite/Protocol.scala
new file mode 100644
index 000000000..34d31aeca
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/Protocol.scala
@@ -0,0 +1,30 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+case class TermInfo(ts: TermState, width: Int)
+
+sealed trait TermAction
+case class Printing(ts: TermState, stdout: String) extends TermAction
+case class TermState(
+ inputs: LazyList[Int],
+ buffer: Vector[Char],
+ cursor: Int,
+ msg: Ansi.Str = ""
+) extends TermAction
+
+object TermState {
+ def unapply(ti: TermInfo): Option[(LazyList[Int], Vector[Char], Int, Ansi.Str)] =
+ TermState.unapply(ti.ts)
+
+ def unapply(ti: TermAction): Option[(LazyList[Int], Vector[Char], Int, Ansi.Str)] =
+ ti match {
+ case ts: TermState => TermState.unapply(ts)
+ case _ => None
+ }
+}
+
+case class ClearScreen(ts: TermState) extends TermAction
+case object Exit extends TermAction
+case class Result(s: String) extends TermAction
diff --git a/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala b/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala
new file mode 100644
index 000000000..d834cc10b
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/SpecialKeys.scala
@@ -0,0 +1,81 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+/**
+ * One place to assign all the esoteric control key input snippets to
+ * easy-to-remember names
+ */
+object SpecialKeys {
+
+ /**
+ * Lets you easily pattern match on characters modified by ctrl,
+ * or convert a character into its ctrl-ed version
+ */
+ object Ctrl {
+ def apply(c: Char) = (c - 96).toChar.toString
+ def unapply(i: Int): Option[Int] = Some(i + 96)
+ }
+
+ /**
+ * The string value you get when you hit the alt key
+ */
+ def Alt = "\u001b"
+
+
+ val Up = Alt+"[A"
+ val Down = Alt+"[B"
+ val Right = Alt+"[C"
+ val Left = Alt+"[D"
+
+ val Home = Alt+"OH"
+ val End = Alt+"OF"
+
+ // For some reason Screen makes these print different incantations
+ // from a normal snippet, so this causes issues like
+ // https://github.com/lihaoyi/Ammonite/issues/152 unless we special
+ // case them
+ val HomeScreen = Alt+"[1~"
+ val EndScreen = Alt+"[4~"
+
+ val ShiftUp = Alt+"[1;2A"
+ val ShiftDown = Alt+"[1;2B"
+ val ShiftRight = Alt+"[1;2C"
+ val ShiftLeft = Alt+"[1;2D"
+
+ val FnUp = Alt+"[5~"
+ val FnDown = Alt+"[6~"
+ val FnRight = Alt+"[F"
+ val FnLeft = Alt+"[H"
+
+ val AltUp = Alt*2+"[A"
+ val AltDown = Alt*2+"[B"
+ val AltRight = Alt*2+"[C"
+ val AltLeft = Alt*2+"[D"
+
+ val LinuxCtrlRight = Alt+"[1;5C"
+ val LinuxCtrlLeft = Alt+"[1;5D"
+
+ val FnAltUp = Alt*2+"[5~"
+ val FnAltDown = Alt*2+"[6~"
+ val FnAltRight = Alt+"[1;9F"
+ val FnAltLeft = Alt+"[1;9H"
+
+ // Same as fn-alt-{up, down}
+// val FnShiftUp = Alt*2+"[5~"
+// val FnShiftDown = Alt*2+"[6~"
+ val FnShiftRight = Alt+"[1;2F"
+ val FnShiftLeft = Alt+"[1;2H"
+
+ val AltShiftUp = Alt+"[1;10A"
+ val AltShiftDown = Alt+"[1;10B"
+ val AltShiftRight = Alt+"[1;10C"
+ val AltShiftLeft = Alt+"[1;10D"
+
+ // Same as fn-alt-{up, down}
+// val FnAltShiftUp = Alt*2+"[5~"
+// val FnAltShiftDown = Alt*2+"[6~"
+ val FnAltShiftRight = Alt+"[1;10F"
+ val FnAltShiftLeft = Alt+"[1;10H"
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/Terminal.scala b/src/dotty/tools/dotc/repl/ammonite/Terminal.scala
new file mode 100644
index 000000000..4b18b38e3
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/Terminal.scala
@@ -0,0 +1,320 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+
+import scala.annotation.tailrec
+import scala.collection.mutable
+
+/**
+ * The core logic around a terminal; it defines the base `filters` API
+ * through which anything (including basic cursor-navigation and typing)
+ * interacts with the terminal.
+ *
+ * Maintains basic invariants, such as "cursor should always be within
+ * the buffer", and "ansi terminal should reflect most up to date TermState"
+ */
+object Terminal {
+
+ /**
+ * Computes how tall a line of text is when wrapped at `width`.
+ *
+ * Even 0-character lines still take up one row!
+ *
+ * width = 2
+ * 0 -> 1
+ * 1 -> 1
+ * 2 -> 1
+ * 3 -> 2
+ * 4 -> 2
+ * 5 -> 3
+ */
+ def fragHeight(length: Int, width: Int) = math.max(1, (length - 1) / width + 1)
+
+ def splitBuffer(buffer: Vector[Char]) = {
+ val frags = mutable.Buffer.empty[Int]
+ frags.append(0)
+ for(c <- buffer){
+ if (c == '\n') frags.append(0)
+ else frags(frags.length - 1) = frags.last + 1
+ }
+ frags
+ }
+ def calculateHeight(buffer: Vector[Char],
+ width: Int,
+ prompt: String): Seq[Int] = {
+ val rowLengths = splitBuffer(buffer)
+
+ calculateHeight0(rowLengths, width - prompt.length)
+ }
+
+ /**
+ * Given a buffer with characters and newlines, calculates how high
+ * the buffer is and where the cursor goes inside of it.
+ */
+ def calculateHeight0(rowLengths: Seq[Int],
+ width: Int): Seq[Int] = {
+ val fragHeights =
+ rowLengths
+ .inits
+ .toVector
+ .reverse // We want shortest-to-longest, inits gives longest-to-shortest
+ .filter(_.nonEmpty) // Without the first empty prefix
+ .map{ x =>
+ fragHeight(
+ // If the frag barely fits on one line, give it
+ // an extra spot for the cursor on the next line
+ x.last + 1,
+ width
+ )
+ }
+// Debug("fragHeights " + fragHeights)
+ fragHeights
+ }
+
+ def positionCursor(cursor: Int,
+ rowLengths: Seq[Int],
+ fragHeights: Seq[Int],
+ width: Int) = {
+ var leftoverCursor = cursor
+ // Debug("leftoverCursor " + leftoverCursor)
+ var totalPreHeight = 0
+ var done = false
+ // Don't check if the cursor exceeds the last chunk, because
+ // even if it does there's nowhere else for it to go
+ for(i <- 0 until rowLengths.length -1 if !done) {
+ // length of frag and the '\n' after it
+ val delta = rowLengths(i) + 1
+ // Debug("delta " + delta)
+ val nextCursor = leftoverCursor - delta
+ if (nextCursor >= 0) {
+ // Debug("nextCursor " + nextCursor)
+ leftoverCursor = nextCursor
+ totalPreHeight += fragHeights(i)
+ }else done = true
+ }
+
+ val cursorY = totalPreHeight + leftoverCursor / width
+ val cursorX = leftoverCursor % width
+
+ (cursorY, cursorX)
+ }
+
+
+ type Action = (Vector[Char], Int) => (Vector[Char], Int)
+ type MsgAction = (Vector[Char], Int) => (Vector[Char], Int, String)
+
+
+ def noTransform(x: Vector[Char], i: Int) = (Ansi.Str.parse(x), i)
+ /**
+ * Blockingly reads a line from the given input stream and returns it.
+ *
+ * @param prompt The prompt to display when requesting input
+ * @param reader The input-stream where characters come in, e.g. System.in
+ * @param writer The output-stream where print-outs go, e.g. System.out
+ * @param filters A set of actions that can be taken depending on the input,
+ * @param displayTransform code to manipulate the display of the buffer and
+ * cursor, without actually changing the logical
+ * values inside them.
+ */
+ def readLine(prompt: Prompt,
+ reader: java.io.Reader,
+ writer: java.io.Writer,
+ filters: Filter,
+ displayTransform: (Vector[Char], Int) => (Ansi.Str, Int) = noTransform)
+ : Option[String] = {
+
+ /**
+ * Erases the previous line and re-draws it with the new buffer and
+ * cursor.
+ *
+ * Relies on `ups` to know how "tall" the previous line was, to go up
+ * and erase that many rows in the console. Performs a lot of horrific
+ * math all over the place, incredibly prone to off-by-ones, in order
+ * to at the end of the day position the cursor in the right spot.
+ */
+ def redrawLine(buffer: Ansi.Str,
+ cursor: Int,
+ ups: Int,
+ rowLengths: Seq[Int],
+ fullPrompt: Boolean = true,
+ newlinePrompt: Boolean = false) = {
+
+
+ // Enable this in certain cases (e.g. cursor near the value you are
+ // interested into) see what's going on with all the ansi screen-cursor
+ // movement
+ def debugDelay() = if (false){
+ Thread.sleep(200)
+ writer.flush()
+ }
+
+
+ val promptLine =
+ if (fullPrompt) prompt.full
+ else prompt.lastLine
+
+ val promptWidth = if(newlinePrompt) 0 else prompt.lastLine.length
+ val actualWidth = width - promptWidth
+
+ ansi.up(ups)
+ ansi.left(9999)
+ ansi.clearScreen(0)
+ writer.write(promptLine.toString)
+ if (newlinePrompt) writer.write("\n")
+
+ // I'm not sure why this is necessary, but it seems that without it, a
+ // cursor that "barely" overshoots the end of a line, at the end of the
+ // buffer, does not properly wrap and ends up dangling off the
+ // right-edge of the terminal window!
+ //
+ // This causes problems later since the cursor is at the wrong X/Y,
+ // confusing the rest of the math and ending up over-shooting on the
+ // `ansi.up` calls, over-writing earlier lines. This prints a single
+ // space such that instead of dangling it forces the cursor onto the
+ // next line for-realz. If it isn't dangling the extra space is a no-op
+ val lineStuffer = ' '
+ // Under `newlinePrompt`, we print the thing almost-verbatim, since we
+ // want to avoid breaking code by adding random indentation. If not, we
+ // are guaranteed that the lines are short, so we can indent the newlines
+ // without fear of wrapping
+ val newlineReplacement =
+ if (newlinePrompt) {
+
+ Array(lineStuffer, '\n')
+ } else {
+ val indent = " " * prompt.lastLine.length
+ Array('\n', indent:_*)
+ }
+
+ writer.write(
+ buffer.render.flatMap{
+ case '\n' => newlineReplacement
+ case x => Array(x)
+ }.toArray
+ )
+ writer.write(lineStuffer)
+
+ val fragHeights = calculateHeight0(rowLengths, actualWidth)
+ val (cursorY, cursorX) = positionCursor(
+ cursor,
+ rowLengths,
+ fragHeights,
+ actualWidth
+ )
+ ansi.up(fragHeights.sum - 1)
+ ansi.left(9999)
+ ansi.down(cursorY)
+ ansi.right(cursorX)
+ if (!newlinePrompt) ansi.right(prompt.lastLine.length)
+
+ writer.flush()
+ }
+
+ @tailrec
+ def readChar(lastState: TermState, ups: Int, fullPrompt: Boolean = true): Option[String] = {
+ val moreInputComing = reader.ready()
+
+ lazy val (transformedBuffer0, cursorOffset) = displayTransform(
+ lastState.buffer,
+ lastState.cursor
+ )
+
+ lazy val transformedBuffer = transformedBuffer0 ++ lastState.msg
+ lazy val lastOffsetCursor = lastState.cursor + cursorOffset
+ lazy val rowLengths = splitBuffer(
+ lastState.buffer ++ lastState.msg.plainText
+ )
+ val narrowWidth = width - prompt.lastLine.length
+ val newlinePrompt = rowLengths.exists(_ >= narrowWidth)
+ val promptWidth = if(newlinePrompt) 0 else prompt.lastLine.length
+ val actualWidth = width - promptWidth
+ val newlineUp = if (newlinePrompt) 1 else 0
+ if (!moreInputComing) redrawLine(
+ transformedBuffer,
+ lastOffsetCursor,
+ ups,
+ rowLengths,
+ fullPrompt,
+ newlinePrompt
+ )
+
+ lazy val (oldCursorY, _) = positionCursor(
+ lastOffsetCursor,
+ rowLengths,
+ calculateHeight0(rowLengths, actualWidth),
+ actualWidth
+ )
+
+ def updateState(s: LazyList[Int],
+ b: Vector[Char],
+ c: Int,
+ msg: Ansi.Str): (Int, TermState) = {
+
+ val newCursor = math.max(math.min(c, b.length), 0)
+ val nextUps =
+ if (moreInputComing) ups
+ else oldCursorY + newlineUp
+
+ val newState = TermState(s, b, newCursor, msg)
+
+ (nextUps, newState)
+ }
+ // `.get` because we assume that *some* filter is going to match each
+ // character, even if only to dump the character to the screen. If nobody
+ // matches the character then we can feel free to blow up
+ filters.op(TermInfo(lastState, actualWidth)).get match {
+ case Printing(TermState(s, b, c, msg), stdout) =>
+ writer.write(stdout)
+ val (nextUps, newState) = updateState(s, b, c, msg)
+ readChar(newState, nextUps)
+
+ case TermState(s, b, c, msg) =>
+ val (nextUps, newState) = updateState(s, b, c, msg)
+ readChar(newState, nextUps, false)
+
+ case Result(s) =>
+ redrawLine(
+ transformedBuffer, lastState.buffer.length,
+ oldCursorY + newlineUp, rowLengths, false, newlinePrompt
+ )
+ writer.write(10)
+ writer.write(13)
+ writer.flush()
+ Some(s)
+ case ClearScreen(ts) =>
+ ansi.clearScreen(2)
+ ansi.up(9999)
+ ansi.left(9999)
+ readChar(ts, ups)
+ case Exit =>
+ None
+ }
+ }
+
+ lazy val ansi = new AnsiNav(writer)
+ lazy val (width, _, initialConfig) = TTY.init()
+ try {
+ readChar(TermState(LazyList.continually(reader.read()), Vector.empty, 0, ""), 0)
+ }finally{
+
+ // Don't close these! Closing these closes stdin/stdout,
+ // which seems to kill the entire program
+
+ // reader.close()
+ // writer.close()
+ TTY.stty(initialConfig)
+ }
+ }
+}
+object Prompt {
+ implicit def construct(prompt: String): Prompt = {
+ val parsedPrompt = Ansi.Str.parse(prompt)
+ val index = parsedPrompt.plainText.lastIndexOf('\n')
+ val (_, last) = parsedPrompt.splitAt(index+1)
+ Prompt(parsedPrompt, last)
+ }
+}
+
+case class Prompt(full: Ansi.Str, lastLine: Ansi.Str)
diff --git a/src/dotty/tools/dotc/repl/ammonite/Utils.scala b/src/dotty/tools/dotc/repl/ammonite/Utils.scala
new file mode 100644
index 000000000..64a2c1476
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/Utils.scala
@@ -0,0 +1,169 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite.terminal
+
+import java.io.{FileOutputStream, Writer, File => JFile}
+import scala.annotation.tailrec
+
+/**
+ * Prints stuff to an ad-hoc logging file when running the repl or terminal in
+ * development mode
+ *
+ * Very handy for the common case where you're debugging terminal interactions
+ * and cannot use `println` because it will stomp all over your already messed
+ * up terminal state and block debugging. With [[Debug]], you can have a
+ * separate terminal open tailing the log file and log as verbosely as you
+ * want without affecting the primary terminal you're using to interact with
+ * Ammonite.
+ */
+object Debug {
+ lazy val debugOutput =
+ new FileOutputStream(new JFile("terminal/target/log"))
+
+ def apply(s: Any) =
+ if (System.getProperty("ammonite-sbt-build") == "true")
+ debugOutput.write((System.currentTimeMillis() + "\t\t" + s + "\n").getBytes)
+}
+
+class AnsiNav(output: Writer) {
+ def control(n: Int, c: Char) = output.write(s"\033[" + n + c)
+
+ /**
+ * Move up `n` squares
+ */
+ def up(n: Int) = if (n == 0) "" else control(n, 'A')
+ /**
+ * Move down `n` squares
+ */
+ def down(n: Int) = if (n == 0) "" else control(n, 'B')
+ /**
+ * Move right `n` squares
+ */
+ def right(n: Int) = if (n == 0) "" else control(n, 'C')
+ /**
+ * Move left `n` squares
+ */
+ def left(n: Int) = if (n == 0) "" else control(n, 'D')
+
+ /**
+ * Clear the screen
+ *
+ * n=0: clear from cursor to end of screen
+ * n=1: clear from cursor to start of screen
+ * n=2: clear entire screen
+ */
+ def clearScreen(n: Int) = control(n, 'J')
+ /**
+ * Clear the current line
+ *
+ * n=0: clear from cursor to end of line
+ * n=1: clear from cursor to start of line
+ * n=2: clear entire line
+ */
+ def clearLine(n: Int) = control(n, 'K')
+}
+
+object AnsiNav {
+ val resetUnderline = "\u001b[24m"
+ val resetForegroundColor = "\u001b[39m"
+ val resetBackgroundColor = "\u001b[49m"
+}
+
+object TTY {
+
+ // Prefer standard tools. Not sure why we need to do this, but for some
+ // reason the version installed by gnu-coreutils blows up sometimes giving
+ // "unable to perform all requested operations"
+ val pathedTput = if (new java.io.File("/usr/bin/tput").exists()) "/usr/bin/tput" else "tput"
+ val pathedStty = if (new java.io.File("/bin/stty").exists()) "/bin/stty" else "stty"
+
+ def consoleDim(s: String) = {
+ import sys.process._
+ Seq("bash", "-c", s"$pathedTput $s 2> /dev/tty").!!.trim.toInt
+ }
+ def init() = {
+ stty("-a")
+
+ val width = consoleDim("cols")
+ val height = consoleDim("lines")
+// Debug("Initializing, Width " + width)
+// Debug("Initializing, Height " + height)
+ val initialConfig = stty("-g").trim
+ stty("-icanon min 1 -icrnl -inlcr -ixon")
+ sttyFailTolerant("dsusp undef")
+ stty("-echo")
+ stty("intr undef")
+// Debug("")
+ (width, height, initialConfig)
+ }
+
+ private def sttyCmd(s: String) = {
+ import sys.process._
+ Seq("bash", "-c", s"$pathedStty $s < /dev/tty"): ProcessBuilder
+ }
+
+ def stty(s: String) =
+ sttyCmd(s).!!
+ /*
+ * Executes a stty command for which failure is expected, hence the return
+ * status can be non-null and errors are ignored.
+ * This is appropriate for `stty dsusp undef`, since it's unsupported on Linux
+ * (http://man7.org/linux/man-pages/man3/termios.3.html).
+ */
+ def sttyFailTolerant(s: String) =
+ sttyCmd(s ++ " 2> /dev/null").!
+
+ def restore(initialConfig: String) = {
+ stty(initialConfig)
+ }
+}
+
+/**
+ * A truly-lazy implementation of scala.Stream
+ */
+case class LazyList[T](headThunk: () => T, tailThunk: () => LazyList[T]) {
+ var rendered = false
+ lazy val head = {
+ rendered = true
+ headThunk()
+ }
+
+ lazy val tail = tailThunk()
+
+ def dropPrefix(prefix: Seq[T]) = {
+ @tailrec def rec(n: Int, l: LazyList[T]): Option[LazyList[T]] = {
+ if (n >= prefix.length) Some(l)
+ else if (prefix(n) == l.head) rec(n + 1, l.tail)
+ else None
+ }
+ rec(0, this)
+ }
+ override def toString = {
+
+ @tailrec def rec(l: LazyList[T], res: List[T]): List[T] = {
+ if (l.rendered) rec(l.tailThunk(), l.head :: res)
+ else res
+ }
+ s"LazyList(${(rec(this, Nil).reverse ++ Seq("...")).mkString(",")})"
+ }
+
+ def ~:(other: => T) = LazyList(() => other, () => this)
+}
+
+object LazyList {
+ object ~: {
+ def unapply[T](x: LazyList[T]) = Some((x.head, x.tail))
+ }
+
+ def continually[T](t: => T): LazyList[T] = LazyList(() => t, () =>continually(t))
+
+ implicit class CS(ctx: StringContext) {
+ val base = ctx.parts.mkString
+ object p {
+ def unapply(s: LazyList[Int]): Option[LazyList[Int]] = {
+ s.dropPrefix(base.map(_.toInt))
+ }
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala b/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala
new file mode 100644
index 000000000..faa97c348
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/filters/BasicFilters.scala
@@ -0,0 +1,163 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import ammonite.terminal.FilterTools._
+import ammonite.terminal.LazyList._
+import ammonite.terminal.SpecialKeys._
+import ammonite.terminal.Filter
+import ammonite.terminal._
+
+/**
+ * Filters for simple operation of a terminal: cursor-navigation
+ * (including with all the modifier keys), enter/ctrl-c-exit, etc.
+ */
+object BasicFilters {
+ def all = Filter.merge(
+ navFilter,
+ exitFilter,
+ enterFilter,
+ clearFilter,
+ //loggingFilter,
+ typingFilter
+ )
+
+ def injectNewLine(b: Vector[Char], c: Int, rest: LazyList[Int], indent: Int = 0) = {
+ val (first, last) = b.splitAt(c)
+ TermState(rest, (first :+ '\n') ++ last ++ Vector.fill(indent)(' '), c + 1 + indent)
+ }
+
+ def navFilter = Filter.merge(
+ Case(Up)((b, c, m) => moveUp(b, c, m.width)),
+ Case(Down)((b, c, m) => moveDown(b, c, m.width)),
+ Case(Right)((b, c, m) => (b, c + 1)),
+ Case(Left)((b, c, m) => (b, c - 1))
+ )
+
+ def tabColumn(indent: Int, b: Vector[Char], c: Int, rest: LazyList[Int]) = {
+ val (chunks, chunkStarts, chunkIndex) = FilterTools.findChunks(b, c)
+ val chunkCol = c - chunkStarts(chunkIndex)
+ val spacesToInject = indent - (chunkCol % indent)
+ val (lhs, rhs) = b.splitAt(c)
+ TS(rest, lhs ++ Vector.fill(spacesToInject)(' ') ++ rhs, c + spacesToInject)
+ }
+
+ def tabFilter(indent: Int): Filter = Filter("tabFilter") {
+ case TS(9 ~: rest, b, c, _) => tabColumn(indent, b, c, rest)
+ }
+
+ def loggingFilter: Filter = Filter("loggingFilter") {
+ case TS(Ctrl('q') ~: rest, b, c, _) =>
+ println("Char Display Mode Enabled! Ctrl-C to exit")
+ var curr = rest
+ while (curr.head != 3) {
+ println("Char " + curr.head)
+ curr = curr.tail
+ }
+ TS(curr, b, c)
+ }
+
+ def typingFilter: Filter = Filter("typingFilter") {
+ case TS(p"\u001b[3~$rest", b, c, _) =>
+// Debug("fn-delete")
+ val (first, last) = b.splitAt(c)
+ TS(rest, first ++ last.drop(1), c)
+
+ case TS(127 ~: rest, b, c, _) => // Backspace
+ val (first, last) = b.splitAt(c)
+ TS(rest, first.dropRight(1) ++ last, c - 1)
+
+ case TS(char ~: rest, b, c, _) =>
+// Debug("NORMAL CHAR " + char)
+ val (first, last) = b.splitAt(c)
+ TS(rest, (first :+ char.toChar) ++ last, c + 1)
+ }
+
+ def doEnter(b: Vector[Char], c: Int, rest: LazyList[Int]) = {
+ val (chunks, chunkStarts, chunkIndex) = FilterTools.findChunks(b, c)
+ if (chunkIndex == chunks.length - 1) Result(b.mkString)
+ else injectNewLine(b, c, rest)
+ }
+
+ def enterFilter: Filter = Filter("enterFilter") {
+ case TS(13 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+ case TS(10 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+ case TS(10 ~: 13 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+ case TS(13 ~: 10 ~: rest, b, c, _) => doEnter(b, c, rest) // Enter
+ }
+
+ def exitFilter: Filter = Filter("exitFilter") {
+ case TS(Ctrl('c') ~: rest, b, c, _) =>
+ Result("")
+ case TS(Ctrl('d') ~: rest, b, c, _) =>
+ // only exit if the line is empty, otherwise, behave like
+ // "delete" (i.e. delete one char to the right)
+ if (b.isEmpty) Exit else {
+ val (first, last) = b.splitAt(c)
+ TS(rest, first ++ last.drop(1), c)
+ }
+ case TS(-1 ~: rest, b, c, _) => Exit // java.io.Reader.read() produces -1 on EOF
+ }
+
+ def clearFilter: Filter = Filter("clearFilter") {
+ case TS(Ctrl('l') ~: rest, b, c, _) => ClearScreen(TS(rest, b, c))
+ }
+
+ def moveStart(b: Vector[Char], c: Int, w: Int) = {
+ val (_, chunkStarts, chunkIndex) = findChunks(b, c)
+ val currentColumn = (c - chunkStarts(chunkIndex)) % w
+ b -> (c - currentColumn)
+ }
+
+ def moveEnd(b: Vector[Char], c: Int, w: Int) = {
+ val (chunks, chunkStarts, chunkIndex) = findChunks(b, c)
+ val currentColumn = (c - chunkStarts(chunkIndex)) % w
+ val c1 = chunks.lift(chunkIndex + 1) match {
+ case Some(next) =>
+ val boundary = chunkStarts(chunkIndex + 1) - 1
+ if ((boundary - c) > (w - currentColumn)) {
+ val delta= w - currentColumn
+ c + delta
+ }
+ else boundary
+ case None =>
+ c + 1 * 9999
+ }
+ b -> c1
+ }
+
+ def moveUpDown(
+ b: Vector[Char],
+ c: Int,
+ w: Int,
+ boundaryOffset: Int,
+ nextChunkOffset: Int,
+ checkRes: Int,
+ check: (Int, Int) => Boolean,
+ isDown: Boolean
+ ) = {
+ val (chunks, chunkStarts, chunkIndex) = findChunks(b, c)
+ val offset = chunkStarts(chunkIndex + boundaryOffset)
+ if (check(checkRes, offset)) checkRes
+ else chunks.lift(chunkIndex + nextChunkOffset) match {
+ case None => c + nextChunkOffset * 9999
+ case Some(next) =>
+ val boundary = chunkStarts(chunkIndex + boundaryOffset)
+ val currentColumn = (c - chunkStarts(chunkIndex)) % w
+
+ if (isDown) boundary + math.min(currentColumn, next)
+ else boundary + math.min(currentColumn - next % w, 0) - 1
+ }
+ }
+
+ def moveUp(b: Vector[Char], c: Int, w: Int) = {
+ b -> moveUpDown(b, c, w, 0, -1, c - w, _ > _, false)
+ }
+
+ def moveDown(b: Vector[Char], c: Int, w: Int) = {
+ b -> moveUpDown(b, c, w, 1, 1, c + w, _ <= _, true)
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala b/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala
new file mode 100644
index 000000000..69a9769c6
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/filters/GUILikeFilters.scala
@@ -0,0 +1,170 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.LazyList.~:
+import terminal.SpecialKeys._
+import terminal.DelegateFilter
+import terminal._
+
+/**
+ * Filters that hook into the various {Ctrl,Shift,Fn,Alt}x{Up,Down,Left,Right}
+ * combination keys, and make them behave similarly as they would on a normal
+ * GUI text editor: alt-{left, right} for word movement, hold-down-shift for
+ * text selection, etc.
+ */
+object GUILikeFilters {
+ case class SelectionFilter(indent: Int) extends DelegateFilter {
+ def identifier = "SelectionFilter"
+ var mark: Option[Int] = None
+
+ def setMark(c: Int) = {
+ Debug("setMark\t" + mark + "\t->\t" + c)
+ if (mark == None) mark = Some(c)
+ }
+
+ def doIndent(
+ b: Vector[Char],
+ c: Int,
+ rest: LazyList[Int],
+ slicer: Vector[Char] => Int
+ ) = {
+
+ val markValue = mark.get
+ val (chunks, chunkStarts, chunkIndex) = FilterTools.findChunks(b, c)
+ val min = chunkStarts.lastIndexWhere(_ <= math.min(c, markValue))
+ val max = chunkStarts.indexWhere(_ > math.max(c, markValue))
+ val splitPoints = chunkStarts.slice(min, max)
+ val frags = (0 +: splitPoints :+ 99999).sliding(2).zipWithIndex
+
+ var firstOffset = 0
+ val broken =
+ for((Seq(l, r), i) <- frags) yield {
+ val slice = b.slice(l, r)
+ if (i == 0) slice
+ else {
+ val cut = slicer(slice)
+
+ if (i == 1) firstOffset = cut
+
+ if (cut < 0) slice.drop(-cut)
+ else Vector.fill(cut)(' ') ++ slice
+ }
+ }
+ val flattened = broken.flatten.toVector
+ val deeperOffset = flattened.length - b.length
+
+ val (newMark, newC) =
+ if (mark.get > c) (mark.get + deeperOffset, c + firstOffset)
+ else (mark.get + firstOffset, c + deeperOffset)
+
+ mark = Some(newMark)
+ TS(rest, flattened, newC)
+ }
+
+ def filter = Filter.merge(
+
+ Case(ShiftUp) {(b, c, m) => setMark(c); BasicFilters.moveUp(b, c, m.width)},
+ Case(ShiftDown) {(b, c, m) => setMark(c); BasicFilters.moveDown(b, c, m.width)},
+ Case(ShiftRight) {(b, c, m) => setMark(c); (b, c + 1)},
+ Case(ShiftLeft) {(b, c, m) => setMark(c); (b, c - 1)},
+ Case(AltShiftUp) {(b, c, m) => setMark(c); BasicFilters.moveUp(b, c, m.width)},
+ Case(AltShiftDown) {(b, c, m) => setMark(c); BasicFilters.moveDown(b, c, m.width)},
+ Case(AltShiftRight) {(b, c, m) => setMark(c); wordRight(b, c)},
+ Case(AltShiftLeft) {(b, c, m) => setMark(c); wordLeft(b, c)},
+ Case(FnShiftRight) {(b, c, m) => setMark(c); BasicFilters.moveEnd(b, c, m.width)},
+ Case(FnShiftLeft) {(b, c, m) => setMark(c); BasicFilters.moveStart(b, c, m.width)},
+ Filter("fnOtherFilter") {
+ case TS(27 ~: 91 ~: 90 ~: rest, b, c, _) if mark.isDefined =>
+ doIndent(b, c, rest,
+ slice => -math.min(slice.iterator.takeWhile(_ == ' ').size, indent)
+ )
+
+ case TS(9 ~: rest, b, c, _) if mark.isDefined => // Tab
+ doIndent(b, c, rest,
+ slice => indent
+ )
+
+ // Intercept every other character.
+ case TS(char ~: inputs, buffer, cursor, _) if mark.isDefined =>
+ // If it's a special command, just cancel the current selection.
+ if (char.toChar.isControl &&
+ char != 127 /*backspace*/ &&
+ char != 13 /*enter*/ &&
+ char != 10 /*enter*/) {
+ mark = None
+ TS(char ~: inputs, buffer, cursor)
+ } else {
+ // If it's a printable character, delete the current
+ // selection and write the printable character.
+ val Seq(min, max) = Seq(mark.get, cursor).sorted
+ mark = None
+ val newBuffer = buffer.take(min) ++ buffer.drop(max)
+ val newInputs =
+ if (char == 127) inputs
+ else char ~: inputs
+ TS(newInputs, newBuffer, min)
+ }
+ }
+ )
+ }
+
+ object SelectionFilter {
+ def mangleBuffer(
+ selectionFilter: SelectionFilter,
+ string: Ansi.Str,
+ cursor: Int,
+ startColor: Ansi.Attr
+ ) = {
+ selectionFilter.mark match {
+ case Some(mark) if mark != cursor =>
+ val Seq(min, max) = Seq(cursor, mark).sorted
+ val displayOffset = if (cursor < mark) 0 else -1
+ val newStr = string.overlay(startColor, min, max)
+ (newStr, displayOffset)
+ case _ => (string, 0)
+ }
+ }
+ }
+
+ val fnFilter = Filter.merge(
+ Case(FnUp)((b, c, m) => (b, c - 9999)),
+ Case(FnDown)((b, c, m) => (b, c + 9999)),
+ Case(FnRight)((b, c, m) => BasicFilters.moveEnd(b, c, m.width)),
+ Case(FnLeft)((b, c, m) => BasicFilters.moveStart(b, c, m.width))
+ )
+ val altFilter = Filter.merge(
+ Case(AltUp) {(b, c, m) => BasicFilters.moveUp(b, c, m.width)},
+ Case(AltDown) {(b, c, m) => BasicFilters.moveDown(b, c, m.width)},
+ Case(AltRight) {(b, c, m) => wordRight(b, c)},
+ Case(AltLeft) {(b, c, m) => wordLeft(b, c)}
+ )
+
+ val fnAltFilter = Filter.merge(
+ Case(FnAltUp) {(b, c, m) => (b, c)},
+ Case(FnAltDown) {(b, c, m) => (b, c)},
+ Case(FnAltRight) {(b, c, m) => (b, c)},
+ Case(FnAltLeft) {(b, c, m) => (b, c)}
+ )
+ val fnAltShiftFilter = Filter.merge(
+ Case(FnAltShiftRight) {(b, c, m) => (b, c)},
+ Case(FnAltShiftLeft) {(b, c, m) => (b, c)}
+ )
+
+
+ def consumeWord(b: Vector[Char], c: Int, delta: Int, offset: Int) = {
+ var current = c
+ while(b.isDefinedAt(current) && !b(current).isLetterOrDigit) current += delta
+ while(b.isDefinedAt(current) && b(current).isLetterOrDigit) current += delta
+ current + offset
+ }
+
+ // c -1 to move at least one character! Otherwise you get stuck at the start of
+ // a word.
+ def wordLeft(b: Vector[Char], c: Int) = b -> consumeWord(b, c - 1, -1, 1)
+ def wordRight(b: Vector[Char], c: Int) = b -> consumeWord(b, c, 1, 0)
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala b/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala
new file mode 100644
index 000000000..dac1c9d23
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/filters/HistoryFilter.scala
@@ -0,0 +1,334 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.LazyList._
+import terminal._
+
+/**
+ * Provides history navigation up and down, saving the current line, as well
+ * as history-search functionality (`Ctrl R` in bash) letting you quickly find
+ * & filter previous commands by entering a sub-string.
+ */
+class HistoryFilter(
+ history: () => IndexedSeq[String],
+ commentStartColor: String,
+ commentEndColor: String
+) extends DelegateFilter {
+
+
+ def identifier = "HistoryFilter"
+ /**
+ * `-1` means we haven't started looking at history, `n >= 0` means we're
+ * currently at history command `n`
+ */
+ var historyIndex = -1
+
+ /**
+ * The term we're searching for, if any.
+ *
+ * - `None` means we're not searching for anything, e.g. we're just
+ * browsing history
+ *
+ * - `Some(term)` where `term` is not empty is what it normally looks
+ * like when we're searching for something
+ *
+ * - `Some(term)` where `term` is empty only really happens when you
+ * start searching and delete things, or if you `Ctrl-R` on an empty
+ * prompt
+ */
+ var searchTerm: Option[Vector[Char]] = None
+
+ /**
+ * Records the last buffer that the filter has observed while it's in
+ * search/history mode. If the new buffer differs from this, assume that
+ * some other filter modified the buffer and drop out of search/history
+ */
+ var prevBuffer: Option[Vector[Char]] = None
+
+ /**
+ * Kicks the HistoryFilter from passive-mode into search-history mode
+ */
+ def startHistory(b: Vector[Char], c: Int): (Vector[Char], Int, String) = {
+ if (b.nonEmpty) searchTerm = Some(b)
+ up(Vector(), c)
+ }
+
+ def searchHistory(
+ start: Int,
+ increment: Int,
+ buffer: Vector[Char],
+ skipped: Vector[Char]
+ ) = {
+
+ def nextHistoryIndexFor(v: Vector[Char]) = {
+ HistoryFilter.findNewHistoryIndex(start, v, history(), increment, skipped)
+ }
+
+ val (newHistoryIndex, newBuffer, newMsg, newCursor) = searchTerm match {
+ // We're not searching for anything, just browsing history.
+ // Pass in Vector.empty so we scroll through all items
+ case None =>
+ val (i, b, c) = nextHistoryIndexFor(Vector.empty)
+ (i, b, "", 99999)
+
+ // We're searching for some item with a particular search term
+ case Some(b) if b.nonEmpty =>
+ val (i, b1, c) = nextHistoryIndexFor(b)
+
+ val msg =
+ if (i.nonEmpty) ""
+ else commentStartColor + HistoryFilter.cannotFindSearchMessage + commentEndColor
+
+ (i, b1, msg, c)
+
+ // We're searching for nothing in particular; in this case,
+ // show a help message instead of an unhelpful, empty buffer
+ case Some(b) if b.isEmpty =>
+ val msg = commentStartColor + HistoryFilter.emptySearchMessage + commentEndColor
+ // The cursor in this case always goes to zero
+ (Some(start), Vector(), msg, 0)
+
+ }
+
+ historyIndex = newHistoryIndex.getOrElse(-1)
+
+ (newBuffer, newCursor, newMsg)
+ }
+
+ def activeHistory = searchTerm.nonEmpty || historyIndex != -1
+ def activeSearch = searchTerm.nonEmpty
+
+ def up(b: Vector[Char], c: Int) =
+ searchHistory(historyIndex + 1, 1, b, b)
+
+ def down(b: Vector[Char], c: Int) =
+ searchHistory(historyIndex - 1, -1, b, b)
+
+ def wrap(rest: LazyList[Int], out: (Vector[Char], Int, String)) =
+ TS(rest, out._1, out._2, out._3)
+
+ def ctrlR(b: Vector[Char], c: Int) =
+ if (activeSearch) up(b, c)
+ else {
+ searchTerm = Some(b)
+ up(Vector(), c)
+ }
+
+ def printableChar(char: Char)(b: Vector[Char], c: Int) = {
+ searchTerm = searchTerm.map(_ :+ char)
+ searchHistory(historyIndex.max(0), 1, b :+ char, Vector())
+ }
+
+ def backspace(b: Vector[Char], c: Int) = {
+ searchTerm = searchTerm.map(_.dropRight(1))
+ searchHistory(historyIndex, 1, b, Vector())
+ }
+
+ /**
+ * Predicate to check if either we're searching for a term or if we're in
+ * history-browsing mode and some predicate is true.
+ *
+ * Very often we want to capture keystrokes in search-mode more aggressively
+ * than in history-mode, e.g. search-mode drops you out more aggressively
+ * than history-mode does, and its up/down keys cycle through history more
+ * aggressively on every keystroke while history-mode only cycles when you
+ * reach the top/bottom line of the multi-line input.
+ */
+ def searchOrHistoryAnd(cond: Boolean) =
+ activeSearch || (activeHistory && cond)
+
+ val dropHistoryChars = Set(9, 13, 10) // Tab or Enter
+
+ def endHistory() = {
+ historyIndex = -1
+ searchTerm = None
+ }
+
+ def filter = Filter.wrap("historyFilterWrap1") {
+ (ti: TermInfo) => {
+ prelude.op(ti) match {
+ case None =>
+ prevBuffer = Some(ti.ts.buffer)
+ filter0.op(ti) match {
+ case Some(ts: TermState) =>
+ prevBuffer = Some(ts.buffer)
+ Some(ts)
+ case x => x
+ }
+ case some => some
+ }
+ }
+ }
+
+ def prelude: Filter = Filter("historyPrelude") {
+ case TS(inputs, b, c, _) if activeHistory && prevBuffer.exists(_ != b) =>
+ endHistory()
+ prevBuffer = None
+ TS(inputs, b, c)
+ }
+
+ def filter0: Filter = Filter("filter0") {
+ // Ways to kick off the history/search if you're not already in it
+
+ // `Ctrl-R`
+ case TS(18 ~: rest, b, c, _) => wrap(rest, ctrlR(b, c))
+
+ // `Up` from the first line in the input
+ case TermInfo(TS(p"\u001b[A$rest", b, c, _), w) if firstRow(c, b, w) && !activeHistory =>
+ wrap(rest, startHistory(b, c))
+
+ // `Ctrl P`
+ case TermInfo(TS(p"\u0010$rest", b, c, _), w) if firstRow(c, b, w) && !activeHistory =>
+ wrap(rest, startHistory(b, c))
+
+ // `Page-Up` from first character starts history
+ case TermInfo(TS(p"\u001b[5~$rest", b, c, _), w) if c == 0 && !activeHistory =>
+ wrap(rest, startHistory(b, c))
+
+ // Things you can do when you're already in the history search
+
+ // Navigating up and down the history. Each up or down searches for
+ // the next thing that matches your current searchTerm
+ // Up
+ case TermInfo(TS(p"\u001b[A$rest", b, c, _), w) if searchOrHistoryAnd(firstRow(c, b, w)) =>
+ wrap(rest, up(b, c))
+
+ // Ctrl P
+ case TermInfo(TS(p"\u0010$rest", b, c, _), w) if searchOrHistoryAnd(firstRow(c, b, w)) =>
+ wrap(rest, up(b, c))
+
+ // `Page-Up` from first character cycles history up
+ case TermInfo(TS(p"\u001b[5~$rest", b, c, _), w) if searchOrHistoryAnd(c == 0) =>
+ wrap(rest, up(b, c))
+
+ // Down
+ case TermInfo(TS(p"\u001b[B$rest", b, c, _), w) if searchOrHistoryAnd(lastRow(c, b, w)) =>
+ wrap(rest, down(b, c))
+
+ // `Ctrl N`
+
+ case TermInfo(TS(p"\u000e$rest", b, c, _), w) if searchOrHistoryAnd(lastRow(c, b, w)) =>
+ wrap(rest, down(b, c))
+ // `Page-Down` from last character cycles history down
+ case TermInfo(TS(p"\u001b[6~$rest", b, c, _), w) if searchOrHistoryAnd(c == b.length - 1) =>
+ wrap(rest, down(b, c))
+
+
+ // Intercept Backspace and delete a character in search-mode, preserving it, but
+ // letting it fall through and dropping you out of history-mode if you try to make
+ // edits
+ case TS(127 ~: rest, buffer, cursor, _) if activeSearch =>
+ wrap(rest, backspace(buffer, cursor))
+
+ // Any other control characters drop you out of search mode, but only the
+ // set of `dropHistoryChars` drops you out of history mode
+ case TS(char ~: inputs, buffer, cursor, _)
+ if char.toChar.isControl && searchOrHistoryAnd(dropHistoryChars(char)) =>
+ val newBuffer =
+ // If we're back to -1, it means we've wrapped around and are
+ // displaying the original search term with a wrap-around message
+ // in the terminal. Drop the message and just preserve the search term
+ if (historyIndex == -1) searchTerm.get
+ // If we're searching for an empty string, special-case this and return
+ // an empty buffer rather than the first history item (which would be
+ // the default) because that wouldn't make much sense
+ else if (searchTerm.exists(_.isEmpty)) Vector()
+ // Otherwise, pick whatever history entry we're at and use that
+ else history()(historyIndex).toVector
+ endHistory()
+
+ TS(char ~: inputs, newBuffer, cursor)
+
+ // Intercept every other printable character when search is on and
+ // enter it into the current search
+ case TS(char ~: rest, buffer, cursor, _) if activeSearch =>
+ wrap(rest, printableChar(char.toChar)(buffer, cursor))
+
+ // If you're not in search but are in history, entering any printable
+ // characters kicks you out of it and preserves the current buffer. This
+ // makes it harder for you to accidentally lose work due to history-moves
+ case TS(char ~: rest, buffer, cursor, _) if activeHistory && !char.toChar.isControl =>
+ historyIndex = -1
+ TS(char ~: rest, buffer, cursor)
+ }
+}
+
+object HistoryFilter {
+
+ def mangleBuffer(
+ historyFilter: HistoryFilter,
+ buffer: Ansi.Str,
+ cursor: Int,
+ startColor: Ansi.Attr
+ ) = {
+ if (!historyFilter.activeSearch) buffer
+ else {
+ val (searchStart, searchEnd) =
+ if (historyFilter.searchTerm.get.isEmpty) (cursor, cursor+1)
+ else {
+ val start = buffer.plainText.indexOfSlice(historyFilter.searchTerm.get)
+
+ val end = start + (historyFilter.searchTerm.get.length max 1)
+ (start, end)
+ }
+
+ val newStr = buffer.overlay(startColor, searchStart, searchEnd)
+ newStr
+ }
+ }
+
+ /**
+ * @param startIndex The first index to start looking from
+ * @param searchTerm The term we're searching from; can be empty
+ * @param history The history we're searching through
+ * @param indexIncrement Which direction to search, +1 or -1
+ * @param skipped Any buffers which we should skip in our search results,
+ * e.g. because the user has seen them before.
+ */
+ def findNewHistoryIndex(
+ startIndex: Int,
+ searchTerm: Vector[Char],
+ history: IndexedSeq[String],
+ indexIncrement: Int,
+ skipped: Vector[Char]
+ ) = {
+ /**
+ * `Some(i)` means we found a reasonable result at history element `i`
+ * `None` means we couldn't find anything, and should show a not-found
+ * error to the user
+ */
+ def rec(i: Int): Option[Int] = history.lift(i) match {
+ // If i < 0, it means the user is pressing `down` too many times, which
+ // means it doesn't show anything but we shouldn't show an error
+ case None if i < 0 => Some(-1)
+ case None => None
+ case Some(s) if s.contains(searchTerm) && !s.contentEquals(skipped) =>
+ Some(i)
+ case _ => rec(i + indexIncrement)
+ }
+
+ val newHistoryIndex = rec(startIndex)
+ val foundIndex = newHistoryIndex.find(_ != -1)
+ val newBuffer = foundIndex match {
+ case None => searchTerm
+ case Some(i) => history(i).toVector
+ }
+
+ val newCursor = foundIndex match {
+ case None => newBuffer.length
+ case Some(i) => history(i).indexOfSlice(searchTerm) + searchTerm.length
+ }
+
+ (newHistoryIndex, newBuffer, newCursor)
+ }
+
+ val emptySearchMessage =
+ s" ...enter the string to search for, then `up` for more"
+ val cannotFindSearchMessage =
+ s" ...can't be found in history; re-starting search"
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala b/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala
new file mode 100644
index 000000000..eb79f2b04
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/filters/ReadlineFilters.scala
@@ -0,0 +1,165 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.SpecialKeys._
+import terminal.{DelegateFilter, Filter, Terminal}
+/**
+ * Filters for injection of readline-specific hotkeys, the sort that
+ * are available in bash, python and most other interactive command-lines
+ */
+object ReadlineFilters {
+ // www.bigsmoke.us/readline/shortcuts
+ // Ctrl-b <- one char
+ // Ctrl-f -> one char
+ // Alt-b <- one word
+ // Alt-f -> one word
+ // Ctrl-a <- start of line
+ // Ctrl-e -> end of line
+ // Ctrl-x-x Toggle start/end
+
+ // Backspace <- delete char
+ // Del -> delete char
+ // Ctrl-u <- delete all
+ // Ctrl-k -> delete all
+ // Alt-d -> delete word
+ // Ctrl-w <- delete word
+
+ // Ctrl-u/- Undo
+ // Ctrl-l clear screen
+
+ // Ctrl-k -> cut all
+ // Alt-d -> cut word
+ // Alt-Backspace <- cut word
+ // Ctrl-y paste last cut
+
+ /**
+ * Basic readline-style navigation, using all the obscure alphabet hotkeys
+ * rather than using arrows
+ */
+ lazy val navFilter = Filter.merge(
+ Case(Ctrl('b'))((b, c, m) => (b, c - 1)), // <- one char
+ Case(Ctrl('f'))((b, c, m) => (b, c + 1)), // -> one char
+ Case(Alt + "b")((b, c, m) => GUILikeFilters.wordLeft(b, c)), // <- one word
+ Case(Alt + "B")((b, c, m) => GUILikeFilters.wordLeft(b, c)), // <- one word
+ Case(LinuxCtrlLeft)((b, c, m) => GUILikeFilters.wordLeft(b, c)), // <- one word
+ Case(Alt + "f")((b, c, m) => GUILikeFilters.wordRight(b, c)), // -> one word
+ Case(Alt + "F")((b, c, m) => GUILikeFilters.wordRight(b, c)), // -> one word
+ Case(LinuxCtrlRight)((b, c, m) => GUILikeFilters.wordRight(b, c)), // -> one word
+ Case(Home)((b, c, m) => BasicFilters.moveStart(b, c, m.width)), // <- one line
+ Case(HomeScreen)((b, c, m) => BasicFilters.moveStart(b, c, m.width)), // <- one line
+ Case(Ctrl('a'))((b, c, m) => BasicFilters.moveStart(b, c, m.width)),
+ Case(End)((b, c, m) => BasicFilters.moveEnd(b, c, m.width)), // -> one line
+ Case(EndScreen)((b, c, m) => BasicFilters.moveEnd(b, c, m.width)), // -> one line
+ Case(Ctrl('e'))((b, c, m) => BasicFilters.moveEnd(b, c, m.width)),
+ Case(Alt + "t")((b, c, m) => transposeWord(b, c)),
+ Case(Alt + "T")((b, c, m) => transposeWord(b, c)),
+ Case(Ctrl('t'))((b, c, m) => transposeLetter(b, c))
+ )
+
+ def transposeLetter(b: Vector[Char], c: Int) =
+ // If there's no letter before the cursor to transpose, don't do anything
+ if (c == 0) (b, c)
+ else if (c == b.length) (b.dropRight(2) ++ b.takeRight(2).reverse, c)
+ else (b.patch(c-1, b.slice(c-1, c+1).reverse, 2), c + 1)
+
+ def transposeWord(b: Vector[Char], c: Int) = {
+ val leftStart0 = GUILikeFilters.consumeWord(b, c - 1, -1, 1)
+ val leftEnd0 = GUILikeFilters.consumeWord(b, leftStart0, 1, 0)
+ val rightEnd = GUILikeFilters.consumeWord(b, c, 1, 0)
+ val rightStart = GUILikeFilters.consumeWord(b, rightEnd - 1, -1, 1)
+
+ // If no word to the left to transpose, do nothing
+ if (leftStart0 == 0 && rightStart == 0) (b, c)
+ else {
+ val (leftStart, leftEnd) =
+ // If there is no word to the *right* to transpose,
+ // transpose the two words to the left instead
+ if (leftEnd0 == b.length && rightEnd == b.length) {
+ val leftStart = GUILikeFilters.consumeWord(b, leftStart0 - 1, -1, 1)
+ val leftEnd = GUILikeFilters.consumeWord(b, leftStart, 1, 0)
+ (leftStart, leftEnd)
+ }else (leftStart0, leftEnd0)
+
+ val newB =
+ b.slice(0, leftStart) ++
+ b.slice(rightStart, rightEnd) ++
+ b.slice(leftEnd, rightStart) ++
+ b.slice(leftStart, leftEnd) ++
+ b.slice(rightEnd, b.length)
+
+ (newB, rightEnd)
+ }
+ }
+
+ /**
+ * All the cut-pasting logic, though for many people they simply
+ * use these shortcuts for deleting and don't use paste much at all.
+ */
+ case class CutPasteFilter() extends DelegateFilter {
+ def identifier = "CutPasteFilter"
+ var accumulating = false
+ var currentCut = Vector.empty[Char]
+ def prepend(b: Vector[Char]) = {
+ if (accumulating) currentCut = b ++ currentCut
+ else currentCut = b
+ accumulating = true
+ }
+ def append(b: Vector[Char]) = {
+ if (accumulating) currentCut = currentCut ++ b
+ else currentCut = b
+ accumulating = true
+ }
+ def cutCharLeft(b: Vector[Char], c: Int) = {
+ /* Do not edit current cut. Zsh(zle) & Bash(readline) do not edit the yank ring for Ctrl-h */
+ (b patch(from = c - 1, patch = Nil, replaced = 1), c - 1)
+ }
+
+ def cutAllLeft(b: Vector[Char], c: Int) = {
+ prepend(b.take(c))
+ (b.drop(c), 0)
+ }
+ def cutAllRight(b: Vector[Char], c: Int) = {
+ append(b.drop(c))
+ (b.take(c), c)
+ }
+
+ def cutWordRight(b: Vector[Char], c: Int) = {
+ val start = GUILikeFilters.consumeWord(b, c, 1, 0)
+ append(b.slice(c, start))
+ (b.take(c) ++ b.drop(start), c)
+ }
+
+ def cutWordLeft(b: Vector[Char], c: Int) = {
+ val start = GUILikeFilters.consumeWord(b, c - 1, -1, 1)
+ prepend(b.slice(start, c))
+ (b.take(start) ++ b.drop(c), start)
+ }
+
+ def paste(b: Vector[Char], c: Int) = {
+ accumulating = false
+ (b.take(c) ++ currentCut ++ b.drop(c), c + currentCut.length)
+ }
+
+ def filter = Filter.merge(
+ Case(Ctrl('u'))((b, c, m) => cutAllLeft(b, c)),
+ Case(Ctrl('k'))((b, c, m) => cutAllRight(b, c)),
+ Case(Alt + "d")((b, c, m) => cutWordRight(b, c)),
+ Case(Ctrl('w'))((b, c, m) => cutWordLeft(b, c)),
+ Case(Alt + "\u007f")((b, c, m) => cutWordLeft(b, c)),
+ // weird hacks to make it run code every time without having to be the one
+ // handling the input; ideally we'd change Filter to be something
+ // other than a PartialFunction, but for now this will do.
+
+ // If some command goes through that's not appending/prepending to the
+ // kill ring, stop appending and allow the next kill to override it
+ Filter.wrap("ReadLineFilterWrap") {_ => accumulating = false; None},
+ Case(Ctrl('h'))((b, c, m) => cutCharLeft(b, c)),
+ Case(Ctrl('y'))((b, c, m) => paste(b, c))
+ )
+ }
+}
diff --git a/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala b/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala
new file mode 100644
index 000000000..c265a7a4c
--- /dev/null
+++ b/src/dotty/tools/dotc/repl/ammonite/filters/UndoFilter.scala
@@ -0,0 +1,157 @@
+package dotty.tools
+package dotc
+package repl
+package ammonite
+package terminal
+package filters
+
+import terminal.FilterTools._
+import terminal.LazyList.~:
+import terminal._
+import scala.collection.mutable
+
+/**
+ * A filter that implements "undo" functionality in the ammonite REPL. It
+ * shares the same `Ctrl -` hotkey that the bash undo, but shares behavior
+ * with the undo behavior in desktop text editors:
+ *
+ * - Multiple `delete`s in a row get collapsed
+ * - In addition to edits you can undo cursor movements: undo will bring your
+ * cursor back to location of previous edits before it undoes them
+ * - Provides "redo" functionality under `Alt -`/`Esc -`: un-undo the things
+ * you didn't actually want to undo!
+ *
+ * @param maxUndo: the maximum number of undo-frames that are stored.
+ */
+case class UndoFilter(maxUndo: Int = 25) extends DelegateFilter {
+ def identifier = "UndoFilter"
+ /**
+ * The current stack of states that undo/redo would cycle through.
+ *
+ * Not really the appropriate data structure, since when it reaches
+ * `maxUndo` in length we remove one element from the start whenever we
+ * append one element to the end, which costs `O(n)`. On the other hand,
+ * It also costs `O(n)` to maintain the buffer of previous states, and
+ * so `n` is probably going to be pretty small anyway (tens?) so `O(n)`
+ * is perfectly fine.
+ */
+ val undoBuffer = mutable.Buffer[(Vector[Char], Int)](Vector[Char]() -> 0)
+
+ /**
+ * The current position in the undoStack that the terminal is currently in.
+ */
+ var undoIndex = 0
+ /**
+ * An enum representing what the user is "currently" doing. Used to
+ * collapse sequential actions into one undo step: e.g. 10 plain
+ * chars typed becomes 1 undo step, or 10 chars deleted becomes one undo
+ * step, but 4 chars typed followed by 3 chars deleted followed by 3 chars
+ * typed gets grouped into 3 different undo steps
+ */
+ var state = UndoState.Default
+ def currentUndo = undoBuffer(undoBuffer.length - undoIndex - 1)
+
+ def undo(b: Vector[Char], c: Int) = {
+ val msg =
+ if (undoIndex >= undoBuffer.length - 1) UndoFilter.cannotUndoMsg
+ else {
+ undoIndex += 1
+ state = UndoState.Default
+ UndoFilter.undoMsg
+ }
+ val (b1, c1) = currentUndo
+ (b1, c1, msg)
+ }
+
+ def redo(b: Vector[Char], c: Int) = {
+ val msg =
+ if (undoIndex <= 0) UndoFilter.cannotRedoMsg
+ else {
+ undoIndex -= 1
+ state = UndoState.Default
+ UndoFilter.redoMsg
+ }
+
+ currentUndo
+ val (b1, c1) = currentUndo
+ (b1, c1, msg)
+ }
+
+ def wrap(bc: (Vector[Char], Int, Ansi.Str), rest: LazyList[Int]) = {
+ val (b, c, msg) = bc
+ TS(rest, b, c, msg)
+ }
+
+ def pushUndos(b: Vector[Char], c: Int) = {
+ val (lastB, lastC) = currentUndo
+ // Since we don't have access to the `typingFilter` in this code, we
+ // instead attempt to reverse-engineer "what happened" to the buffer by
+ // comparing the old one with the new.
+ //
+ // It turns out that it's not that hard to identify the few cases we care
+ // about, since they're all result in either 0 or 1 chars being different
+ // between old and new buffers.
+ val newState =
+ // Nothing changed means nothing changed
+ if (lastC == c && lastB == b) state
+ // if cursor advanced 1, and buffer grew by 1 at the cursor, we're typing
+ else if (lastC + 1 == c && lastB == b.patch(c-1, Nil, 1)) UndoState.Typing
+ // cursor moved left 1, and buffer lost 1 char at that point, we're deleting
+ else if (lastC - 1 == c && lastB.patch(c, Nil, 1) == b) UndoState.Deleting
+ // cursor didn't move, and buffer lost 1 char at that point, we're also deleting
+ else if (lastC == c && lastB.patch(c - 1, Nil, 1) == b) UndoState.Deleting
+ // cursor moved around but buffer didn't change, we're navigating
+ else if (lastC != c && lastB == b) UndoState.Navigating
+ // otherwise, sit in the "Default" state where every change is recorded.
+ else UndoState.Default
+
+ if (state != newState || newState == UndoState.Default && (lastB, lastC) != (b, c)) {
+ // If something changes: either we enter a new `UndoState`, or we're in
+ // the `Default` undo state and the terminal buffer/cursor change, then
+ // truncate the `undoStack` and add a new tuple to the stack that we can
+ // build upon. This means that we lose all ability to re-do actions after
+ // someone starts making edits, which is consistent with most other
+ // editors
+ state = newState
+ undoBuffer.remove(undoBuffer.length - undoIndex, undoIndex)
+ undoIndex = 0
+
+ if (undoBuffer.length == maxUndo) undoBuffer.remove(0)
+
+ undoBuffer.append(b -> c)
+ } else if (undoIndex == 0 && (b, c) != undoBuffer(undoBuffer.length - 1)) {
+ undoBuffer(undoBuffer.length - 1) = (b, c)
+ }
+
+ state = newState
+ }
+
+ def filter = Filter.merge(
+ Filter.wrap("undoFilterWrapped") {
+ case TS(q ~: rest, b, c, _) =>
+ pushUndos(b, c)
+ None
+ },
+ Filter("undoFilter") {
+ case TS(31 ~: rest, b, c, _) => wrap(undo(b, c), rest)
+ case TS(27 ~: 114 ~: rest, b, c, _) => wrap(undo(b, c), rest)
+ case TS(27 ~: 45 ~: rest, b, c, _) => wrap(redo(b, c), rest)
+ }
+ )
+}
+
+
+sealed class UndoState(override val toString: String)
+object UndoState {
+ val Default = new UndoState("Default")
+ val Typing = new UndoState("Typing")
+ val Deleting = new UndoState("Deleting")
+ val Navigating = new UndoState("Navigating")
+}
+
+object UndoFilter {
+ val undoMsg = Ansi.Color.Blue(" ...undoing last action, `Alt -` or `Esc -` to redo")
+ val cannotUndoMsg = Ansi.Color.Blue(" ...no more actions to undo")
+ val redoMsg = Ansi.Color.Blue(" ...redoing last action")
+ val cannotRedoMsg = Ansi.Color.Blue(" ...no more actions to redo")
+}
diff --git a/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
index f35293d8d..da3df6984 100644
--- a/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
+++ b/src/dotty/tools/dotc/reporting/ConsoleReporter.scala
@@ -4,53 +4,142 @@ package reporting
import scala.collection.mutable
import util.SourcePosition
-import core.Contexts._
+import core.Contexts._, core.Decorators._
import Reporter._
import java.io.{ BufferedReader, IOException, PrintWriter }
import scala.reflect.internal.util._
+import printing.SyntaxHighlighting._
+import printing.Highlighting._
+import diagnostic.{ Message, MessageContainer, NoExplanation }
+import diagnostic.messages._
/**
- * This class implements a Reporter that displays messages on a text
- * console.
- */
+ * This class implements a Reporter that displays messages on a text console
+ */
class ConsoleReporter(
- reader: BufferedReader = Console.in,
- writer: PrintWriter = new PrintWriter(Console.err, true))
- extends Reporter with UniqueMessagePositions with HideNonSensicalMessages {
+ reader: BufferedReader = Console.in,
+ writer: PrintWriter = new PrintWriter(Console.err, true)
+) extends Reporter with UniqueMessagePositions with HideNonSensicalMessages {
+
+ import MessageContainer._
/** maximal number of error messages to be printed */
protected def ErrorLimit = 100
- def printSourceLine(pos: SourcePosition) =
- printMessage(pos.lineContent.stripLineEnd)
-
- def printColumnMarker(pos: SourcePosition) =
- if (pos.exists) { printMessage(" " * pos.column + "^") }
-
/** Prints the message. */
def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() }
+ def stripColor(str: String): String =
+ str.replaceAll("\u001B\\[[;\\d]*m", "")
+
+ def sourceLines(pos: SourcePosition)(implicit ctx: Context): (List[String], List[String], Int) = {
+ var maxLen = Int.MinValue
+ def render(xs: List[Int]) =
+ xs.map(pos.source.offsetToLine(_))
+ .map { lineNbr =>
+ val prefix = s"${lineNbr + 1} |"
+ maxLen = math.max(maxLen, prefix.length)
+ (prefix, pos.lineContent(lineNbr).stripLineEnd)
+ }
+ .map { case (prefix, line) =>
+ val lnum = Red(" " * math.max(0, maxLen - prefix.length) + prefix)
+ hl"$lnum$line"
+ }
+
+ val (before, after) = pos.beforeAndAfterPoint
+ (render(before), render(after), maxLen)
+ }
+
+ def columnMarker(pos: SourcePosition, offset: Int)(implicit ctx: Context) = {
+ val prefix = " " * (offset - 1)
+ val whitespace = " " * pos.startColumn
+ val carets = Red {
+ if (pos.startLine == pos.endLine)
+ "^" * math.max(1, pos.endColumn - pos.startColumn)
+ else "^"
+ }
+
+ s"$prefix|$whitespace${carets.show}"
+ }
+
+ def errorMsg(pos: SourcePosition, msg: String, offset: Int)(implicit ctx: Context) = {
+ val leastWhitespace = msg.lines.foldLeft(Int.MaxValue) { (minPad, line) =>
+ val lineLength = stripColor(line).length
+ val padding =
+ math.min(math.max(0, ctx.settings.pageWidth.value - offset - lineLength), offset + pos.startColumn)
+
+ if (padding < minPad) padding
+ else minPad
+ }
+
+ msg.lines
+ .map { line => " " * (offset - 1) + "|" + (" " * (leastWhitespace - offset)) + line }
+ .mkString(sys.props("line.separator"))
+ }
+
+ def posStr(pos: SourcePosition, diagnosticLevel: String, message: Message)(implicit ctx: Context) =
+ if (pos.exists) Blue({
+ val file = pos.source.file.toString
+ val errId =
+ if (message.errorId != NoExplanation.ID)
+ s"[E${"0" * (3 - message.errorId.toString.length) + message.errorId}] "
+ else ""
+ val kind =
+ if (message.kind == "") diagnosticLevel
+ else s"${message.kind} $diagnosticLevel"
+ val prefix = s"-- ${errId}${kind}: $file "
+
+ prefix +
+ ("-" * math.max(ctx.settings.pageWidth.value - stripColor(prefix).length, 0))
+ }).show else ""
+
/** Prints the message with the given position indication. */
- def printMessageAndPos(msg: String, pos: SourcePosition)(implicit ctx: Context): Unit = {
- val posStr = if (pos.exists) s"$pos: " else ""
- printMessage(posStr + msg)
+ def printMessageAndPos(msg: Message, pos: SourcePosition, diagnosticLevel: String)(implicit ctx: Context): Boolean = {
+ printMessage(posStr(pos, diagnosticLevel, msg))
if (pos.exists) {
- printSourceLine(pos)
- printColumnMarker(pos)
- }
+ val (srcBefore, srcAfter, offset) = sourceLines(pos)
+ val marker = columnMarker(pos, offset)
+ val err = errorMsg(pos, msg.msg, offset)
+
+ printMessage((srcBefore ::: marker :: err :: srcAfter).mkString("\n"))
+ } else printMessage(msg.msg)
+ true
+ }
+
+ def printExplanation(m: Message)(implicit ctx: Context): Unit = {
+ printMessage(hl"""|
+ |${Blue("Explanation")}
+ |${Blue("===========")}""".stripMargin)
+ printMessage(m.explanation)
+ if (m.explanation.lastOption != Some('\n')) printMessage("")
}
- override def doReport(d: Diagnostic)(implicit ctx: Context): Unit = d match {
- case d: Error =>
- printMessageAndPos(s"error: ${d.message}", d.pos)
- if (ctx.settings.prompt.value) displayPrompt()
- case d: ConditionalWarning if !d.enablingOption.value =>
- case d: MigrationWarning =>
- printMessageAndPos(s"migration warning: ${d.message}", d.pos)
- case d: Warning =>
- printMessageAndPos(s"warning: ${d.message}", d.pos)
- case _ =>
- printMessageAndPos(d.message, d.pos)
+ override def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
+ val didPrint = m match {
+ case m: Error =>
+ val didPrint = printMessageAndPos(m.contained, m.pos, "Error")
+ if (ctx.settings.prompt.value) displayPrompt()
+ didPrint
+ case m: ConditionalWarning if !m.enablingOption.value =>
+ false
+ case m: FeatureWarning =>
+ printMessageAndPos(m.contained, m.pos, "Feature Warning")
+ case m: DeprecationWarning =>
+ printMessageAndPos(m.contained, m.pos, "Deprecation Warning")
+ case m: UncheckedWarning =>
+ printMessageAndPos(m.contained, m.pos, "Unchecked Warning")
+ case m: MigrationWarning =>
+ printMessageAndPos(m.contained, m.pos, "Migration Warning")
+ case m: Warning =>
+ printMessageAndPos(m.contained, m.pos, "Warning")
+ case m: Info =>
+ printMessageAndPos(m.contained, m.pos, "Info")
+ }
+
+ if (didPrint && ctx.shouldExplain(m))
+ printExplanation(m.contained)
+ else if (didPrint && m.contained.explanation.nonEmpty)
+ printMessage("\nlonger explanation available when compiling with `-explain`")
}
def displayPrompt(): Unit = {
@@ -70,3 +159,4 @@ class ConsoleReporter(
override def flush()(implicit ctx: Context): Unit = { writer.flush() }
}
+
diff --git a/src/dotty/tools/dotc/reporting/Diagnostic.scala b/src/dotty/tools/dotc/reporting/Diagnostic.scala
deleted file mode 100644
index bcf55e993..000000000
--- a/src/dotty/tools/dotc/reporting/Diagnostic.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-package dotty.tools
-package dotc
-package reporting
-
-import util.SourcePosition
-
-import java.util.Optional
-
-object Diagnostic {
- val nonSensicalStartTag = "<nonsensical>"
- val nonSensicalEndTag = "</nonsensical>"
-}
-
-class Diagnostic(msgFn: => String, val pos: SourcePosition, val level: Int)
- extends Exception with interfaces.Diagnostic {
- import Diagnostic._
- private var myMsg: String = null
- private var myIsNonSensical: Boolean = false
-
- override def position: Optional[interfaces.SourcePosition] =
- if (pos.exists && pos.source.exists) Optional.of(pos) else Optional.empty()
-
- /** The message to report */
- def message: String = {
- if (myMsg == null) {
- myMsg = msgFn
- if (myMsg.contains(nonSensicalStartTag)) {
- myIsNonSensical = true
- // myMsg might be composed of several d"..." invocations -> nested nonsensical tags possible
- myMsg = myMsg.replaceAllLiterally(nonSensicalStartTag, "").replaceAllLiterally(nonSensicalEndTag, "")
- }
- }
- myMsg
- }
-
- /** A message is non-sensical if it contains references to <nonsensical> tags.
- * Such tags are inserted by the error diagnostic framework if a message
- * contains references to internally generated error types. Normally we
- * want to suppress error messages referring to types like this because
- * they look weird and are normally follow-up errors to something that
- * was diagnosed before.
- */
- def isNonSensical = { message; myIsNonSensical }
-
- override def toString = s"$getClass at $pos: $message"
- override def getMessage() = message
-}
diff --git a/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala b/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
index a325fe9f7..ba1ab9b33 100644
--- a/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
+++ b/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala
@@ -3,6 +3,7 @@ package dotc
package reporting
import core.Contexts.Context
+import diagnostic.MessageContainer
/**
* This trait implements `isHidden` so that we avoid reporting non-sensical messages.
@@ -11,9 +12,9 @@ trait HideNonSensicalMessages extends Reporter {
/** Hides non-sensical messages, unless we haven't reported any error yet or
* `-Yshow-suppressed-errors` is set.
*/
- override def isHidden(d: Diagnostic)(implicit ctx: Context): Boolean =
- super.isHidden(d) || {
- d.isNonSensical &&
+ override def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean =
+ super.isHidden(m) || {
+ m.isNonSensical &&
hasErrors && // if there are no errors yet, report even if diagnostic is non-sensical
!ctx.settings.YshowSuppressedErrors.value
}
diff --git a/src/dotty/tools/dotc/reporting/Reporter.scala b/src/dotty/tools/dotc/reporting/Reporter.scala
index 8236f93ef..b38334412 100644
--- a/src/dotty/tools/dotc/reporting/Reporter.scala
+++ b/src/dotty/tools/dotc/reporting/Reporter.scala
@@ -4,43 +4,24 @@ package reporting
import core.Contexts._
import util.{SourcePosition, NoSourcePosition}
-import util.{SourceFile, NoSource}
import core.Decorators.PhaseListDecorator
import collection.mutable
-import config.Settings.Setting
import config.Printers
import java.lang.System.currentTimeMillis
-import typer.Mode
-import interfaces.Diagnostic.{ERROR, WARNING, INFO}
+import core.Mode
+import dotty.tools.dotc.core.Symbols.Symbol
+import diagnostic.messages._
+import diagnostic._
+import Message._
object Reporter {
- class Error(msgFn: => String, pos: SourcePosition) extends Diagnostic(msgFn, pos, ERROR)
- class Warning(msgFn: => String, pos: SourcePosition) extends Diagnostic(msgFn, pos, WARNING)
- class Info(msgFn: => String, pos: SourcePosition) extends Diagnostic(msgFn, pos, INFO)
-
- abstract class ConditionalWarning(msgFn: => String, pos: SourcePosition) extends Warning(msgFn, pos) {
- def enablingOption(implicit ctx: Context): Setting[Boolean]
- }
- class FeatureWarning(msgFn: => String, pos: SourcePosition) extends ConditionalWarning(msgFn, pos) {
- def enablingOption(implicit ctx: Context) = ctx.settings.feature
- }
- class UncheckedWarning(msgFn: => String, pos: SourcePosition) extends ConditionalWarning(msgFn, pos) {
- def enablingOption(implicit ctx: Context) = ctx.settings.unchecked
- }
- class DeprecationWarning(msgFn: => String, pos: SourcePosition) extends ConditionalWarning(msgFn, pos) {
- def enablingOption(implicit ctx: Context) = ctx.settings.deprecation
- }
- class MigrationWarning(msgFn: => String, pos: SourcePosition) extends ConditionalWarning(msgFn, pos) {
- def enablingOption(implicit ctx: Context) = ctx.settings.migration
- }
-
/** Convert a SimpleReporter into a real Reporter */
def fromSimpleReporter(simple: interfaces.SimpleReporter): Reporter =
new Reporter with UniqueMessagePositions with HideNonSensicalMessages {
- override def doReport(d: Diagnostic)(implicit ctx: Context): Unit = d match {
- case d: ConditionalWarning if !d.enablingOption.value =>
+ override def doReport(m: MessageContainer)(implicit ctx: Context): Unit = m match {
+ case m: ConditionalWarning if !m.enablingOption.value =>
case _ =>
- simple.report(d)
+ simple.report(m)
}
}
}
@@ -51,43 +32,64 @@ trait Reporting { this: Context =>
/** For sending messages that are printed only if -verbose is set */
def inform(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- if (this.settings.verbose.value) this.println(msg, pos)
+ if (this.settings.verbose.value) this.echo(msg, pos)
- def println(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
+ def echo(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
reporter.report(new Info(msg, pos))
- def deprecationWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- reporter.report(new DeprecationWarning(msg, pos))
-
- def migrationWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- reporter.report(new MigrationWarning(msg, pos))
-
- def uncheckedWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- reporter.report(new UncheckedWarning(msg, pos))
+ def deprecationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ reporter.report(msg.deprecationWarning(pos))
+
+ def migrationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ reporter.report(msg.migrationWarning(pos))
+
+ def uncheckedWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ reporter.report(msg.uncheckedWarning(pos))
+
+ def featureWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ reporter.report(msg.featureWarning(pos))
+
+ def featureWarning(feature: String, featureDescription: String, isScala2Feature: Boolean,
+ featureUseSite: Symbol, required: Boolean, pos: SourcePosition): Unit = {
+ val req = if (required) "needs to" else "should"
+ val prefix = if (isScala2Feature) "scala." else "dotty."
+ val fqname = prefix + "language." + feature
+
+ val explain = {
+ if (reporter.isReportedFeatureUseSite(featureUseSite)) ""
+ else {
+ reporter.reportNewFeatureUseSite(featureUseSite)
+ s"""
+ |This can be achieved by adding the import clause 'import $fqname'
+ |or by setting the compiler option -language:$feature.
+ |See the Scala docs for value $fqname for a discussion
+ |why the feature $req be explicitly enabled."""
+ }
+ }
- def featureWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- reporter.report(new FeatureWarning(msg, pos))
+ val msg = s"$featureDescription $req be enabled\nby making the implicit value $fqname visible.$explain"
+ if (required) error(msg, pos)
+ else reporter.report(new FeatureWarning(msg, pos))
+ }
- def warning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- reporter.report(new Warning(msg, pos))
+ def warning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ reporter.report(msg.warning(pos))
- def strictWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
+ def strictWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.strict.value) error(msg, pos)
- else warning(msg + "\n(This would be an error under strict mode)", pos)
+ else warning(msg.mapMsg(_ + "\n(This would be an error under strict mode)"), pos)
- def error(msg: => String, pos: SourcePosition = NoSourcePosition): Unit = {
- // println("*** ERROR: " + msg) // !!! DEBUG
- reporter.report(new Error(msg, pos))
- }
+ def error(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ reporter.report(msg.error(pos))
- def errorOrMigrationWarning(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
+ def errorOrMigrationWarning(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
if (ctx.scala2Mode) migrationWarning(msg, pos) else error(msg, pos)
- def restrictionError(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
- error(s"Implementation restriction: $msg", pos)
+ def restrictionError(msg: => Message, pos: SourcePosition = NoSourcePosition): Unit =
+ error(msg.mapMsg(m => s"Implementation restriction: $m"), pos)
- def incompleteInputError(msg: String, pos: SourcePosition = NoSourcePosition)(implicit ctx: Context): Unit =
- reporter.incomplete(new Error(msg, pos))(ctx)
+ def incompleteInputError(msg: Message, pos: SourcePosition = NoSourcePosition)(implicit ctx: Context): Unit =
+ reporter.incomplete(msg.error(pos))(ctx)
/** Log msg if settings.log contains the current phase.
* See [[config.CompilerCommand#explainAdvanced]] for the exact meaning of
@@ -95,7 +97,7 @@ trait Reporting { this: Context =>
*/
def log(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.log.value.containsPhase(phase))
- this.println(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg", pos)
+ echo(s"[log ${ctx.phasesStack.reverse.mkString(" -> ")}] $msg", pos)
def debuglog(msg: => String): Unit =
if (ctx.debug) log(msg)
@@ -116,28 +118,32 @@ trait Reporting { this: Context =>
def debugwarn(msg: => String, pos: SourcePosition = NoSourcePosition): Unit =
if (this.settings.debug.value) warning(msg, pos)
- def debugTraceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T =
+ @inline
+ def debugTraceIndented[TD](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => TD): TD =
conditionalTraceIndented(this.settings.debugTrace.value, question, printer, show)(op)
- def conditionalTraceIndented[T](cond: Boolean, question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T =
- if (cond) traceIndented(question, printer, show)(op)
+ @inline
+ def conditionalTraceIndented[TC](cond: Boolean, question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => TC): TC =
+ if (cond) traceIndented[TC](question, printer, show)(op)
else op
- def traceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T = {
+ @inline
+ def traceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T =
+ if (printer eq config.Printers.noPrinter) op
+ else doTraceIndented[T](question, printer, show)(op)
+
+ private def doTraceIndented[T](question: => String, printer: Printers.Printer = Printers.default, show: Boolean = false)(op: => T): T = {
def resStr(res: Any): String = res match {
case res: printing.Showable if show => res.show
case _ => String.valueOf(res)
}
- if (printer eq config.Printers.noPrinter) op
- else {
- // Avoid evaluating question multiple time, since each evaluation
- // may cause some extra logging output.
- lazy val q: String = question
- traceIndented[T](s"==> $q?", (res: Any) => s"<== $q = ${resStr(res)}")(op)
- }
+ // Avoid evaluating question multiple times, since each evaluation
+ // may cause some extra logging output.
+ lazy val q: String = question
+ doTraceIndented[T](s"==> $q?", (res: Any) => s"<== $q = ${resStr(res)}")(op)
}
- def traceIndented[T](leading: => String, trailing: Any => String)(op: => T): T =
+ def doTraceIndented[T](leading: => String, trailing: Any => String)(op: => T): T =
if (ctx.mode.is(Mode.Printing)) op
else {
var finalized = false
@@ -170,9 +176,9 @@ trait Reporting { this: Context =>
abstract class Reporter extends interfaces.ReporterResult {
/** Report a diagnostic */
- def doReport(d: Diagnostic)(implicit ctx: Context): Unit
+ def doReport(d: MessageContainer)(implicit ctx: Context): Unit
- /** Whether very long lines can be truncated. This exists so important
+ /** Whether very long lines can be truncated. This exists so important
* debugging information (like printing the classpath) is not rendered
* invisible due to the max message length.
*/
@@ -185,7 +191,7 @@ abstract class Reporter extends interfaces.ReporterResult {
finally _truncationOK = saved
}
- type ErrorHandler = Diagnostic => Context => Unit
+ type ErrorHandler = MessageContainer => Context => Unit
private var incompleteHandler: ErrorHandler = d => c => report(d)(c)
def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = {
val saved = incompleteHandler
@@ -206,11 +212,15 @@ abstract class Reporter extends interfaces.ReporterResult {
*/
def errorsReported = hasErrors
+ private[this] var reportedFeaturesUseSites = Set[Symbol]()
+ def isReportedFeatureUseSite(featureTrait: Symbol): Boolean = reportedFeaturesUseSites.contains(featureTrait)
+ def reportNewFeatureUseSite(featureTrait: Symbol): Unit = reportedFeaturesUseSites += featureTrait
+
val unreportedWarnings = new mutable.HashMap[String, Int] {
override def default(key: String) = 0
}
- def report(d: Diagnostic)(implicit ctx: Context): Unit =
+ def report(d: MessageContainer)(implicit ctx: Context): Unit =
if (!isHidden(d)) {
doReport(d)(ctx.addMode(Mode.Printing))
d match {
@@ -224,10 +234,9 @@ abstract class Reporter extends interfaces.ReporterResult {
}
}
- def incomplete(d: Diagnostic)(implicit ctx: Context): Unit =
+ def incomplete(d: MessageContainer)(implicit ctx: Context): Unit =
incompleteHandler(d)(ctx)
-
/** Summary of warnings and errors */
def summary: String = {
val b = new mutable.ListBuffer[String]
@@ -243,12 +252,11 @@ abstract class Reporter extends interfaces.ReporterResult {
/** Print the summary of warnings and errors */
def printSummary(implicit ctx: Context): Unit = {
val s = summary
- if (s != "")
- ctx.println(s)
+ if (s != "") ctx.echo(s)
}
/** Returns a string meaning "n elements". */
- private def countString(n: Int, elements: String): String = n match {
+ protected def countString(n: Int, elements: String): String = n match {
case 0 => "no " + elements + "s"
case 1 => "one " + elements
case 2 => "two " + elements + "s"
@@ -258,7 +266,7 @@ abstract class Reporter extends interfaces.ReporterResult {
}
/** Should this diagnostic not be reported at all? */
- def isHidden(d: Diagnostic)(implicit ctx: Context): Boolean = ctx.mode.is(Mode.Printing)
+ def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean = ctx.mode.is(Mode.Printing)
/** Does this reporter contain not yet reported errors or warnings? */
def hasPending: Boolean = false
diff --git a/src/dotty/tools/dotc/reporting/StoreReporter.scala b/src/dotty/tools/dotc/reporting/StoreReporter.scala
index 954bff88e..e85017ed2 100644
--- a/src/dotty/tools/dotc/reporting/StoreReporter.scala
+++ b/src/dotty/tools/dotc/reporting/StoreReporter.scala
@@ -4,26 +4,27 @@ package reporting
import core.Contexts.Context
import collection.mutable
-import Reporter.{Error, Warning}
-import config.Printers._
+import config.Printers.typr
+import diagnostic.MessageContainer
+import diagnostic.messages._
/**
* This class implements a Reporter that stores all messages
*/
class StoreReporter(outer: Reporter) extends Reporter {
- private var infos: mutable.ListBuffer[Diagnostic] = null
+ private var infos: mutable.ListBuffer[MessageContainer] = null
- def doReport(d: Diagnostic)(implicit ctx: Context): Unit = {
- typr.println(s">>>> StoredError: ${d.message}") // !!! DEBUG
+ def doReport(m: MessageContainer)(implicit ctx: Context): Unit = {
+ typr.println(s">>>> StoredError: ${m.message}") // !!! DEBUG
if (infos == null) infos = new mutable.ListBuffer
- infos += d
+ infos += m
}
override def hasPending: Boolean = infos != null && {
infos exists {
- case d: Error => true
- case d: Warning => true
+ case _: Error => true
+ case _: Warning => true
case _ => false
}
}
diff --git a/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
index 026453036..d8e03ab66 100644
--- a/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
+++ b/src/dotty/tools/dotc/reporting/ThrowingReporter.scala
@@ -4,6 +4,8 @@ package reporting
import core.Contexts.Context
import collection.mutable
+import diagnostic.MessageContainer
+import diagnostic.messages.Error
import Reporter._
/**
@@ -11,8 +13,8 @@ import Reporter._
* info to the underlying reporter.
*/
class ThrowingReporter(reportInfo: Reporter) extends Reporter {
- def doReport(d: Diagnostic)(implicit ctx: Context): Unit = d match {
- case _: Error => throw d
- case _ => reportInfo.doReport(d)
+ def doReport(m: MessageContainer)(implicit ctx: Context): Unit = m match {
+ case _: Error => throw m
+ case _ => reportInfo.doReport(m)
}
}
diff --git a/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
index 32554e6b6..6fd971c2a 100644
--- a/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
+++ b/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala
@@ -5,11 +5,10 @@ package reporting
import scala.collection.mutable
import util.{SourcePosition, SourceFile}
import core.Contexts.Context
+import diagnostic.MessageContainer
-/**
- * This trait implements `isHidden` so that multiple messages per position
- * are suppressed, unless they are of increasing severity.
- */
+/** This trait implements `isHidden` so that multiple messages per position
+ * are suppressed, unless they are of increasing severity. */
trait UniqueMessagePositions extends Reporter {
private val positions = new mutable.HashMap[(SourceFile, Int), Int]
@@ -17,13 +16,17 @@ trait UniqueMessagePositions extends Reporter {
/** Logs a position and returns true if it was already logged.
* @note Two positions are considered identical for logging if they have the same point.
*/
- override def isHidden(d: Diagnostic)(implicit ctx: Context): Boolean =
- super.isHidden(d) || {
- d.pos.exists && {
- positions get (ctx.source, d.pos.point) match {
- case Some(level) if level >= d.level => true
- case _ => positions((ctx.source, d.pos.point)) = d.level; false
+ override def isHidden(m: MessageContainer)(implicit ctx: Context): Boolean =
+ super.isHidden(m) || {
+ m.pos.exists && {
+ var shouldHide = false
+ for (pos <- m.pos.start to m.pos.end) {
+ positions get (ctx.source, pos) match {
+ case Some(level) if level >= m.level => shouldHide = true
+ case _ => positions((ctx.source, pos)) = m.level
+ }
}
+ shouldHide
}
}
}
diff --git a/src/dotty/tools/dotc/reporting/diagnostic/Message.scala b/src/dotty/tools/dotc/reporting/diagnostic/Message.scala
new file mode 100644
index 000000000..8b1f65673
--- /dev/null
+++ b/src/dotty/tools/dotc/reporting/diagnostic/Message.scala
@@ -0,0 +1,106 @@
+package dotty.tools
+package dotc
+package reporting
+package diagnostic
+
+import util.SourcePosition
+import core.Contexts.Context
+
+object Message {
+ /** This implicit conversion provides a fallback for error messages that have
+ * not yet been ported to the new scheme. Comment out this `implicit def` to
+ * see where old errors still exist
+ */
+ implicit def toNoExplanation(str: String): Message =
+ new NoExplanation(str)
+}
+
+/** A `Message` contains all semantic information necessary to easily
+ * comprehend what caused the message to be logged. Each message can be turned
+ * into a `MessageContainer` which contains the log level and can later be
+ * consumed by a subclass of `Reporter`.
+ *
+ * @param errorId a unique number identifying the message, this will later be
+ * used to reference documentation online
+ */
+abstract class Message(val errorId: Int) { self =>
+ import messages._
+
+ /** The `msg` contains the diagnostic message e.g:
+ *
+ * > expected: String
+ * > found: Int
+ *
+ * This message will be placed underneath the position given by the enclosing
+ * `MessageContainer`
+ */
+ def msg: String
+
+ /** The kind of the error message is something like "Syntax" or "Type
+ * Mismatch"
+ */
+ def kind: String
+
+ /** The explanation should provide a detailed description of why the error
+ * occurred and use examples from the user's own code to illustrate how to
+ * avoid these errors.
+ */
+ def explanation: String
+
+ /** It is possible to map `msg` to add details, this is at the loss of
+ * precision since the type of the resulting `Message` won't be the
+ * original extending class
+ *
+ * @return a `Message` with the mapped message
+ */
+ def mapMsg(f: String => String) = new Message(errorId) {
+ val msg = f(self.msg)
+ val kind = self.kind
+ val explanation = self.explanation
+ }
+
+ /** Enclose this message in an `Error` container */
+ def error(pos: SourcePosition) =
+ new Error(self, pos)
+
+ /** Enclose this message in a `Warning` container */
+ def warning(pos: SourcePosition) =
+ new Warning(self, pos)
+
+ /** Enclose this message in an `Info` container */
+ def info(pos: SourcePosition) =
+ new Info(self, pos)
+
+ /** Enclose this message in a `FeatureWarning` container */
+ def featureWarning(pos: SourcePosition) =
+ new FeatureWarning(self, pos)
+
+ /** Enclose this message in an `UncheckedWarning` container */
+ def uncheckedWarning(pos: SourcePosition) =
+ new UncheckedWarning(self, pos)
+
+ /** Enclose this message in a `DeprecationWarning` container */
+ def deprecationWarning(pos: SourcePosition) =
+ new DeprecationWarning(self, pos)
+
+ /** Enclose this message in a `MigrationWarning` container */
+ def migrationWarning(pos: SourcePosition) =
+ new MigrationWarning(self, pos)
+}
+
+/** The fallback `Message` containing no explanation and having no `kind` */
+class NoExplanation(val msg: String) extends Message(NoExplanation.ID) {
+ val explanation = ""
+ val kind = ""
+}
+
+/** The extractor for `NoExplanation` can be used to check whether any error
+ * lacks an explanation
+ */
+object NoExplanation {
+ final val ID = -1
+
+ def unapply(m: Message): Option[Message] =
+ if (m.explanation == "") Some(m)
+ else None
+}
diff --git a/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala b/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
new file mode 100644
index 000000000..7fd50bfdc
--- /dev/null
+++ b/src/dotty/tools/dotc/reporting/diagnostic/MessageContainer.scala
@@ -0,0 +1,74 @@
+package dotty.tools
+package dotc
+package reporting
+package diagnostic
+
+import util.SourcePosition
+import core.Contexts.Context
+
+import java.util.Optional
+
+object MessageContainer {
+ val nonSensicalStartTag = "<nonsensical>"
+ val nonSensicalEndTag = "</nonsensical>"
+
+ implicit class MessageContext(val c: Context) extends AnyVal {
+ def shouldExplain(cont: MessageContainer): Boolean = {
+ implicit val ctx: Context = c
+ cont.contained.explanation match {
+ case "" => false
+ case _ => ctx.settings.explain.value
+ }
+ }
+ }
+}
+
+class MessageContainer(
+ msgFn: => Message,
+ val pos: SourcePosition,
+ val level: Int
+) extends Exception with interfaces.Diagnostic {
+ import MessageContainer._
+ private var myMsg: String = null
+ private var myIsNonSensical: Boolean = false
+ private var myContained: Message = null
+
+ override def position: Optional[interfaces.SourcePosition] =
+ if (pos.exists && pos.source.exists) Optional.of(pos) else Optional.empty()
+
+ /** The message to report */
+ def message: String = {
+ if (myMsg == null) {
+ myMsg = contained.msg.replaceAll("\u001B\\[[;\\d]*m", "")
+ if (myMsg.contains(nonSensicalStartTag)) {
+ myIsNonSensical = true
+ // myMsg might be composed of several d"..." invocations -> nested
+ // nonsensical tags possible
+ myMsg =
+ myMsg
+ .replaceAllLiterally(nonSensicalStartTag, "")
+ .replaceAllLiterally(nonSensicalEndTag, "")
+ }
+ }
+ myMsg
+ }
+
+ def contained: Message = {
+ if (myContained == null)
+ myContained = msgFn
+
+ myContained
+ }
+
+ /** A message is non-sensical if it contains references to <nonsensical>
+ * tags. Such tags are inserted by the error diagnostic framework if a
+ * message contains references to internally generated error types. Normally
+ * we want to suppress error messages referring to types like this because
+ * they look weird and are normally follow-up errors to something that was
+ * diagnosed before.
+ */
+ def isNonSensical = { message; myIsNonSensical }
+
+ override def toString = s"$getClass at $pos: ${message}"
+ override def getMessage() = message
+}
diff --git a/src/dotty/tools/dotc/reporting/diagnostic/messages.scala b/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
new file mode 100644
index 000000000..9cfac4801
--- /dev/null
+++ b/src/dotty/tools/dotc/reporting/diagnostic/messages.scala
@@ -0,0 +1,277 @@
+package dotty.tools
+package dotc
+package reporting
+package diagnostic
+
+import dotc.core._
+import Contexts.Context, Decorators._, Symbols._, Names._, Types._
+import util.{SourceFile, NoSource}
+import util.{SourcePosition, NoSourcePosition}
+import config.Settings.Setting
+import interfaces.Diagnostic.{ERROR, WARNING, INFO}
+import printing.SyntaxHighlighting._
+import printing.Formatting
+
+object messages {
+
+ // `MessageContainer`s to be consumed by `Reporter` ---------------------- //
+ class Error(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends MessageContainer(msgFn, pos, ERROR)
+
+ class Warning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends MessageContainer(msgFn, pos, WARNING)
+
+ class Info(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends MessageContainer(msgFn, pos, INFO)
+
+ abstract class ConditionalWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends Warning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context): Setting[Boolean]
+ }
+
+ class FeatureWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.feature
+ }
+
+ class UncheckedWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.unchecked
+ }
+
+ class DeprecationWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.deprecation
+ }
+
+ class MigrationWarning(
+ msgFn: => Message,
+ pos: SourcePosition
+ ) extends ConditionalWarning(msgFn, pos) {
+ def enablingOption(implicit ctx: Context) = ctx.settings.migration
+ }
+
+ /** Messages
+ * ========
+ * The role of messages is to provide the necessary details for a simple-to-
+ * understand diagnostic event. Each message can be turned into a message
+ * container (one of the above) by calling the appropriate method on them.
+ * For instance:
+ *
+ * ```scala
+ * EmptyCatchBlock(tree).error(pos) // res: Error
+ * EmptyCatchBlock(tree).warning(pos) // res: Warning
+ * ```
+ */
+ import dotc.ast.Trees._
+ import dotc.ast.untpd
+
+ // Syntax Errors ---------------------------------------------------------- //
+ abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: Int)(implicit ctx: Context)
+ extends Message(errNo) {
+ val explanation = {
+ val tryString = tryBody match {
+ case Block(Nil, untpd.EmptyTree) => "{}"
+ case _ => tryBody.show
+ }
+
+ val code1 =
+ s"""|import scala.util.control.NonFatal
+ |
+ |try $tryString catch {
+ | case NonFatal(e) => ???
+ |}""".stripMargin
+
+ val code2 =
+ s"""|try $tryString finally {
+ | // perform your cleanup here!
+ |}""".stripMargin
+
+ hl"""|A ${"try"} expression should be followed by some mechanism to handle any exceptions
+ |thrown. Typically a ${"catch"} expression follows the ${"try"} and pattern matches
+ |on any expected exceptions. For example:
+ |
+ |$code1
+ |
+ |It is also possible to follow a ${"try"} immediately by a ${"finally"} - letting the
+ |exception propagate - but still allowing for some clean up in ${"finally"}:
+ |
+ |$code2
+ |
+ |It is recommended to use the ${"NonFatal"} extractor to catch all exceptions as it
+ |correctly handles transfer functions like ${"return"}.""".stripMargin
+ }
+ }
+
+ case class EmptyCatchBlock(tryBody: untpd.Tree)(implicit ctx: Context)
+ extends EmptyCatchOrFinallyBlock(tryBody, 1) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|The ${"catch"} block does not contain a valid expression, try
+ |adding a case like - `${"case e: Exception =>"}` to the block""".stripMargin
+ }
+
+ case class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(implicit ctx: Context)
+ extends EmptyCatchOrFinallyBlock(tryBody, 2) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|A ${"try"} without ${"catch"} or ${"finally"} is equivalent to putting
+ |its body in a block; no exceptions are handled.""".stripMargin
+ }
+
+ case class DeprecatedWithOperator()(implicit ctx: Context)
+ extends Message(3) {
+ val kind = "Syntax"
+ val msg =
+ hl"""${"with"} as a type operator has been deprecated; use `&' instead"""
+ val explanation =
+ hl"""|Dotty introduces intersection types - `&' types. These replace the
+ |use of the ${"with"} keyword. There are a few differences in
+ |semantics between intersection types and using `${"with"}'.""".stripMargin
+ }
+
+ case class CaseClassMissingParamList(cdef: untpd.TypeDef)(implicit ctx: Context)
+ extends Message(4) {
+ val kind = "Syntax"
+ val msg =
+ hl"""|A ${"case class"} must have at least one parameter list"""
+
+ val explanation =
+ hl"""|${cdef.name} must have at least one parameter list, if you would rather
+ |have a singleton representation of ${cdef.name}, use a "${"case object"}".
+ |Or, add an explicit `()' as a parameter list to ${cdef.name}.""".stripMargin
+ }
+
+
+ // Type Errors ------------------------------------------------------------ //
+ case class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(implicit ctx: Context)
+ extends Message(5) {
+ val kind = "Naming"
+ val msg = em"duplicate pattern variable: `${bind.name}`"
+
+ val explanation = {
+ val pat = tree.pat.show
+ val guard = tree.guard match {
+ case untpd.EmptyTree => ""
+ case guard => s"if ${guard.show}"
+ }
+
+ val body = tree.body match {
+ case Block(Nil, untpd.EmptyTree) => ""
+ case body => s" ${body.show}"
+ }
+
+ val caseDef = s"case $pat$guard => $body"
+
+ hl"""|For each ${"case"} bound variable names have to be unique. In:
+ |
+ |$caseDef
+ |
+ |`${bind.name}` is not unique. Rename one of the bound variables!""".stripMargin
+ }
+ }
+
+ case class MissingIdent(tree: untpd.Ident, treeKind: String, name: String)(implicit ctx: Context)
+ extends Message(6) {
+ val kind = "Missing Identifier"
+ val msg = em"not found: $treeKind$name"
+
+ val explanation = {
+ hl"""|An identifier for `$treeKind$name` is missing. This means that something
+ |has either been misspelt or you're forgetting an import""".stripMargin
+ }
+ }
+
+ case class TypeMismatch(found: Type, expected: Type, whyNoMatch: String = "", implicitFailure: String = "")(implicit ctx: Context)
+ extends Message(7) {
+ val kind = "Type Mismatch"
+ val msg = {
+ val (where, printCtx) = Formatting.disambiguateTypes(found, expected)
+ val (fnd, exp) = Formatting.typeDiff(found, expected)(printCtx)
+ s"""|found: $fnd
+ |required: $exp
+ |
+ |$where""".stripMargin + whyNoMatch + implicitFailure
+ }
+
+ val explanation = ""
+ }
+
+ case class NotAMember(site: Type, name: Name, selected: String)(implicit ctx: Context)
+ extends Message(8) {
+ val kind = "Member Not Found"
+
+ val msg = {
+ import core.Flags._
+ val maxDist = 3
+ val decls = site.decls.flatMap { sym =>
+ if (sym.is(Synthetic | PrivateOrLocal) || sym.isConstructor) Nil
+ else List((sym.name.show, sym))
+ }
+
+ // Calculate Levenshtein distance
+ def distance(n1: Iterable[_], n2: Iterable[_]) =
+ n1.foldLeft(List.range(0, n2.size)) { (prev, x) =>
+ (prev zip prev.tail zip n2).scanLeft(prev.head + 1) {
+ case (h, ((d, v), y)) => math.min(
+ math.min(h + 1, v + 1),
+ if (x == y) d else d + 1
+ )
+ }
+ }.last
+
+ // Count number of wrong characters
+ def incorrectChars(x: (String, Int, Symbol)): (String, Symbol, Int) = {
+ val (currName, _, sym) = x
+ val matching = name.show.zip(currName).foldLeft(0) {
+ case (acc, (x,y)) => if (x != y) acc + 1 else acc
+ }
+ (currName, sym, matching)
+ }
+
+ // Get closest match in `site`
+ val closest =
+ decls
+ .map { case (n, sym) => (n, distance(n, name.show), sym) }
+ .collect { case (n, dist, sym) if dist <= maxDist => (n, dist, sym) }
+ .groupBy(_._2).toList
+ .sortBy(_._1)
+ .headOption.map(_._2).getOrElse(Nil)
+ .map(incorrectChars).toList
+ .sortBy(_._3)
+ .take(1).map { case (n, sym, _) => (n, sym) }
+
+ val siteName = site match {
+ case site: NamedType => site.name.show
+ case site => i"$site"
+ }
+
+ val closeMember = closest match {
+ case (n, sym) :: Nil => hl""" - did you mean `${s"$siteName.$n"}`?"""
+ case Nil => ""
+ case _ => assert(
+ false,
+ "Could not single out one distinct member to match on input with"
+ )
+ }
+
+ ex"$selected `$name` is not a member of $site$closeMember"
+ }
+
+ val explanation = ""
+ }
+}
diff --git a/src/dotty/tools/dotc/rewrite/Rewrites.scala b/src/dotty/tools/dotc/rewrite/Rewrites.scala
index 7ab0e5d59..c42c808fe 100644
--- a/src/dotty/tools/dotc/rewrite/Rewrites.scala
+++ b/src/dotty/tools/dotc/rewrite/Rewrites.scala
@@ -75,7 +75,7 @@ object Rewrites {
*/
def writeBack()(implicit ctx: Context) =
for (rewrites <- ctx.settings.rewrite.value; source <- rewrites.patched.keys) {
- ctx.println(s"[patched file ${source.file.path}]")
+ ctx.echo(s"[patched file ${source.file.path}]")
rewrites.patched(source).writeBack()
}
}
diff --git a/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/src/dotty/tools/dotc/sbt/ExtractAPI.scala
new file mode 100644
index 000000000..bc8528c05
--- /dev/null
+++ b/src/dotty/tools/dotc/sbt/ExtractAPI.scala
@@ -0,0 +1,518 @@
+package dotty.tools.dotc
+package sbt
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._
+import Names._, NameOps._, StdNames._
+import typer.Inliner
+
+import dotty.tools.io.Path
+import java.io.PrintWriter
+
+import scala.collection.mutable
+
+/** This phase sends a representation of the API of classes to sbt via callbacks.
+ *
+ * This is used by sbt for incremental recompilation.
+ *
+ * See the documentation of `ExtractAPICollector`, `ExtractDependencies`,
+ * `ExtractDependenciesCollector` and
+ * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html for more
+ * information on incremental recompilation.
+ *
+ * The following flags affect this phase:
+ * -Yforce-sbt-phases
+ * -Ydump-sbt-inc
+ *
+ * @see ExtractDependencies
+ */
+class ExtractAPI extends Phase {
+ override def phaseName: String = "sbt-api"
+
+ // SuperAccessors need to be part of the API (see the scripted test
+ // `trait-super` for an example where this matters), this is only the case
+ // after `PostTyper` (unlike `ExtractDependencies`, the simplifications to trees
+ // done by `PostTyper` do not affect this phase because it only cares about
+ // definitions, and `PostTyper` does not change definitions).
+ override def runsAfter = Set(classOf[transform.PostTyper])
+
+ override def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ val dumpInc = ctx.settings.YdumpSbtInc.value
+ val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value
+ if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) {
+ val sourceFile = unit.source.file.file
+ val apiTraverser = new ExtractAPICollector
+ val source = apiTraverser.apiSource(unit.tpdTree)
+
+ if (dumpInc) {
+ // Append to existing file that should have been created by ExtractDependencies
+ val pw = new PrintWriter(Path(sourceFile).changeExtension("inc").toFile
+ .bufferedWriter(append = true), true)
+ try {
+ pw.println(DefaultShowAPI(source))
+ } finally pw.close()
+ }
+
+ if (ctx.sbtCallback != null)
+ ctx.sbtCallback.api(sourceFile, source)
+ }
+ }
+}
+
+/** Extracts full (including private members) API representation out of Symbols and Types.
+ *
+ * The exact representation used for each type is not important: the only thing
+ * that matters is that a binary-incompatible or source-incompatible change to
+ * the API (for example, changing the signature of a method, or adding a parent
+ * to a class) should result in a change to the API representation so that sbt
+ * can recompile files that depend on this API.
+ *
+ * Note that we only record types as they are defined and never "as seen from"
+ * some other prefix because `Types#asSeenFrom` is a complex operation and
+ * doing it for every inherited member would be slow, and because the number
+ * of prefixes can be enormous in some cases:
+ *
+ * class Outer {
+ * type T <: S
+ * type S
+ * class A extends Outer { /*...*/ }
+ * class B extends Outer { /*...*/ }
+ * class C extends Outer { /*...*/ }
+ * class D extends Outer { /*...*/ }
+ * class E extends Outer { /*...*/ }
+ * }
+ *
+ * `S` might be refined in an arbitrary way inside `A` for example, this
+ * affects the type of `T` as seen from `Outer#A`, so we could record that, but
+ * the class `A` also contains itself as a member, so `Outer#A#A#A#...` is a
+ * valid prefix for `T`. Even if we avoid loops, we still have a combinatorial
+ * explosion of possible prefixes, like `Outer#A#B#C#D#E`.
+ *
+ * It is much simpler to record `T` once where it is defined, but that means
+ * that the API representation of `T` may not change even though `T` as seen
+ * from some prefix has changed. This is why in `ExtractDependencies` we need
+ * to traverse used types to not miss dependencies, see the documentation of
+ * `ExtractDependencies#usedTypeTraverser`.
+ *
+ * TODO: sbt does not store the full representation that we compute, instead it
+ * hashes parts of it to reduce memory usage, then to see if something changed,
+ * it compares the hashes instead of comparing the representations. We should
+ * investigate whether we can just directly compute hashes in this phase
+ * without going through an intermediate representation, see
+ * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation
+ */
+private class ExtractAPICollector(implicit val ctx: Context) extends ThunkHolder {
+ import tpd._
+ import xsbti.api
+
+ /** This cache is necessary for correctness, see the comment about inherited
+ * members in `apiClassStructure`
+ */
+ private[this] val classLikeCache = new mutable.HashMap[ClassSymbol, api.ClassLike]
+ /** This cache is optional, it avoids recomputing representations */
+ private[this] val typeCache = new mutable.HashMap[Type, api.Type]
+
+ private[this] object Constants {
+ val emptyStringArray = Array[String]()
+ val local = new api.ThisQualifier
+ val public = new api.Public
+ val privateLocal = new api.Private(local)
+ val protectedLocal = new api.Protected(local)
+ val unqualified = new api.Unqualified
+ val thisPath = new api.This
+ val emptyType = new api.EmptyType
+ val emptyModifiers =
+ new api.Modifiers(false, false, false, false, false,false, false, false)
+ }
+
+ /** Some Dotty types do not have a corresponding type in xsbti.api.* that
+ * represents them. Until this is fixed we can workaround this by using
+ * special annotations that can never appear in the source code to
+ * represent these types.
+ *
+ * @param tp An approximation of the type we're trying to represent
+ * @param marker A special annotation to differentiate our type
+ */
+ private def withMarker(tp: api.Type, marker: api.Annotation) =
+ new api.Annotated(tp, Array(marker))
+ private def marker(name: String) =
+ new api.Annotation(new api.Constant(Constants.emptyType, name), Array())
+ val orMarker = marker("Or")
+ val byNameMarker = marker("ByName")
+
+
+ /** Extract the API representation of a source file */
+ def apiSource(tree: Tree): api.SourceAPI = {
+ val classes = new mutable.ListBuffer[api.ClassLike]
+ def apiClasses(tree: Tree): Unit = tree match {
+ case PackageDef(_, stats) =>
+ stats.foreach(apiClasses)
+ case tree: TypeDef =>
+ classes += apiClass(tree.symbol.asClass)
+ case _ =>
+ }
+
+ apiClasses(tree)
+ forceThunks()
+ new api.SourceAPI(Array(), classes.toArray)
+ }
+
+ def apiClass(sym: ClassSymbol): api.ClassLike =
+ classLikeCache.getOrElseUpdate(sym, computeClass(sym))
+
+ private def computeClass(sym: ClassSymbol): api.ClassLike = {
+ import xsbti.api.{DefinitionType => dt}
+ val defType =
+ if (sym.is(Trait)) dt.Trait
+ else if (sym.is(ModuleClass)) {
+ if (sym.is(PackageClass)) dt.PackageModule
+ else dt.Module
+ } else dt.ClassDef
+
+ val selfType = apiType(sym.classInfo.givenSelfType)
+
+ val name = if (sym.is(ModuleClass)) sym.fullName.sourceModuleName else sym.fullName
+
+ val tparams = sym.typeParams.map(apiTypeParameter)
+
+ val structure = apiClassStructure(sym)
+
+ new api.ClassLike(
+ defType, strict2lzy(selfType), strict2lzy(structure), Constants.emptyStringArray,
+ tparams.toArray, name.toString, apiAccess(sym), apiModifiers(sym),
+ apiAnnotations(sym).toArray)
+ }
+
+ private[this] val LegacyAppClass = ctx.requiredClass("dotty.runtime.LegacyApp")
+
+ def apiClassStructure(csym: ClassSymbol): api.Structure = {
+ val cinfo = csym.classInfo
+
+ val bases = linearizedAncestorTypes(cinfo)
+ val apiBases = bases.map(apiType)
+
+ // Synthetic methods that are always present do not affect the API
+ // and can therefore be ignored.
+ def alwaysPresent(s: Symbol) =
+ s.isCompanionMethod || (csym.is(ModuleClass) && s.isConstructor)
+ val decls = cinfo.decls.filterNot(alwaysPresent).toList
+ val apiDecls = apiDefinitions(decls)
+
+ val declSet = decls.toSet
+ // TODO: We shouldn't have to compute inherited members. Instead, `Structure`
+ // should have a lazy `parentStructures` field.
+ val inherited = cinfo.baseClasses
+ // We cannot filter out `LegacyApp` because it contains the main method,
+ // see the comment about main class discovery in `computeType`.
+ .filter(bc => !bc.is(Scala2x) || bc.eq(LegacyAppClass))
+ .flatMap(_.classInfo.decls.filterNot(s => s.is(Private) || declSet.contains(s)))
+ // Inherited members need to be computed lazily because a class might contain
+ // itself as an inherited member, like in `class A { class B extends A }`,
+ // this works because of `classLikeCache`
+ val apiInherited = lzy(apiDefinitions(inherited).toArray)
+
+ new api.Structure(strict2lzy(apiBases.toArray), strict2lzy(apiDecls.toArray), apiInherited)
+ }
+
+ def linearizedAncestorTypes(info: ClassInfo): List[Type] = {
+ val ref = info.fullyAppliedRef
+ // Note that the ordering of classes in `baseClasses` is important.
+ info.baseClasses.tail.map(ref.baseTypeWithArgs)
+ }
+
+ def apiDefinitions(defs: List[Symbol]): List[api.Definition] = {
+ // The hash generated by sbt for definitions is supposed to be symmetric so
+ // we shouldn't have to sort them, but it actually isn't symmetric for
+ // definitions which are classes, therefore we need to sort classes to
+ // ensure a stable hash.
+ // Modules and classes come first and are sorted by name, all other
+ // definitions come later and are not sorted.
+ object classFirstSort extends Ordering[Symbol] {
+ override def compare(a: Symbol, b: Symbol) = {
+ val aIsClass = a.isClass
+ val bIsClass = b.isClass
+ if (aIsClass == bIsClass) {
+ if (aIsClass) {
+ if (a.is(Module) == b.is(Module))
+ a.fullName.toString.compareTo(b.fullName.toString)
+ else if (a.is(Module))
+ -1
+ else
+ 1
+ } else
+ 0
+ } else if (aIsClass)
+ -1
+ else
+ 1
+ }
+ }
+
+ defs.sorted(classFirstSort).map(apiDefinition)
+ }
+
+ def apiDefinition(sym: Symbol): api.Definition = {
+ if (sym.isClass) {
+ apiClass(sym.asClass)
+ } else if (sym.isType) {
+ apiTypeMember(sym.asType)
+ } else if (sym.is(Mutable, butNot = Accessor)) {
+ new api.Var(apiType(sym.info), sym.name.toString,
+ apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray)
+ } else if (sym.isStable) {
+ new api.Val(apiType(sym.info), sym.name.toString,
+ apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray)
+ } else {
+ apiDef(sym.asTerm)
+ }
+ }
+
+ def apiDef(sym: TermSymbol): api.Def = {
+ def paramLists(t: Type, start: Int = 0): List[api.ParameterList] = t match {
+ case pt: PolyType =>
+ assert(start == 0)
+ paramLists(pt.resultType)
+ case mt @ MethodType(pnames, ptypes) =>
+ // TODO: We shouldn't have to work so hard to find the default parameters
+ // of a method, Dotty should expose a convenience method for that, see #1143
+ val defaults =
+ if (sym.is(DefaultParameterized)) {
+ val qual =
+ if (sym.isClassConstructor)
+ sym.owner.companionModule // default getters for class constructors are found in the companion object
+ else
+ sym.owner
+ (0 until pnames.length).map(i => qual.info.member(sym.name.defaultGetterName(start + i)).exists)
+ } else
+ (0 until pnames.length).map(Function.const(false))
+ val params = (pnames, ptypes, defaults).zipped.map((pname, ptype, isDefault) =>
+ new api.MethodParameter(pname.toString, apiType(ptype),
+ isDefault, api.ParameterModifier.Plain))
+ new api.ParameterList(params.toArray, mt.isImplicit) :: paramLists(mt.resultType, params.length)
+ case _ =>
+ Nil
+ }
+
+ val tparams = sym.info match {
+ case pt: PolyType =>
+ (pt.paramNames, pt.paramBounds).zipped.map((pname, pbounds) =>
+ apiTypeParameter(pname.toString, 0, pbounds.lo, pbounds.hi))
+ case _ =>
+ Nil
+ }
+ val vparamss = paramLists(sym.info)
+ val retTp = sym.info.finalResultType.widenExpr
+
+ new api.Def(vparamss.toArray, apiType(retTp), tparams.toArray,
+ sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym).toArray)
+ }
+
+ def apiTypeMember(sym: TypeSymbol): api.TypeMember = {
+ val typeParams = Array[api.TypeParameter]()
+ val name = sym.name.toString
+ val access = apiAccess(sym)
+ val modifiers = apiModifiers(sym)
+ val as = apiAnnotations(sym)
+ val tpe = sym.info
+
+ if (sym.isAliasType)
+ new api.TypeAlias(apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.toArray)
+ else {
+ assert(sym.isAbstractType)
+ new api.TypeDeclaration(apiType(tpe.bounds.lo), apiType(tpe.bounds.hi), typeParams, name, access, modifiers, as.toArray)
+ }
+ }
+
+ def apiType(tp: Type): api.Type = {
+ typeCache.getOrElseUpdate(tp, computeType(tp))
+ }
+
+ private def computeType(tp: Type): api.Type = {
+ // TODO: Never dealias. We currently have to dealias because
+ // sbt main class discovery relies on the signature of the main
+ // method being fully dealiased. See https://github.com/sbt/zinc/issues/102
+ val tp2 = if (!tp.isHK) tp.dealias else tp
+ tp2 match {
+ case NoPrefix | NoType =>
+ Constants.emptyType
+ case tp: NamedType =>
+ val sym = tp.symbol
+ // Normalize package prefix to avoid instability of representation
+ val prefix = if (sym.isClass && sym.owner.is(Package))
+ sym.owner.thisType
+ else
+ tp.prefix
+ new api.Projection(simpleType(prefix), sym.name.toString)
+ case TypeApplications.AppliedType(tycon, args) =>
+ def processArg(arg: Type): api.Type = arg match {
+ case arg @ TypeBounds(lo, hi) => // Handle wildcard parameters
+ if (lo.eq(defn.NothingType) && hi.eq(defn.AnyType))
+ Constants.emptyType
+ else {
+ val name = "_"
+ val ref = new api.ParameterRef(name)
+ new api.Existential(ref,
+ Array(apiTypeParameter(name, arg.variance, lo, hi)))
+ }
+ case _ =>
+ apiType(arg)
+ }
+
+ val apiTycon = simpleType(tycon)
+ val apiArgs = args.map(processArg)
+ new api.Parameterized(apiTycon, apiArgs.toArray)
+ case PolyType(tparams, res) =>
+ val apiTparams = tparams.map(apiTypeParameter)
+ val apiRes = apiType(res)
+ new api.Polymorphic(apiRes, apiTparams.toArray)
+ case rt: RefinedType =>
+ val name = rt.refinedName.toString
+ val parent = apiType(rt.parent)
+
+ def typeRefinement(name: String, tp: TypeBounds): api.TypeMember = tp match {
+ case TypeAlias(alias) =>
+ new api.TypeAlias(apiType(alias),
+ Array(), name, Constants.public, Constants.emptyModifiers, Array())
+ case TypeBounds(lo, hi) =>
+ new api.TypeDeclaration(apiType(lo), apiType(hi),
+ Array(), name, Constants.public, Constants.emptyModifiers, Array())
+ }
+
+ val decl: Array[api.Definition] = rt.refinedInfo match {
+ case rinfo: TypeBounds =>
+ Array(typeRefinement(name, rinfo))
+ case _ =>
+ ctx.debuglog(i"sbt-api: skipped structural refinement in $rt")
+ Array()
+ }
+ new api.Structure(strict2lzy(Array(parent)), strict2lzy(decl), strict2lzy(Array()))
+ case tp: RecType =>
+ apiType(tp.parent)
+ case RecThis(recType) =>
+ // `tp` must be present inside `recType`, so calling `apiType` on
+ // `recType` would lead to an infinite recursion, we avoid this by
+ // computing the representation of `recType` lazily.
+ apiLazy(recType)
+ case tp: AndOrType =>
+ val parents = List(apiType(tp.tp1), apiType(tp.tp2))
+
+ // TODO: Add a real representation for AndOrTypes in xsbti. The order of
+ // types in an `AndOrType` does not change the API, so the API hash should
+ // be symmetric.
+ val s = new api.Structure(strict2lzy(parents.toArray), strict2lzy(Array()), strict2lzy(Array()))
+ if (tp.isAnd)
+ s
+ else
+ withMarker(s, orMarker)
+ case ExprType(resultType) =>
+ withMarker(apiType(resultType), byNameMarker)
+ case ConstantType(constant) =>
+ new api.Constant(apiType(constant.tpe), constant.stringValue)
+ case AnnotatedType(tpe, annot) =>
+ // TODO: Annotation support
+ ctx.debuglog(i"sbt-api: skipped annotation in $tp2")
+ apiType(tpe)
+ case tp: ThisType =>
+ apiThis(tp.cls)
+ case tp: ParamType =>
+ // TODO: Distinguishing parameters based on their names alone is not enough,
+ // the binder is also needed (at least for type lambdas).
+ new api.ParameterRef(tp.paramName.toString)
+ case tp: LazyRef =>
+ apiType(tp.ref)
+ case tp: TypeVar =>
+ apiType(tp.underlying)
+ case _ => {
+ ctx.warning(i"sbt-api: Unhandled type ${tp.getClass} : $tp")
+ Constants.emptyType
+ }
+ }
+ }
+
+ // TODO: Get rid of this method. See https://github.com/sbt/zinc/issues/101
+ def simpleType(tp: Type): api.SimpleType = apiType(tp) match {
+ case tp: api.SimpleType =>
+ tp
+ case _ =>
+ ctx.debuglog("sbt-api: Not a simple type: " + tp.show)
+ Constants.emptyType
+ }
+
+ def apiLazy(tp: => Type): api.Type = {
+ // TODO: The sbt api needs a convenient way to make a lazy type.
+ // For now, we repurpose Structure for this.
+ val apiTp = lzy(Array(apiType(tp)))
+ new api.Structure(apiTp, strict2lzy(Array()), strict2lzy(Array()))
+ }
+
+ def apiThis(sym: Symbol): api.Singleton = {
+ val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot)
+ .map(s => new api.Id(s.name.toString))
+ new api.Singleton(new api.Path(pathComponents.toArray.reverse ++ Array(Constants.thisPath)))
+ }
+
+ def apiTypeParameter(tparam: TypeParamInfo): api.TypeParameter =
+ apiTypeParameter(tparam.paramName.toString, tparam.paramVariance,
+ tparam.paramBounds.lo, tparam.paramBounds.hi)
+
+ def apiTypeParameter(name: String, variance: Int, lo: Type, hi: Type): api.TypeParameter =
+ new api.TypeParameter(name, Array(), Array(), apiVariance(variance),
+ apiType(lo), apiType(hi))
+
+ def apiVariance(v: Int): api.Variance = {
+ import api.Variance._
+ if (v < 0) Contravariant
+ else if (v > 0) Covariant
+ else Invariant
+ }
+
+ def apiAccess(sym: Symbol): api.Access = {
+ // Symbols which are private[foo] do not have the flag Private set,
+ // but their `privateWithin` exists, see `Parsers#ParserCommon#normalize`.
+ if (!sym.is(Protected | Private) && !sym.privateWithin.exists)
+ Constants.public
+ else if (sym.is(PrivateLocal))
+ Constants.privateLocal
+ else if (sym.is(ProtectedLocal))
+ Constants.protectedLocal
+ else {
+ val qualifier =
+ if (sym.privateWithin eq NoSymbol)
+ Constants.unqualified
+ else
+ new api.IdQualifier(sym.privateWithin.fullName.toString)
+ if (sym.is(Protected))
+ new api.Protected(qualifier)
+ else
+ new api.Private(qualifier)
+ }
+ }
+
+ def apiModifiers(sym: Symbol): api.Modifiers = {
+ val absOver = sym.is(AbsOverride)
+ val abs = sym.is(Abstract) || sym.is(Deferred) || absOver
+ val over = sym.is(Override) || absOver
+ new api.Modifiers(abs, over, sym.is(Final), sym.is(Sealed),
+ sym.is(Implicit), sym.is(Lazy), sym.is(Macro), sym.is(SuperAccessor))
+ }
+
+ // TODO: Support other annotations
+ def apiAnnotations(s: Symbol): List[api.Annotation] = {
+ val annots = new mutable.ListBuffer[api.Annotation]
+
+ if (Inliner.hasBodyToInline(s)) {
+ // FIXME: If the body of an inline method changes, all the reverse
+ // dependencies of this method need to be recompiled. sbt has no way
+ // of tracking method bodies, so as a hack we include the pretty-printed
+ // typed tree of the method as part of the signature we send to sbt.
+ // To do this properly we would need a way to hash trees and types in
+ // dotty itself.
+ val printTypesCtx = ctx.fresh.setSetting(ctx.settings.printtypes, true)
+ annots += marker(Inliner.bodyToInline(s).show(printTypesCtx).toString)
+ }
+
+ annots.toList
+ }
+}
diff --git a/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
new file mode 100644
index 000000000..229e35360
--- /dev/null
+++ b/src/dotty/tools/dotc/sbt/ExtractDependencies.scala
@@ -0,0 +1,268 @@
+package dotty.tools.dotc
+package sbt
+
+import ast.{Trees, tpd}
+import core._, core.Decorators._
+import Contexts._, Flags._, Phases._, Trees._, Types._, Symbols._
+import Names._, NameOps._, StdNames._
+
+import scala.collection.{Set, mutable}
+
+import dotty.tools.io.{AbstractFile, Path, PlainFile, ZipArchive}
+import java.io.File
+
+import java.util.{Arrays, Comparator}
+
+import xsbti.DependencyContext
+
+/** This phase sends information on classes' dependencies to sbt via callbacks.
+ *
+ * This is used by sbt for incremental recompilation. Briefly, when a file
+ * changes sbt will recompile it, if its API has changed (determined by what
+ * `ExtractAPI` sent) then sbt will determine which reverse-dependencies
+ * (determined by what `ExtractDependencies` sent) of the API have to be
+ * recompiled depending on what changed.
+ *
+ * See the documentation of `ExtractDependenciesCollector`, `ExtractAPI`,
+ * `ExtractAPICollector` and
+ * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html for more
+ * information on how sbt incremental compilation works.
+ *
+ * The following flags affect this phase:
+ * -Yforce-sbt-phases
+ * -Ydump-sbt-inc
+ *
+ * @see ExtractAPI
+ */
+class ExtractDependencies extends Phase {
+ override def phaseName: String = "sbt-deps"
+
+ // This phase should be run directly after `Frontend`, if it is run after
+ // `PostTyper`, some dependencies will be lost because trees get simplified.
+ // See the scripted test `constants` for an example where this matters.
+ // TODO: Add a `Phase#runsBefore` method ?
+
+ override def run(implicit ctx: Context): Unit = {
+ val unit = ctx.compilationUnit
+ val dumpInc = ctx.settings.YdumpSbtInc.value
+ val forceRun = dumpInc || ctx.settings.YforceSbtPhases.value
+ if ((ctx.sbtCallback != null || forceRun) && !unit.isJava) {
+ val sourceFile = unit.source.file.file
+ val extractDeps = new ExtractDependenciesCollector
+ extractDeps.traverse(unit.tpdTree)
+
+ if (dumpInc) {
+ val names = extractDeps.usedNames.map(_.toString).toArray[Object]
+ val deps = extractDeps.topLevelDependencies.map(_.toString).toArray[Object]
+ val inhDeps = extractDeps.topLevelInheritanceDependencies.map(_.toString).toArray[Object]
+ Arrays.sort(names)
+ Arrays.sort(deps)
+ Arrays.sort(inhDeps)
+
+ val pw = Path(sourceFile).changeExtension("inc").toFile.printWriter()
+ try {
+ pw.println(s"// usedNames: ${names.mkString(",")}")
+ pw.println(s"// topLevelDependencies: ${deps.mkString(",")}")
+ pw.println(s"// topLevelInheritanceDependencies: ${inhDeps.mkString(",")}")
+ } finally pw.close()
+ }
+
+ if (ctx.sbtCallback != null) {
+ extractDeps.usedNames.foreach(name =>
+ ctx.sbtCallback.usedName(sourceFile, name.toString))
+ extractDeps.topLevelDependencies.foreach(dep =>
+ recordDependency(sourceFile, dep, DependencyContext.DependencyByMemberRef))
+ extractDeps.topLevelInheritanceDependencies.foreach(dep =>
+ recordDependency(sourceFile, dep, DependencyContext.DependencyByInheritance))
+ }
+ }
+ }
+
+ /** Record that `currentSourceFile` depends on the file where `dep` was loaded from.
+ *
+ * @param currentSourceFile The source file of the current unit
+ * @param dep The dependency
+ * @param context Describes how `currentSourceFile` depends on `dep`
+ */
+ def recordDependency(currentSourceFile: File, dep: Symbol, context: DependencyContext)
+ (implicit ctx: Context) = {
+ val depFile = dep.associatedFile
+ if (depFile != null) {
+ if (depFile.path.endsWith(".class")) {
+ /** Transform `List(java, lang, String.class)` into `java.lang.String` */
+ def className(classSegments: List[String]) =
+ classSegments.mkString(".").stripSuffix(".class")
+ def binaryDependency(file: File, className: String) =
+ ctx.sbtCallback.binaryDependency(file, className, currentSourceFile, context)
+
+ depFile match {
+ case ze: ZipArchive#Entry =>
+ for (zip <- ze.underlyingSource; zipFile <- Option(zip.file)) {
+ val classSegments = Path(ze.path).segments
+ binaryDependency(zipFile, className(classSegments))
+ }
+ case pf: PlainFile =>
+ val packages = dep.ownersIterator
+ .filter(x => x.is(PackageClass) && !x.isEffectiveRoot).length
+ // We can recover the fully qualified name of a classfile from
+ // its path
+ val classSegments = pf.givenPath.segments.takeRight(packages + 1)
+ binaryDependency(pf.file, className(classSegments))
+ case _ =>
+ }
+ } else if (depFile.file != currentSourceFile) {
+ ctx.sbtCallback.sourceDependency(depFile.file, currentSourceFile, context)
+ }
+ }
+ }
+}
+
+/** Extract the dependency information of a compilation unit.
+ *
+ * To understand why we track the used names see the section "Name hashing
+ * algorithm" in http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html
+ * To understand why we need to track dependencies introduced by inheritance
+ * specially, see the subsection "Dependencies introduced by member reference and
+ * inheritance" in the "Name hashing algorithm" section.
+ */
+private class ExtractDependenciesCollector(implicit val ctx: Context) extends tpd.TreeTraverser {
+ import tpd._
+
+ private[this] val _usedNames = new mutable.HashSet[Name]
+ private[this] val _topLevelDependencies = new mutable.HashSet[Symbol]
+ private[this] val _topLevelInheritanceDependencies = new mutable.HashSet[Symbol]
+
+ /** The names used in this class, this does not include names which are only
+ * defined and not referenced.
+ */
+ def usedNames: Set[Name] = _usedNames
+
+ /** The set of top-level classes that the compilation unit depends on
+ * because it refers to these classes or something defined in them.
+ * This is always a superset of `topLevelInheritanceDependencies` by definition.
+ */
+ def topLevelDependencies: Set[Symbol] = _topLevelDependencies
+
+ /** The set of top-level classes that the compilation unit extends or that
+ * contain a non-top-level class that the compilation unit extends.
+ */
+ def topLevelInheritanceDependencies: Set[Symbol] = _topLevelInheritanceDependencies
+
+ private def addUsedName(name: Name) =
+ _usedNames += name
+
+ private def addDependency(sym: Symbol): Unit =
+ if (!ignoreDependency(sym)) {
+ val tlClass = sym.topLevelClass
+ if (tlClass.ne(NoSymbol)) // Some synthetic type aliases like AnyRef do not belong to any class
+ _topLevelDependencies += sym.topLevelClass
+ addUsedName(sym.name)
+ }
+
+ private def ignoreDependency(sym: Symbol) =
+ sym.eq(NoSymbol) ||
+ sym.isEffectiveRoot ||
+ sym.isAnonymousFunction ||
+ sym.isAnonymousClass
+
+ private def addInheritanceDependency(sym: Symbol): Unit =
+ _topLevelInheritanceDependencies += sym.topLevelClass
+
+ /** Traverse the tree of a source file and record the dependencies which
+ * can be retrieved using `topLevelDependencies`, `topLevelInheritanceDependencies`,
+ * and `usedNames`
+ */
+ override def traverse(tree: Tree)(implicit ctx: Context): Unit = {
+ tree match {
+ case Import(expr, selectors) =>
+ def lookupImported(name: Name) = expr.tpe.member(name).symbol
+ def addImported(name: Name) = {
+ // importing a name means importing both a term and a type (if they exist)
+ addDependency(lookupImported(name.toTermName))
+ addDependency(lookupImported(name.toTypeName))
+ }
+ selectors foreach {
+ case Ident(name) =>
+ addImported(name)
+ case Thicket(Ident(name) :: Ident(rename) :: Nil) =>
+ addImported(name)
+ if (rename ne nme.WILDCARD)
+ addUsedName(rename)
+ case _ =>
+ }
+ case Inlined(call, _, _) =>
+ // The inlined call is normally ignored by TreeTraverser but we need to
+ // record it as a dependency
+ traverse(call)
+ case t: TypeTree =>
+ usedTypeTraverser.traverse(t.tpe)
+ case ref: RefTree =>
+ addDependency(ref.symbol)
+ usedTypeTraverser.traverse(ref.tpe)
+ case t @ Template(_, parents, _, _) =>
+ t.parents.foreach(p => addInheritanceDependency(p.tpe.typeSymbol))
+ case _ =>
+ }
+ traverseChildren(tree)
+ }
+
+ /** Traverse a used type and record all the dependencies we need to keep track
+ * of for incremental recompilation.
+ *
+ * As a motivating example, given a type `T` defined as:
+ *
+ * type T >: L <: H
+ * type L <: A1
+ * type H <: B1
+ * class A1 extends A0
+ * class B1 extends B0
+ *
+ * We need to record a dependency on `T`, `L`, `H`, `A1`, `B1`. This is
+ * necessary because the API representation that `ExtractAPI` produces for
+ * `T` just refers to the strings "L" and "H", it does not contain their API
+ * representation. Therefore, the name hash of `T` does not change if for
+ * example the definition of `L` changes.
+ *
+ * We do not need to keep track of superclasses like `A0` and `B0` because
+ * the API representation of a class (and therefore its name hash) already
+ * contains all necessary information on superclasses.
+ *
+ * A natural question to ask is: Since traversing all referenced types to
+ * find all these names is costly, why not change the API representation
+ * produced by `ExtractAPI` to contain that information? This way the name
+ * hash of `T` would change if any of the types it depends on change, and we
+ * would only need to record a dependency on `T`. Unfortunately there is no
+ * simple answer to the question "what does T depend on?" because it depends
+ * on the prefix and `ExtractAPI` does not compute types as seen from every
+ * possible prefix, the documentation of `ExtractAPI` explains why.
+ *
+ * The tests in sbt `types-in-used-names-a`, `types-in-used-names-b`,
+ * `as-seen-from-a` and `as-seen-from-b` rely on this.
+ */
+ private object usedTypeTraverser extends TypeTraverser {
+ val seen = new mutable.HashSet[Type]
+ def traverse(tp: Type): Unit = if (!seen.contains(tp)) {
+ seen += tp
+ tp match {
+ case tp: NamedType =>
+ val sym = tp.symbol
+ if (!sym.is(Package)) {
+ addDependency(sym)
+ if (!sym.isClass)
+ traverse(tp.info)
+ traverse(tp.prefix)
+ }
+ case tp: ThisType =>
+ traverse(tp.underlying)
+ case tp: ConstantType =>
+ traverse(tp.underlying)
+ case tp: MethodParam =>
+ traverse(tp.underlying)
+ case tp: PolyParam =>
+ traverse(tp.underlying)
+ case _ =>
+ traverseChildren(tp)
+ }
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/sbt/ShowAPI.scala b/src/dotty/tools/dotc/sbt/ShowAPI.scala
new file mode 100644
index 000000000..0e6b19867
--- /dev/null
+++ b/src/dotty/tools/dotc/sbt/ShowAPI.scala
@@ -0,0 +1,156 @@
+// This file is copied straight from
+// https://github.com/sbt/sbt/blob/0.13/compile/api/src/main/scala/xsbt/api/ShowAPI.scala
+// It is convenient to be able to pretty-print the API from Dotty itself to test
+// the sbt phase without having to run sbt.
+
+/* sbt -- Simple Build Tool
+ * Copyright 2010 Mark Harrah
+ */
+package dotty.tools.dotc
+package sbt
+
+import xsbti.api._
+
+import scala.util.Try
+
+object DefaultShowAPI {
+ private lazy val defaultNesting = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.depth").get) } getOrElse 2
+
+ def apply(d: Definition) = ShowAPI.showDefinition(d)(defaultNesting)
+ def apply(d: Type) = ShowAPI.showType(d)(defaultNesting)
+ def apply(a: SourceAPI) = ShowAPI.showApi(a)(defaultNesting)
+}
+
+object ShowAPI {
+ private lazy val numDecls = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.decls").get) } getOrElse 0
+
+ private def truncateDecls(decls: Array[Definition]): Array[Definition] = if (numDecls <= 0) decls else decls.take(numDecls)
+ private def lines(ls: Seq[String]): String = ls.mkString("\n", "\n", "\n")
+
+ def showApi(a: SourceAPI)(implicit nesting: Int) =
+ a.packages.map(pkg => "package " + pkg.name).mkString("\n") + lines(truncateDecls(a.definitions).map(showDefinition))
+
+ def showDefinition(d: Definition)(implicit nesting: Int): String = d match {
+ case v: Val => showMonoDef(v, "val") + ": " + showType(v.tpe)
+ case v: Var => showMonoDef(v, "var") + ": " + showType(v.tpe)
+ case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters) + ": " + showType(d.returnType)
+ case ta: TypeAlias => showPolyDef(ta, "type") + " = " + showType(ta.tpe)
+ case td: TypeDeclaration => showPolyDef(td, "type") + showBounds(td.lowerBound, td.upperBound)
+ case cl: ClassLike => showPolyDef(cl, showDefinitionType(cl.definitionType)) + " extends " + showTemplate(cl)
+ }
+
+ private def showTemplate(cl: ClassLike)(implicit nesting: Int) =
+ if (nesting <= 0) "<nesting level reached>"
+ else {
+ val showSelf = if (cl.selfType.isInstanceOf[EmptyType]) "" else " self: " + showNestedType(cl.selfType) + " =>"
+
+ cl.structure.parents.map(showNestedType).mkString("", " with ", " {") + showSelf +
+ lines(truncateDecls(cl.structure.inherited).map(d => "^inherited^ " + showNestedDefinition(d))) +
+ lines(truncateDecls(cl.structure.declared).map(showNestedDefinition)) +
+ "}"
+ }
+
+ def showType(t: Type)(implicit nesting: Int): String = t match {
+ case st: Projection => showType(st.prefix) + "#" + st.id
+ case st: ParameterRef => "<" + st.id + ">"
+ case st: Singleton => showPath(st.path)
+ case st: EmptyType => "<empty>"
+ case p: Parameterized => showType(p.baseType) + p.typeArguments.map(showType).mkString("[", ", ", "]")
+ case c: Constant => showType(c.baseType) + "(" + c.value + ")"
+ case a: Annotated => showAnnotations(a.annotations) + " " + showType(a.baseType)
+ case s: Structure =>
+ s.parents.map(showType).mkString(" with ") + (
+ if (nesting <= 0) "{ <nesting level reached> }"
+ else truncateDecls(s.declared).map(showNestedDefinition).mkString(" {", "\n", "}"))
+ case e: Existential =>
+ showType(e.baseType) + (
+ if (nesting <= 0) " forSome { <nesting level reached> }"
+ else e.clause.map(t => "type " + showNestedTypeParameter(t)).mkString(" forSome { ", "; ", " }"))
+ case p: Polymorphic => showType(p.baseType) + (
+ if (nesting <= 0) " [ <nesting level reached> ]"
+ else showNestedTypeParameters(p.parameters))
+ }
+
+ private def showPath(p: Path): String = p.components.map(showPathComponent).mkString(".")
+ private def showPathComponent(pc: PathComponent) = pc match {
+ case s: Super => "super[" + showPath(s.qualifier) + "]"
+ case _: This => "this"
+ case i: Id => i.id
+ }
+
+ private def space(s: String) = if (s.isEmpty) s else s + " "
+ private def showMonoDef(d: Definition, label: String)(implicit nesting: Int): String =
+ space(showAnnotations(d.annotations)) + space(showAccess(d.access)) + space(showModifiers(d.modifiers)) + space(label) + d.name
+
+ private def showPolyDef(d: ParameterizedDefinition, label: String)(implicit nesting: Int): String =
+ showMonoDef(d, label) + showTypeParameters(d.typeParameters)
+
+ private def showTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int): String =
+ if (tps.isEmpty) ""
+ else tps.map(showTypeParameter).mkString("[", ", ", "]")
+
+ private def showTypeParameter(tp: TypeParameter)(implicit nesting: Int): String =
+ showAnnotations(tp.annotations) + " " + showVariance(tp.variance) + tp.id + showTypeParameters(tp.typeParameters) + " " + showBounds(tp.lowerBound, tp.upperBound)
+
+ private def showAnnotations(as: Seq[Annotation])(implicit nesting: Int) = as.map(showAnnotation).mkString(" ")
+ private def showAnnotation(a: Annotation)(implicit nesting: Int) =
+ "@" + showType(a.base) + (
+ if (a.arguments.isEmpty) ""
+ else a.arguments.map(a => a.name + " = " + a.value).mkString("(", ", ", ")")
+ )
+
+ private def showBounds(lower: Type, upper: Type)(implicit nesting: Int): String = ">: " + showType(lower) + " <: " + showType(upper)
+
+ private def showValueParams(ps: Seq[ParameterList])(implicit nesting: Int): String =
+ ps.map(pl =>
+ pl.parameters.map(mp =>
+ mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." else "")
+ ).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")")
+ ).mkString("")
+
+ private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match {
+ case ParameterModifier.Plain => base
+ case ParameterModifier.Repeated => base + "*"
+ case ParameterModifier.ByName => "=> " + base
+ }
+
+ private def showDefinitionType(d: DefinitionType) = d match {
+ case DefinitionType.Trait => "trait"
+ case DefinitionType.ClassDef => "class"
+ case DefinitionType.Module => "object"
+ case DefinitionType.PackageModule => "package object"
+ }
+
+ private def showAccess(a: Access) = a match {
+ case p: Public => ""
+ case p: Protected => "protected" + showQualifier(p.qualifier)
+ case p: Private => "private" + showQualifier(p.qualifier)
+ }
+
+ private def showQualifier(q: Qualifier) = q match {
+ case _: Unqualified => ""
+ case _: ThisQualifier => "[this]"
+ case i: IdQualifier => "[" + i.value + "]"
+ }
+
+ private def showModifiers(m: Modifiers) = List(
+ (m.isOverride, "override"),
+ (m.isFinal, "final"),
+ (m.isSealed, "sealed"),
+ (m.isImplicit, "implicit"),
+ (m.isAbstract, "abstract"),
+ (m.isLazy, "lazy")
+ ).collect { case (true, mod) => mod }.mkString(" ")
+
+ private def showVariance(v: Variance) = v match {
+ case Variance.Invariant => ""
+ case Variance.Covariant => "+"
+ case Variance.Contravariant => "-"
+ }
+
+ // limit nesting to prevent cycles and generally keep output from getting humongous
+ private def showNestedType(tp: Type)(implicit nesting: Int) = showType(tp)(nesting - 1)
+ private def showNestedTypeParameter(tp: TypeParameter)(implicit nesting: Int) = showTypeParameter(tp)(nesting - 1)
+ private def showNestedTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int) = showTypeParameters(tps)(nesting - 1)
+ private def showNestedDefinition(d: Definition)(implicit nesting: Int) = showDefinition(d)(nesting - 1)
+}
diff --git a/src/dotty/tools/dotc/sbt/ThunkHolder.scala b/src/dotty/tools/dotc/sbt/ThunkHolder.scala
new file mode 100644
index 000000000..e377de6da
--- /dev/null
+++ b/src/dotty/tools/dotc/sbt/ThunkHolder.scala
@@ -0,0 +1,61 @@
+package dotty.tools.dotc
+package sbt
+
+import scala.annotation.tailrec
+import scala.collection.mutable.ListBuffer
+import xsbti.api
+
+/** Create and hold thunks. A thunk is a (potentially) unevaluated value
+ * that may be evaluated once.
+ */
+private[sbt] trait ThunkHolder {
+ private[this] val thunks = new ListBuffer[api.Lazy[_]]
+
+ /** Force all unevaluated thunks to prevent space leaks. */
+ @tailrec protected final def forceThunks(): Unit = if (!thunks.isEmpty) {
+ val toForce = thunks.toList
+ thunks.clear()
+ toForce.foreach(_.get())
+ // Forcing thunks may create new thunks
+ forceThunks()
+ }
+
+ /** Store the by-name parameter `t` in a `Lazy` container without evaluating it.
+ * It will be forced by the next call to `forceThunks()`
+ */
+ def lzy[T <: AnyRef](t: => T): api.Lazy[T] = {
+ val l = SafeLazy(() => t)
+ thunks += l
+ l
+ }
+
+ /** Store the parameter `t` in a `Lazy` container, since `t` is not by-name, there
+ * is nothing to force.
+ *
+ * TODO: Get rid of this method. It is only needed because some xsbti.api classes
+ * take lazy arguments when they could be strict, but this can be fixed in sbt,
+ * see https://github.com/sbt/zinc/issues/114
+ */
+ def strict2lzy[T <: AnyRef](t: T): api.Lazy[T] =
+ SafeLazy.strict(t)
+}
+
+// TODO: Use xsbti.SafeLazy once https://github.com/sbt/zinc/issues/113 is fixed
+private object SafeLazy {
+ def apply[T <: AnyRef](eval: () => T): xsbti.api.Lazy[T] =
+ new Impl(eval)
+
+ def strict[T <: AnyRef](value: T): xsbti.api.Lazy[T] =
+ new Strict(value)
+
+ private[this] final class Impl[T <: AnyRef](private[this] var eval: () => T) extends xsbti.api.AbstractLazy[T] {
+ private[this] lazy val _t = {
+ val t = eval()
+ eval = null // clear the reference, ensuring the only memory we hold onto is the result
+ t
+ }
+ def get: T = _t
+ }
+
+ private[this] final class Strict[T <: AnyRef](val get: T) extends xsbti.api.Lazy[T] with java.io.Serializable
+}
diff --git a/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/src/dotty/tools/dotc/transform/ArrayConstructors.scala
new file mode 100644
index 000000000..74213d332
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/ArrayConstructors.scala
@@ -0,0 +1,59 @@
+package dotty.tools.dotc
+package transform
+
+import core._
+import TreeTransforms._
+import Contexts.Context
+import Flags._
+import SymUtils._
+import Symbols._
+import SymDenotations._
+import Types._
+import Decorators._
+import DenotTransformers._
+import StdNames._
+import NameOps._
+import ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import util.Positions._
+import Names._
+
+import collection.mutable
+import ResolveSuper._
+
+import scala.collection.immutable.::
+
+
+/** This phase rewrites calls to array constructors to newArray method in Dotty.runtime.Arrays module.
+ *
+ * It assumes that generic arrays have already been handled by typer (see Applications.convertNewGenericArray).
+ * Additionally it optimizes calls to scala.Array.ofDim functions by replacing them with calls to newArray with specific dimensions
+ */
+class ArrayConstructors extends MiniPhaseTransform { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "arrayConstructors"
+
+ override def transformApply(tree: tpd.Apply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ def rewrite(elemType: Type, dims: List[Tree]) =
+ tpd.newArray(elemType, tree.tpe, tree.pos, JavaSeqLiteral(dims, TypeTree(defn.IntClass.typeRef)))
+
+ if (tree.fun.symbol eq defn.ArrayConstructor) {
+ val TypeApply(tycon, targ :: Nil) = tree.fun
+ rewrite(targ.tpe, tree.args)
+ } else if ((tree.fun.symbol.maybeOwner eq defn.ArrayModule) && (tree.fun.symbol.name eq nme.ofDim) && !tree.tpe.isInstanceOf[MethodicType]) {
+ val Apply(Apply(TypeApply(_, List(tp)), _), _) = tree
+ val cs = tp.tpe.widen.classSymbol
+ tree.fun match {
+ case Apply(TypeApply(t: Ident, targ), dims)
+ if !TypeErasure.isUnboundedGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) =>
+ rewrite(targ.head.tpe, dims)
+ case Apply(TypeApply(t: Select, targ), dims)
+ if !TypeErasure.isUnboundedGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) =>
+ Block(t.qualifier :: Nil, rewrite(targ.head.tpe, dims))
+ case _ => tree
+ }
+
+ } else tree
+ }
+}
diff --git a/src/dotty/tools/dotc/transform/CheckReentrant.scala b/src/dotty/tools/dotc/transform/CheckReentrant.scala
index 2569b3aae..c9eefb22f 100644
--- a/src/dotty/tools/dotc/transform/CheckReentrant.scala
+++ b/src/dotty/tools/dotc/transform/CheckReentrant.scala
@@ -3,7 +3,7 @@ package transform
import core._
import Names._
-import dotty.tools.dotc.transform.TreeTransforms.{AnnotationTransformer, TransformerInfo, MiniPhaseTransform, TreeTransformer}
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, MiniPhaseTransform, TreeTransformer}
import ast.Trees._
import Flags._
import Types._
@@ -74,7 +74,7 @@ class CheckReentrant extends MiniPhaseTransform { thisTransformer =>
if (sym.is(Mutable)) {
ctx.error(
i"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info}
- | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""".stripMargin)
+ | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""")
shared += sym
} else if (!sym.is(Method) || sym.is(Accessor | ParamAccessor)) {
scanning(sym) {
diff --git a/src/dotty/tools/dotc/transform/CheckStatic.scala b/src/dotty/tools/dotc/transform/CheckStatic.scala
index 445e9f839..937a4f1cc 100644
--- a/src/dotty/tools/dotc/transform/CheckStatic.scala
+++ b/src/dotty/tools/dotc/transform/CheckStatic.scala
@@ -5,7 +5,7 @@ import core._
import Names._
import StdNames.nme
import Types._
-import dotty.tools.dotc.transform.TreeTransforms.{AnnotationTransformer, TransformerInfo, MiniPhaseTransform, TreeTransformer}
+import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, MiniPhaseTransform, TreeTransformer}
import ast.Trees._
import Flags._
import Contexts.Context
@@ -32,6 +32,7 @@ import TypeUtils._
* is not allowed to inherit classes that define a term member with name `foo`.
* 5. Only `@static` methods and vals are supported in companions of traits.
* Java8 supports those, but not vars, and JavaScript does not have interfaces at all.
+ * 6. `@static` Lazy vals are currently unsupported.
*/
class CheckStatic extends MiniPhaseTransform { thisTransformer =>
import ast.tpd._
@@ -57,17 +58,18 @@ class CheckStatic extends MiniPhaseTransform { thisTransformer =>
}
val companion = ctx.owner.companionClass
- if (!companion.exists) {
- ctx.error("object that conatin @static members should have companion class", defn.pos)
- }
+ def clashes = companion.asClass.membersNamed(defn.name)
- val clashes = companion.asClass.membersNamed(defn.name)
- if (clashes.exists) {
+ if (!companion.exists) {
+ ctx.error("object that contains @static members should have companion class", defn.pos)
+ } else if (clashes.exists) {
ctx.error("companion classes cannot define members with same name as @static member", defn.pos)
- }
-
- if (defn.symbol.is(Flags.Mutable) && companion.is(Flags.Trait)) {
- ctx.error("Companions of traits cannot define mutable @static fields")
+ } else if (defn.symbol.is(Flags.Mutable) && companion.is(Flags.Trait)) {
+ ctx.error("Companions of traits cannot define mutable @static fields", defn.pos)
+ } else if (defn.symbol.is(Flags.Lazy)) {
+ ctx.error("Lazy @static fields are not supported", defn.pos)
+ } else if (defn.symbol.allOverriddenSymbols.nonEmpty) {
+ ctx.error("@static members cannot override or implement non-static ones", defn.pos)
}
} else hadNonStaticField = hadNonStaticField || defn.isInstanceOf[ValDef]
diff --git a/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
index b85c44647..714255962 100644
--- a/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
+++ b/src/dotty/tools/dotc/transform/CollectEntryPoints.scala
@@ -75,7 +75,7 @@ class CollectEntryPoints extends MiniPhaseTransform {
val javaPlatform = ctx.platform.asInstanceOf[JavaPlatform]
if (javaPlatform.hasJavaMainMethod(companion))
failNoForwarder("companion contains its own main method")
- else if (companion != NoSymbol && companion.info.member(nme.main) != NoSymbol)
+ else if (companion.exists && companion.info.member(nme.main).exists)
// this is only because forwarders aren't smart enough yet
failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
else if (companion.flags is Flags.Trait)
diff --git a/src/dotty/tools/dotc/transform/Constructors.scala b/src/dotty/tools/dotc/transform/Constructors.scala
index 44638ce48..db850e944 100644
--- a/src/dotty/tools/dotc/transform/Constructors.scala
+++ b/src/dotty/tools/dotc/transform/Constructors.scala
@@ -91,7 +91,7 @@ class Constructors extends MiniPhaseTransform with IdentityDenotTransformer { th
*/
override def checkPostCondition(tree: tpd.Tree)(implicit ctx: Context): Unit = {
tree match {
- case tree: ValDef if tree.symbol.exists && tree.symbol.owner.isClass && !tree.symbol.is(Lazy) =>
+ case tree: ValDef if tree.symbol.exists && tree.symbol.owner.isClass && !tree.symbol.is(Lazy) && !tree.symbol.hasAnnotation(defn.ScalaStaticAnnot) =>
assert(tree.rhs.isEmpty, i"$tree: initializer should be moved to constructors")
case tree: DefDef if !tree.symbol.is(LazyOrDeferred) =>
assert(!tree.rhs.isEmpty, i"unimplemented: $tree")
@@ -181,7 +181,7 @@ class Constructors extends MiniPhaseTransform with IdentityDenotTransformer { th
def splitStats(stats: List[Tree]): Unit = stats match {
case stat :: stats1 =>
stat match {
- case stat @ ValDef(name, tpt, _) if !stat.symbol.is(Lazy) =>
+ case stat @ ValDef(name, tpt, _) if !stat.symbol.is(Lazy) && !stat.symbol.hasAnnotation(defn.ScalaStaticAnnot) =>
val sym = stat.symbol
if (isRetained(sym)) {
if (!stat.rhs.isEmpty && !isWildcardArg(stat.rhs))
diff --git a/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled b/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
index 65362f199..7b37c5881 100644
--- a/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
+++ b/src/dotty/tools/dotc/transform/DropEmptyCompanions.scala.disabled
@@ -30,7 +30,7 @@ import dotty.tools.dotc.transform.TreeTransforms.TransformerInfo
*/
class DropEmptyCompanions extends MiniPhaseTransform { thisTransform =>
import ast.tpd._
- override def phaseName = "dropEmpty"
+ override def phaseName = "dropEmptyCompanions"
override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Flatten])
override def transformPackageDef(pdef: PackageDef)(implicit ctx: Context, info: TransformerInfo) = {
diff --git a/src/dotty/tools/dotc/transform/DropInlined.scala b/src/dotty/tools/dotc/transform/DropInlined.scala
new file mode 100644
index 000000000..775663b5c
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/DropInlined.scala
@@ -0,0 +1,15 @@
+package dotty.tools.dotc
+package transform
+
+import typer.Inliner
+import core.Contexts.Context
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Drop Inlined nodes */
+class DropInlined extends MiniPhaseTransform {
+ import ast.tpd._
+ override def phaseName = "dropInlined"
+
+ override def transformInlined(tree: Inlined)(implicit ctx: Context, info: TransformerInfo): Tree =
+ Inliner.dropInlined(tree)
+}
diff --git a/src/dotty/tools/dotc/transform/ElimByName.scala b/src/dotty/tools/dotc/transform/ElimByName.scala
index b65a46249..192227261 100644
--- a/src/dotty/tools/dotc/transform/ElimByName.scala
+++ b/src/dotty/tools/dotc/transform/ElimByName.scala
@@ -77,8 +77,9 @@ class ElimByName extends MiniPhaseTransform with InfoTransformer { thisTransform
if qual.tpe.derivesFrom(defn.FunctionClass(0)) && isPureExpr(qual) =>
qual
case _ =>
+ val inSuper = if (ctx.mode.is(Mode.InSuperCall)) InSuperCall else EmptyFlags
val meth = ctx.newSymbol(
- ctx.owner, nme.ANON_FUN, Synthetic | Method, MethodType(Nil, Nil, argType))
+ ctx.owner, nme.ANON_FUN, Synthetic | Method | inSuper, MethodType(Nil, Nil, argType))
Closure(meth, _ => arg.changeOwner(ctx.owner, meth))
}
ref(defn.dummyApply).appliedToType(argType).appliedTo(argFun)
diff --git a/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
index a3f8b56ff..24c8cdc8d 100644
--- a/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
+++ b/src/dotty/tools/dotc/transform/ElimErasedValueType.scala
@@ -67,6 +67,9 @@ class ElimErasedValueType extends MiniPhaseTransform with InfoTransformer {
transformTypeOfTree(t)
}
+ override def transformInlined(tree: Inlined)(implicit ctx: Context, info: TransformerInfo): Tree =
+ transformTypeOfTree(tree)
+
// FIXME: transformIf and transformBlock won't be required anymore once #444 is fixed.
override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo): Tree =
transformTypeOfTree(tree)
diff --git a/src/dotty/tools/dotc/transform/ElimRepeated.scala b/src/dotty/tools/dotc/transform/ElimRepeated.scala
index 30778267d..258b7f234 100644
--- a/src/dotty/tools/dotc/transform/ElimRepeated.scala
+++ b/src/dotty/tools/dotc/transform/ElimRepeated.scala
@@ -74,7 +74,7 @@ class ElimRepeated extends MiniPhaseTransform with InfoTransformer with Annotati
case SeqLiteral(elems, elemtpt) =>
JavaSeqLiteral(elems, elemtpt)
case _ =>
- val elemType = tree.tpe.firstBaseArgInfo(defn.SeqClass)
+ val elemType = tree.tpe.elemType
var elemClass = elemType.classSymbol
if (defn.PhantomClasses contains elemClass) elemClass = defn.ObjectClass
ref(defn.DottyArraysModule)
diff --git a/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/src/dotty/tools/dotc/transform/ElimStaticThis.scala
index 7df29b0b0..0601e0122 100644
--- a/src/dotty/tools/dotc/transform/ElimStaticThis.scala
+++ b/src/dotty/tools/dotc/transform/ElimStaticThis.scala
@@ -10,7 +10,7 @@ import dotty.tools.dotc.core.SymDenotations.SymDenotation
import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
import dotty.tools.dotc.core.Types.{ThisType, TermRef}
-/** Replace This references to module classes in static methods by global identifiers to the
+/** Replace This references to module classes in static methods by global identifiers to the
* corresponding modules.
*/
class ElimStaticThis extends MiniPhaseTransform {
@@ -27,9 +27,11 @@ class ElimStaticThis extends MiniPhaseTransform {
override def transformIdent(tree: tpd.Ident)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
if (ctx.owner.enclosingMethod.is(JavaStatic)) {
tree.tpe match {
+ case TermRef(thiz: ThisType, _) if thiz.cls.is(ModuleClass) =>
+ ref(thiz.cls.sourceModule).select(tree.symbol)
case TermRef(thiz: ThisType, _) =>
- assert(thiz.underlying.typeSymbol.is(ModuleClass))
- ref(thiz.underlying.typeSymbol.sourceModule).select(tree.symbol)
+ assert(tree.symbol.is(Flags.JavaStatic))
+ tree
case _ => tree
}
}
diff --git a/src/dotty/tools/dotc/transform/Erasure.scala b/src/dotty/tools/dotc/transform/Erasure.scala
index 3445b4c44..a503d55e5 100644
--- a/src/dotty/tools/dotc/transform/Erasure.scala
+++ b/src/dotty/tools/dotc/transform/Erasure.scala
@@ -21,11 +21,11 @@ import core.Decorators._
import dotty.tools.dotc.ast.{Trees, tpd, untpd}
import ast.Trees._
import scala.collection.mutable.ListBuffer
-import dotty.tools.dotc.core.Flags
+import dotty.tools.dotc.core.{Constants, Flags}
import ValueClasses._
import TypeUtils._
import ExplicitOuter._
-import typer.Mode
+import core.Mode
class Erasure extends Phase with DenotTransformer { thisTransformer =>
@@ -41,13 +41,19 @@ class Erasure extends Phase with DenotTransformer { thisTransformer =>
// Aftre erasure, all former Any members are now Object members
val ClassInfo(pre, _, ps, decls, selfInfo) = ref.info
val extendedScope = decls.cloneScope
- defn.AnyClass.classInfo.decls.foreach(extendedScope.enter)
+ for (decl <- defn.AnyClass.classInfo.decls)
+ if (!decl.isConstructor) extendedScope.enter(decl)
ref.copySymDenotation(
info = transformInfo(ref.symbol,
ClassInfo(pre, defn.ObjectClass, ps, extendedScope, selfInfo))
)
}
else {
+ val oldSymbol = ref.symbol
+ val newSymbol =
+ if ((oldSymbol.owner eq defn.AnyClass) && oldSymbol.isConstructor)
+ defn.ObjectClass.primaryConstructor
+ else oldSymbol
val oldOwner = ref.owner
val newOwner = if (oldOwner eq defn.AnyClass) defn.ObjectClass else oldOwner
val oldInfo = ref.info
@@ -55,10 +61,10 @@ class Erasure extends Phase with DenotTransformer { thisTransformer =>
val oldFlags = ref.flags
val newFlags = ref.flags &~ Flags.HasDefaultParams // HasDefaultParams needs to be dropped because overriding might become overloading
// TODO: define derivedSymDenotation?
- if ((oldOwner eq newOwner) && (oldInfo eq newInfo) && (oldFlags == newFlags)) ref
+ if ((oldSymbol eq newSymbol) && (oldOwner eq newOwner) && (oldInfo eq newInfo) && (oldFlags == newFlags)) ref
else {
assert(!ref.is(Flags.PackageClass), s"trans $ref @ ${ctx.phase} oldOwner = $oldOwner, newOwner = $newOwner, oldInfo = $oldInfo, newInfo = $newInfo ${oldOwner eq newOwner} ${oldInfo eq newInfo}")
- ref.copySymDenotation(owner = newOwner, initFlags = newFlags, info = newInfo)
+ ref.copySymDenotation(symbol = newSymbol, owner = newOwner, initFlags = newFlags, info = newInfo)
}
}
case ref =>
@@ -153,8 +159,8 @@ object Erasure extends TypeTestsCasts{
final def box(tree: Tree, target: => String = "")(implicit ctx: Context): Tree = ctx.traceIndented(i"boxing ${tree.showSummary}: ${tree.tpe} into $target") {
tree.tpe.widen match {
- case ErasedValueType(clazz, _) =>
- New(clazz.typeRef, cast(tree, underlyingOfValueClass(clazz)) :: Nil) // todo: use adaptToType?
+ case ErasedValueType(tycon, _) =>
+ New(tycon, cast(tree, underlyingOfValueClass(tycon.symbol.asClass)) :: Nil) // todo: use adaptToType?
case tp =>
val cls = tp.classSymbol
if (cls eq defn.UnitClass) constant(tree, ref(defn.BoxedUnit_UNIT))
@@ -173,10 +179,10 @@ object Erasure extends TypeTestsCasts{
def unbox(tree: Tree, pt: Type)(implicit ctx: Context): Tree = ctx.traceIndented(i"unboxing ${tree.showSummary}: ${tree.tpe} as a $pt") {
pt match {
- case ErasedValueType(clazz, underlying) =>
+ case ErasedValueType(tycon, underlying) =>
def unboxedTree(t: Tree) =
- adaptToType(t, clazz.typeRef)
- .select(valueClassUnbox(clazz))
+ adaptToType(t, tycon)
+ .select(valueClassUnbox(tycon.symbol.asClass))
.appliedToNone
// Null unboxing needs to be treated separately since we cannot call a method on null.
@@ -185,7 +191,7 @@ object Erasure extends TypeTestsCasts{
val tree1 =
if (tree.tpe isRef defn.NullClass)
adaptToType(tree, underlying)
- else if (!(tree.tpe <:< clazz.typeRef)) {
+ else if (!(tree.tpe <:< tycon)) {
assert(!(tree.tpe.typeSymbol.isPrimitiveValueClass))
val nullTree = Literal(Constant(null))
val unboxedNull = adaptToType(nullTree, underlying)
@@ -223,12 +229,12 @@ object Erasure extends TypeTestsCasts{
if treeElem.widen.isPrimitiveValueType && !ptElem.isPrimitiveValueType =>
// See SI-2386 for one example of when this might be necessary.
cast(ref(defn.runtimeMethodRef(nme.toObjectArray)).appliedTo(tree), pt)
- case (_, ErasedValueType(cls, _)) =>
- ref(u2evt(cls)).appliedTo(tree)
+ case (_, ErasedValueType(tycon, _)) =>
+ ref(u2evt(tycon.symbol.asClass)).appliedTo(tree)
case _ =>
tree.tpe.widen match {
- case ErasedValueType(cls, _) =>
- ref(evt2u(cls)).appliedTo(tree)
+ case ErasedValueType(tycon, _) =>
+ ref(evt2u(tycon.symbol.asClass)).appliedTo(tree)
case _ =>
if (pt.isPrimitiveValueType)
primitiveConversion(tree, pt.classSymbol)
@@ -299,8 +305,9 @@ object Erasure extends TypeTestsCasts{
assignType(untpd.cpy.Typed(tree)(expr1, tpt1), tpt1)
}
- override def typedLiteral(tree: untpd.Literal)(implicit ctc: Context): Literal =
+ override def typedLiteral(tree: untpd.Literal)(implicit ctx: Context): Literal =
if (tree.typeOpt.isRef(defn.UnitClass)) tree.withType(tree.typeOpt)
+ else if (tree.const.tag == Constants.ClazzTag) Literal(Constant(erasure(tree.const.typeValue)))
else super.typedLiteral(tree)
/** Type check select nodes, applying the following rewritings exhaustively
@@ -381,9 +388,6 @@ object Erasure extends TypeTestsCasts{
recur(typed(tree.qualifier, AnySelectionProto))
}
- override def typedSelectFromTypeTree(tree: untpd.SelectFromTypeTree, pt: Type)(implicit ctx: Context) =
- untpd.Ident(tree.name).withPos(tree.pos).withType(erasedType(tree))
-
override def typedThis(tree: untpd.This)(implicit ctx: Context): Tree =
if (tree.symbol == ctx.owner.enclosingClass || tree.symbol.isStaticOwner) promote(tree)
else {
@@ -467,28 +471,18 @@ object Erasure extends TypeTestsCasts{
tpt = untpd.TypedSplice(TypeTree(sym.info).withPos(vdef.tpt.pos))), sym)
override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = {
- var effectiveSym = sym
- if (sym == defn.newRefArrayMethod) {
- // newRefArray is treated specially: It's the only source-defined method
- // that has a polymorphic type after erasure. But treating its (dummy) definition
- // with a polymorphic type at and after erasure is an awkward special case.
- // We therefore rewrite the method definition with a new Symbol of type
- // (length: Int)Object
- val MethodType(pnames, ptypes) = sym.info.resultType
- effectiveSym = sym.copy(info = MethodType(pnames, ptypes, defn.ObjectType))
- }
val restpe =
- if (effectiveSym.isConstructor) defn.UnitType
- else effectiveSym.info.resultType
+ if (sym.isConstructor) defn.UnitType
+ else sym.info.resultType
val ddef1 = untpd.cpy.DefDef(ddef)(
tparams = Nil,
- vparamss = (outer.paramDefs(effectiveSym) ::: ddef.vparamss.flatten) :: Nil,
+ vparamss = (outer.paramDefs(sym) ::: ddef.vparamss.flatten) :: Nil,
tpt = untpd.TypedSplice(TypeTree(restpe).withPos(ddef.tpt.pos)),
rhs = ddef.rhs match {
case id @ Ident(nme.WILDCARD) => untpd.TypedSplice(id.withType(restpe))
case _ => ddef.rhs
})
- super.typedDefDef(ddef1, effectiveSym)
+ super.typedDefDef(ddef1, sym)
}
/** After erasure, we may have to replace the closure method by a bridge.
@@ -562,43 +556,47 @@ object Erasure extends TypeTestsCasts{
before match {
case Nil => emittedBridges.toList
case (oldMember: untpd.DefDef) :: oldTail =>
- val oldSymbol = oldMember.symbol(beforeCtx)
- val newSymbol = member.symbol(ctx)
- assert(oldSymbol.name(beforeCtx) == newSymbol.name,
- s"${oldSymbol.name(beforeCtx)} bridging with ${newSymbol.name}")
- val newOverridden = oldSymbol.denot.allOverriddenSymbols.toSet // TODO: clarify new <-> old in a comment; symbols are swapped here
- val oldOverridden = newSymbol.allOverriddenSymbols(beforeCtx).toSet // TODO: can we find a more efficient impl? newOverridden does not have to be a set!
- def stillInBaseClass(sym: Symbol) = ctx.owner derivesFrom sym.owner
- val neededBridges = (oldOverridden -- newOverridden).filter(stillInBaseClass)
-
- var minimalSet = Set[Symbol]()
- // compute minimal set of bridges that are needed:
- for (bridge <- neededBridges) {
- val isRequired = minimalSet.forall(nxtBridge => !(bridge.info =:= nxtBridge.info))
-
- if (isRequired) {
- // check for clashes
- val clash: Option[Symbol] = oldSymbol.owner.info.decls.lookupAll(bridge.name).find {
- sym =>
- (sym.name eq bridge.name) && sym.info.widen =:= bridge.info.widen
- }.orElse(
+ try {
+ val oldSymbol = oldMember.symbol(beforeCtx)
+ val newSymbol = member.symbol(ctx)
+ assert(oldSymbol.name(beforeCtx) == newSymbol.name,
+ s"${oldSymbol.name(beforeCtx)} bridging with ${newSymbol.name}")
+ val newOverridden = oldSymbol.denot.allOverriddenSymbols.toSet // TODO: clarify new <-> old in a comment; symbols are swapped here
+ val oldOverridden = newSymbol.allOverriddenSymbols(beforeCtx).toSet // TODO: can we find a more efficient impl? newOverridden does not have to be a set!
+ def stillInBaseClass(sym: Symbol) = ctx.owner derivesFrom sym.owner
+ val neededBridges = (oldOverridden -- newOverridden).filter(stillInBaseClass)
+
+ var minimalSet = Set[Symbol]()
+ // compute minimal set of bridges that are needed:
+ for (bridge <- neededBridges) {
+ val isRequired = minimalSet.forall(nxtBridge => !(bridge.info =:= nxtBridge.info))
+
+ if (isRequired) {
+ // check for clashes
+ val clash: Option[Symbol] = oldSymbol.owner.info.decls.lookupAll(bridge.name).find {
+ sym =>
+ (sym.name eq bridge.name) && sym.info.widen =:= bridge.info.widen
+ }.orElse(
emittedBridges.find(stat => (stat.name == bridge.name) && stat.tpe.widen =:= bridge.info.widen)
- .map(_.symbol)
- )
- clash match {
- case Some(cl) =>
- ctx.error(i"bridge for method ${newSymbol.showLocated(beforeCtx)} of type ${newSymbol.info(beforeCtx)}\n" +
- i"clashes with ${cl.symbol.showLocated(beforeCtx)} of type ${cl.symbol.info(beforeCtx)}\n" +
- i"both have same type after erasure: ${bridge.symbol.info}")
- case None => minimalSet += bridge
+ .map(_.symbol))
+ clash match {
+ case Some(cl) =>
+ ctx.error(i"bridge for method ${newSymbol.showLocated(beforeCtx)} of type ${newSymbol.info(beforeCtx)}\n" +
+ i"clashes with ${cl.symbol.showLocated(beforeCtx)} of type ${cl.symbol.info(beforeCtx)}\n" +
+ i"both have same type after erasure: ${bridge.symbol.info}")
+ case None => minimalSet += bridge
+ }
}
}
- }
- val bridgeImplementations = minimalSet.map {
- sym => makeBridgeDef(member, sym)(ctx)
+ val bridgeImplementations = minimalSet.map {
+ sym => makeBridgeDef(member, sym)(ctx)
+ }
+ emittedBridges ++= bridgeImplementations
+ } catch {
+ case ex: MergeError => ctx.error(ex.getMessage, member.pos)
}
- emittedBridges ++= bridgeImplementations
+
traverse(newTail, oldTail, emittedBridges)
case notADefDef :: oldTail =>
traverse(after, oldTail, emittedBridges)
@@ -611,7 +609,7 @@ object Erasure extends TypeTestsCasts{
traverse(newStats, oldStats)
}
- private final val NoBridgeFlags = Flags.Accessor | Flags.Deferred | Flags.Lazy
+ private final val NoBridgeFlags = Flags.Accessor | Flags.Deferred | Flags.Lazy | Flags.ParamAccessor
/** Create a bridge DefDef which overrides a parent method.
*
diff --git a/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/src/dotty/tools/dotc/transform/ExpandPrivate.scala
index a6f203478..83cd395ff 100644
--- a/src/dotty/tools/dotc/transform/ExpandPrivate.scala
+++ b/src/dotty/tools/dotc/transform/ExpandPrivate.scala
@@ -16,10 +16,15 @@ import TreeTransforms._
import Decorators._
import ast.Trees._
import TreeTransforms._
+import java.io.File.separatorChar
+import ValueClasses._
/** Make private term members that are accessed from another class
* non-private by resetting the Private flag and expanding their name.
*
+ * Make private accessor in value class not-private. This is necessary to unbox
+ * the value class when accessing it from separate compilation units
+ *
* Also, make non-private any private parameter forwarders that forward to an inherited
* public or protected parameter accessor with the same name as the forwarder.
* This is necessary since private methods are not allowed to have the same name
@@ -51,14 +56,32 @@ class ExpandPrivate extends MiniPhaseTransform with IdentityDenotTransformer { t
}
}
+ private def isVCPrivateParamAccessor(d: SymDenotation)(implicit ctx: Context) =
+ d.isTerm && d.is(PrivateParamAccessor) && isDerivedValueClass(d.owner)
+
/** Make private terms accessed from different classes non-private.
* Note: this happens also for accesses between class and linked module class.
* If we change the scheme at one point to make static module class computations
* static members of the companion class, we should tighten the condition below.
*/
private def ensurePrivateAccessible(d: SymDenotation)(implicit ctx: Context) =
- if (d.is(PrivateTerm) && d.owner != ctx.owner.enclosingClass) {
- assert(d.symbol.sourceFile == ctx.source.file,
+ if (isVCPrivateParamAccessor(d))
+ d.ensureNotPrivate.installAfter(thisTransform)
+ else if (d.is(PrivateTerm) && d.owner != ctx.owner.enclosingClass) {
+ // Paths `p1` and `p2` are similar if they have a common suffix that follows
+ // possibly different directory paths. That is, their common suffix extends
+ // in both cases either to the start of the path or to a file separator character.
+ def isSimilar(p1: String, p2: String): Boolean = {
+ var i = p1.length - 1
+ var j = p2.length - 1
+ while (i >= 0 && j >= 0 && p1(i) == p2(j) && p1(i) != separatorChar) {
+ i -= 1
+ j -= 1
+ }
+ (i < 0 || p1(i) == separatorChar) &&
+ (j < 0 || p1(j) == separatorChar)
+ }
+ assert(isSimilar(d.symbol.sourceFile.path, ctx.source.file.path),
i"private ${d.symbol.showLocated} in ${d.symbol.sourceFile} accessed from ${ctx.owner.showLocated} in ${ctx.source.file}")
d.ensureNotPrivate.installAfter(thisTransform)
}
@@ -80,6 +103,8 @@ class ExpandPrivate extends MiniPhaseTransform with IdentityDenotTransformer { t
if sym.is(PrivateParamAccessor) && sel.symbol.is(ParamAccessor) && sym.name == sel.symbol.name =>
sym.ensureNotPrivate.installAfter(thisTransform)
case _ =>
+ if (isVCPrivateParamAccessor(sym))
+ sym.ensureNotPrivate.installAfter(thisTransform)
}
tree
}
diff --git a/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/src/dotty/tools/dotc/transform/ExpandSAMs.scala
index d9445d046..91399f91a 100644
--- a/src/dotty/tools/dotc/transform/ExpandSAMs.scala
+++ b/src/dotty/tools/dotc/transform/ExpandSAMs.scala
@@ -74,7 +74,8 @@ class ExpandSAMs extends MiniPhaseTransform { thisTransformer =>
Bind(defaultSym, Underscore(selector.tpe.widen)),
EmptyTree,
Literal(Constant(false)))
- cpy.Match(applyRhs)(paramRef, cases.map(translateCase) :+ defaultCase)
+ val annotated = Annotated(paramRef, New(ref(defn.UncheckedAnnotType)))
+ cpy.Match(applyRhs)(annotated, cases.map(translateCase) :+ defaultCase)
case _ =>
tru
}
diff --git a/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/src/dotty/tools/dotc/transform/ExplicitOuter.scala
index 7ec0739c1..3f235dca7 100644
--- a/src/dotty/tools/dotc/transform/ExplicitOuter.scala
+++ b/src/dotty/tools/dotc/transform/ExplicitOuter.scala
@@ -15,7 +15,7 @@ import ast.Trees._
import SymUtils._
import dotty.tools.dotc.ast.tpd
import dotty.tools.dotc.core.Phases.Phase
-import util.Attachment
+import util.Property
import collection.mutable
/** This phase adds outer accessors to classes and traits that need them.
@@ -36,7 +36,7 @@ class ExplicitOuter extends MiniPhaseTransform with InfoTransformer { thisTransf
import ExplicitOuter._
import ast.tpd._
- val Outer = new Attachment.Key[Tree]
+ val Outer = new Property.Key[Tree]
override def phaseName: String = "explicitOuter"
@@ -47,7 +47,7 @@ class ExplicitOuter extends MiniPhaseTransform with InfoTransformer { thisTransf
/** Add outer accessors if a class always needs an outer pointer */
override def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context) = tp match {
- case tp @ ClassInfo(_, cls, _, decls, _) if needsOuterAlways(cls) =>
+ case tp @ ClassInfo(_, cls, _, decls, _) if needsOuterAlways(cls) && !sym.is(JavaDefined) =>
val newDecls = decls.cloneScope
newOuterAccessors(cls).foreach(newDecls.enter)
tp.derivedClassInfo(decls = newDecls)
@@ -57,6 +57,12 @@ class ExplicitOuter extends MiniPhaseTransform with InfoTransformer { thisTransf
override def mayChange(sym: Symbol)(implicit ctx: Context): Boolean = sym.isClass
+ /** Convert a selection of the form `qual.C_<OUTER>` to an outer path from `qual` to `C` */
+ override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo) =
+ if (tree.name.isOuterSelect)
+ outer.path(tree.tpe.widen.classSymbol, tree.qualifier).ensureConforms(tree.tpe)
+ else tree
+
/** First, add outer accessors if a class does not have them yet and it references an outer this.
* If the class has outer accessors, implement them.
* Furthermore, if a parent trait might have an outer accessor,
diff --git a/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/src/dotty/tools/dotc/transform/ExplicitSelf.scala
index c6a218157..7bb65e575 100644
--- a/src/dotty/tools/dotc/transform/ExplicitSelf.scala
+++ b/src/dotty/tools/dotc/transform/ExplicitSelf.scala
@@ -16,21 +16,31 @@ import Flags._
* where `C` is a class with explicit self type and `C` is not a
* subclass of the owner of `m` to
*
- * C.this.asInstanceOf[S].m
+ * C.this.asInstanceOf[S & C.this.type].m
*
* where `S` is the self type of `C`.
+ * See run/i789.scala for a test case why this is needed.
+ *
+ * Also replaces idents referring to the self type with ThisTypes.
*/
class ExplicitSelf extends MiniPhaseTransform { thisTransform =>
import ast.tpd._
override def phaseName = "explicitSelf"
+ override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) = tree.tpe match {
+ case tp: ThisType =>
+ ctx.debuglog(s"owner = ${ctx.owner}, context = ${ctx}")
+ This(tp.cls) withPos tree.pos
+ case _ => tree
+ }
+
override def transformSelect(tree: Select)(implicit ctx: Context, info: TransformerInfo): Tree = tree match {
case Select(thiz: This, name) if name.isTermName =>
val cls = thiz.symbol.asClass
val cinfo = cls.classInfo
if (cinfo.givenSelfType.exists && !cls.derivesFrom(tree.symbol.owner))
- cpy.Select(tree)(thiz.asInstance(cinfo.selfType), name)
+ cpy.Select(tree)(thiz.asInstance(AndType(cinfo.selfType, thiz.tpe)), name)
else tree
case _ => tree
}
diff --git a/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
index c5ab49c9c..62a21198d 100644
--- a/src/dotty/tools/dotc/transform/ExtensionMethods.scala
+++ b/src/dotty/tools/dotc/transform/ExtensionMethods.scala
@@ -33,7 +33,7 @@ import SymUtils._
* This is different from the implementation of value classes in Scala 2
* (see SIP-15) which uses `asInstanceOf` which does not typecheck.
*/
-class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with FullParameterization with NeedsCompanions { thisTransformer =>
+class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with FullParameterization { thisTransformer =>
import tpd._
import ExtensionMethods._
@@ -45,10 +45,6 @@ class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with Ful
override def runsAfterGroupsOf = Set(classOf[FirstTransform]) // need companion objects to exist
- def isCompanionNeeded(cls: ClassSymbol)(implicit ctx: Context): Boolean = {
- isDerivedValueClass(cls)
- }
-
override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref match {
case moduleClassSym: ClassDenotation if moduleClassSym is ModuleClass =>
moduleClassSym.linkedClass match {
@@ -70,7 +66,7 @@ class ExtensionMethods extends MiniPhaseTransform with DenotTransformer with Ful
}
val underlying = valueErasure(underlyingOfValueClass(valueClass))
- val evt = ErasedValueType(valueClass, underlying)
+ val evt = ErasedValueType(valueClass.typeRef, underlying)
val u2evtSym = ctx.newSymbol(moduleSym, nme.U2EVT, Synthetic | Method,
MethodType(List(nme.x_0), List(underlying), evt))
val evt2uSym = ctx.newSymbol(moduleSym, nme.EVT2U, Synthetic | Method,
@@ -219,19 +215,19 @@ object ExtensionMethods {
val candidates = extensionNames(imeth) map (companionInfo.decl(_).symbol) filter (_.exists)
val matching = candidates filter (c => FullParameterization.memberSignature(c.info) == imeth.signature)
assert(matching.nonEmpty,
- sm"""|no extension method found for:
- |
- | $imeth:${imeth.info.show} with signature ${imeth.signature}
- |
- | Candidates:
- |
- | ${candidates.map(c => c.name + ":" + c.info.show).mkString("\n")}
- |
- | Candidates (signatures normalized):
- |
- | ${candidates.map(c => c.name + ":" + c.info.signature + ":" + FullParameterization.memberSignature(c.info)).mkString("\n")}
- |
- | Eligible Names: ${extensionNames(imeth).mkString(",")}""")
+ i"""no extension method found for:
+ |
+ | $imeth:${imeth.info.show} with signature ${imeth.signature}
+ |
+ | Candidates:
+ |
+ | ${candidates.map(c => c.name + ":" + c.info.show).mkString("\n")}
+ |
+ | Candidates (signatures normalized):
+ |
+ | ${candidates.map(c => c.name + ":" + c.info.signature + ":" + FullParameterization.memberSignature(c.info)).mkString("\n")}
+ |
+ | Eligible Names: ${extensionNames(imeth).mkString(",")}""")
matching.head.asTerm
}
}
diff --git a/src/dotty/tools/dotc/transform/FirstTransform.scala b/src/dotty/tools/dotc/transform/FirstTransform.scala
index 37ae1d94e..74dc9b9d6 100644
--- a/src/dotty/tools/dotc/transform/FirstTransform.scala
+++ b/src/dotty/tools/dotc/transform/FirstTransform.scala
@@ -28,8 +28,9 @@ import StdNames._
* - ensures there are companion objects for all classes except module classes
* - eliminates some kinds of trees: Imports, NamedArgs
* - stubs out native methods
+ * - eliminate self tree in Template and self symbol in ClassInfo
*/
-class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer with AnnotationTransformer { thisTransformer =>
+class FirstTransform extends MiniPhaseTransform with InfoTransformer with AnnotationTransformer { thisTransformer =>
import ast.tpd._
override def phaseName = "firstTransform"
@@ -44,7 +45,15 @@ class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer wi
this
}
- def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = {
+ /** eliminate self symbol in ClassInfo */
+ override def transformInfo(tp: Type, sym: Symbol)(implicit ctx: Context): Type = tp match {
+ case tp @ ClassInfo(_, _, _, _, self: Symbol) =>
+ tp.derivedClassInfo(selfInfo = self.info)
+ case _ =>
+ tp
+ }
+
+ /*
tp match {
//create companions for value classes that are not from currently compiled source file
case tp@ClassInfo(_, cls, _, decls, _)
@@ -59,11 +68,12 @@ class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer wi
case _ => tp
}
}
+ */
override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
tree match {
- case Select(qual, _) if tree.symbol.exists =>
- assert(qual.tpe derivesFrom tree.symbol.owner, i"non member selection of ${tree.symbol.showLocated} from ${qual.tpe}")
+ case Select(qual, name) if !name.isOuterSelect && tree.symbol.exists =>
+ assert(qual.tpe derivesFrom tree.symbol.owner, i"non member selection of ${tree.symbol.showLocated} from ${qual.tpe} in $tree")
case _: TypeTree =>
case _: Import | _: NamedArg | _: TypTree =>
assert(false, i"illegal tree: $tree")
@@ -132,6 +142,11 @@ class FirstTransform extends MiniPhaseTransform with IdentityDenotTransformer wi
(modul, mcComp, classComp)
}
+  /** eliminate self in Template */
+ override def transformTemplate(impl: Template)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ cpy.Template(impl)(self = EmptyValDef)
+ }
+
override def transformDefDef(ddef: DefDef)(implicit ctx: Context, info: TransformerInfo) = {
if (ddef.symbol.hasAnnotation(defn.NativeAnnot)) {
ddef.symbol.resetFlag(Deferred)
diff --git a/src/dotty/tools/dotc/transform/FullParameterization.scala b/src/dotty/tools/dotc/transform/FullParameterization.scala
index e9057e885..6c69c735b 100644
--- a/src/dotty/tools/dotc/transform/FullParameterization.scala
+++ b/src/dotty/tools/dotc/transform/FullParameterization.scala
@@ -12,6 +12,8 @@ import NameOps._
import ast._
import ast.Trees._
+import scala.reflect.internal.util.Collections
+
/** Provides methods to produce fully parameterized versions of instance methods,
* where the `this` of the enclosing class is abstracted out in an extra leading
* `$this` parameter and type parameters of the class become additional type
@@ -86,11 +88,14 @@ trait FullParameterization {
* }
*
* If a self type is present, $this has this self type as its type.
+ *
* @param abstractOverClass if true, include the type parameters of the class in the method's list of type parameters.
+ * @param liftThisType if true, require created $this to be $this: (Foo[A] & Foo.this).
+ * This is needed if the created member stays inside the scope of Foo (as in tailrec).
*/
- def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true)(implicit ctx: Context): Type = {
+ def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(implicit ctx: Context): Type = {
val (mtparamCount, origResult) = info match {
- case info @ PolyType(mtnames) => (mtnames.length, info.resultType)
+ case info: PolyType => (info.paramNames.length, info.resultType)
case info: ExprType => (0, info.resultType)
case _ => (0, info)
}
@@ -100,7 +105,8 @@ trait FullParameterization {
/** The method result type */
def resultType(mapClassParams: Type => Type) = {
val thisParamType = mapClassParams(clazz.classInfo.selfType)
- MethodType(nme.SELF :: Nil, thisParamType :: Nil)(mt =>
+ val firstArgType = if (liftThisType) thisParamType & clazz.thisType else thisParamType
+ MethodType(nme.SELF :: Nil, firstArgType :: Nil)(mt =>
mapClassParams(origResult).substThisUnlessStatic(clazz, MethodParam(mt, 0)))
}
@@ -115,8 +121,8 @@ trait FullParameterization {
ctparams.map(tparam => mapClassParams(tparam.info, pt).bounds)
info match {
- case info @ PolyType(mtnames) =>
- PolyType(mtnames ++ ctnames)(
+ case info: PolyType =>
+ PolyType(info.paramNames ++ ctnames)(
pt =>
(info.paramBounds.map(mapClassParams(_, pt).bounds) ++
mappedClassBounds(pt)).mapConserve(_.subst(info, pt).bounds),
@@ -217,12 +223,26 @@ trait FullParameterization {
* - the `this` of the enclosing class,
* - the value parameters of the original method `originalDef`.
*/
- def forwarder(derived: TermSymbol, originalDef: DefDef, abstractOverClass: Boolean = true)(implicit ctx: Context): Tree =
- ref(derived.termRef)
- .appliedToTypes(allInstanceTypeParams(originalDef, abstractOverClass).map(_.typeRef))
- .appliedTo(This(originalDef.symbol.enclosingClass.asClass))
- .appliedToArgss(originalDef.vparamss.nestedMap(vparam => ref(vparam.symbol)))
- .withPos(originalDef.rhs.pos)
+ def forwarder(derived: TermSymbol, originalDef: DefDef, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(implicit ctx: Context): Tree = {
+ val fun =
+ ref(derived.termRef)
+ .appliedToTypes(allInstanceTypeParams(originalDef, abstractOverClass).map(_.typeRef))
+ .appliedTo(This(originalDef.symbol.enclosingClass.asClass))
+
+ (if (!liftThisType)
+ fun.appliedToArgss(originalDef.vparamss.nestedMap(vparam => ref(vparam.symbol)))
+ else {
+ // this type could have changed on forwarding. Need to insert a cast.
+ val args = Collections.map2(originalDef.vparamss, fun.tpe.paramTypess)((vparams, paramTypes) =>
+ Collections.map2(vparams, paramTypes)((vparam, paramType) => {
+ assert(vparam.tpe <:< paramType.widen) // type should still conform to widened type
+ ref(vparam.symbol).ensureConforms(paramType)
+ })
+ )
+ fun.appliedToArgss(args)
+
+ }).withPos(originalDef.rhs.pos)
+ }
}
object FullParameterization {
diff --git a/src/dotty/tools/dotc/transform/GetClass.scala b/src/dotty/tools/dotc/transform/GetClass.scala
index f25fd6f64..6a9a5fda2 100644
--- a/src/dotty/tools/dotc/transform/GetClass.scala
+++ b/src/dotty/tools/dotc/transform/GetClass.scala
@@ -20,7 +20,8 @@ class GetClass extends MiniPhaseTransform {
override def phaseName: String = "getClass"
- override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure])
+ // getClass transformation should be applied to specialized methods
+ override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure], classOf[FunctionalInterfaces])
override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
import ast.Trees._
diff --git a/src/dotty/tools/dotc/transform/Getters.scala b/src/dotty/tools/dotc/transform/Getters.scala
index 75235d0f5..31171dfab 100644
--- a/src/dotty/tools/dotc/transform/Getters.scala
+++ b/src/dotty/tools/dotc/transform/Getters.scala
@@ -56,6 +56,7 @@ class Getters extends MiniPhaseTransform with SymTransformer { thisTransform =>
d.is(NoGetterNeeded) ||
d.initial.asInstanceOf[SymDenotation].is(PrivateLocal) && !d.owner.is(Trait) && !isDerivedValueClass(d.owner) && !d.is(Flags.Lazy) ||
d.is(Module) && d.isStatic ||
+ d.hasAnnotation(defn.ScalaStaticAnnot) ||
d.isSelfSym
if (d.isTerm && (d.is(Lazy) || d.owner.isClass) && d.info.isValueType && !noGetterNeeded) {
val maybeStable = if (d.isStable) Stable else EmptyFlags
diff --git a/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/src/dotty/tools/dotc/transform/InterceptedMethods.scala
index ffb4ae756..7c60e8d72 100644
--- a/src/dotty/tools/dotc/transform/InterceptedMethods.scala
+++ b/src/dotty/tools/dotc/transform/InterceptedMethods.scala
@@ -22,7 +22,6 @@ import dotty.tools.dotc.ast.{untpd, tpd}
import dotty.tools.dotc.core.Constants.Constant
import dotty.tools.dotc.core.Types.MethodType
import dotty.tools.dotc.core.Names.Name
-import dotty.runtime.LazyVals
import scala.collection.mutable.ListBuffer
import dotty.tools.dotc.core.Denotations.SingleDenotation
import dotty.tools.dotc.core.SymDenotations.SymDenotation
diff --git a/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala b/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala
new file mode 100644
index 000000000..8bc4a2aa9
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/IsInstanceOfEvaluator.scala
@@ -0,0 +1,168 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.util.Positions._
+import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+import core._
+import Contexts.Context, Types._, Constants._, Decorators._, Symbols._
+import TypeUtils._, TypeErasure._, Flags._
+
+
+/** Implements partial evaluation of `sc.isInstanceOf[Sel]` according to:
+ *
+ * +-------------+----------------------------+----------------------------+------------------+
+ * | Sel\sc | trait | class | final class |
+ * +-------------+----------------------------+----------------------------+------------------+
+ * | trait | ? | ? | statically known |
+ * | class | ? | false if classes unrelated | statically known |
+ * | final class | false if classes unrelated | false if classes unrelated | statically known |
+ * +-------------+----------------------------+----------------------------+------------------+
+ *
+ * This is a generalized solution to raising an error on unreachable match
+ * cases and warnings on other statically known results of `isInstanceOf`.
+ *
+ * Steps taken:
+ *
+ * 1. evalTypeApply will establish the matrix and choose the appropriate
+ * handling for the case:
+ * 2. a) Sel/sc is a value class or scrutinee is `Any`
+ * b) handleStaticallyKnown
+ * c) falseIfUnrelated with `scrutinee <:< selector`
+ * d) handleFalseUnrelated
+ * e) leave as is (aka `happens`)
+ * 3. Rewrite according to step taken in `2`
+ */
+class IsInstanceOfEvaluator extends MiniPhaseTransform { thisTransformer =>
+
+ import dotty.tools.dotc.ast.tpd._
+
+ def phaseName = "isInstanceOfEvaluator"
+ override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val defn = ctx.definitions
+
+ /** Handles the four cases of statically known `isInstanceOf`s and gives
+ * the correct warnings, or an error if statically known to be false in
+ * match
+ */
+ def handleStaticallyKnown(select: Select, scrutinee: Type, selector: Type, inMatch: Boolean, pos: Position): Tree = {
+ val scrutineeSubSelector = scrutinee <:< selector
+ if (!scrutineeSubSelector && inMatch) {
+ ctx.error(
+ s"this case is unreachable due to `${selector.show}` not being a subclass of `${scrutinee.show}`",
+ Position(pos.start - 5, pos.end - 5)
+ )
+ rewrite(select, to = false)
+ } else if (!scrutineeSubSelector && !inMatch) {
+ ctx.warning(
+ s"this will always yield false since `${scrutinee.show}` is not a subclass of `${selector.show}` (will be optimized away)",
+ pos
+ )
+ rewrite(select, to = false)
+ } else if (scrutineeSubSelector && !inMatch) {
+ ctx.warning(
+ s"this will always yield true if the scrutinee is non-null, since `${scrutinee.show}` is a subclass of `${selector.show}` (will be optimized away)",
+ pos
+ )
+ rewrite(select, to = true)
+ } else /* if (scrutineeSubSelector && inMatch) */ rewrite(select, to = true)
+ }
+
+ /** Rewrites cases with unrelated types */
+ def handleFalseUnrelated(select: Select, scrutinee: Type, selector: Type, inMatch: Boolean) =
+ if (inMatch) {
+ ctx.error(
+ s"will never match since `${selector.show}` is not a subclass of `${scrutinee.show}`",
+ Position(select.pos.start - 5, select.pos.end - 5)
+ )
+ rewrite(select, to = false)
+ } else {
+ ctx.warning(
+ s"will always yield false since `${scrutinee.show}` is not a subclass of `${selector.show}`",
+ select.pos
+ )
+ rewrite(select, to = false)
+ }
+
+ /** Rewrites the select to a boolean if `to` is false or if the qualifier
+ * is a value class.
+ *
+ * If `to` is set to true and the qualifier is not a primitive, the
+ * instanceOf is replaced by a null check, since:
+ *
+   * `scrutinee.isInstanceOf[Selector]` holds iff `scrutinee ne null`
+ */
+ def rewrite(tree: Select, to: Boolean): Tree =
+ if (!to || !tree.qualifier.tpe.widen.derivesFrom(defn.AnyRefAlias)) {
+ val literal = Literal(Constant(to))
+ if (!isPureExpr(tree.qualifier)) Block(List(tree.qualifier), literal)
+ else literal
+ } else
+ Apply(tree.qualifier.select(defn.Object_ne), List(Literal(Constant(null))))
+
+ /** Attempts to rewrite TypeApply to either `scrutinee ne null` or a
+ * constant
+ */
+ def evalTypeApply(tree: TypeApply): Tree =
+ if (tree.symbol != defn.Any_isInstanceOf) tree
+ else tree.fun match {
+ case s: Select => {
+ val scrutinee = erasure(s.qualifier.tpe.widen)
+ val selector = erasure(tree.args.head.tpe.widen)
+
+ val scTrait = scrutinee.typeSymbol is Trait
+ val scClass =
+ scrutinee.typeSymbol.isClass &&
+ !(scrutinee.typeSymbol is Trait) &&
+ !(scrutinee.typeSymbol is Module)
+
+ val scClassNonFinal = scClass && !(scrutinee.typeSymbol is Final)
+ val scFinalClass = scClass && (scrutinee.typeSymbol is Final)
+
+ val selTrait = selector.typeSymbol is Trait
+ val selClass =
+ selector.typeSymbol.isClass &&
+ !(selector.typeSymbol is Trait) &&
+ !(selector.typeSymbol is Module)
+
+ val selClassNonFinal = selClass && !(selector.typeSymbol is Final)
+ val selFinalClass = selClass && (selector.typeSymbol is Final)
+
+ // Cases ---------------------------------
+ val valueClassesOrAny =
+ ValueClasses.isDerivedValueClass(scrutinee.typeSymbol) ||
+ ValueClasses.isDerivedValueClass(selector.typeSymbol) ||
+ scrutinee == defn.ObjectType
+
+ val knownStatically = scFinalClass
+
+ val falseIfUnrelated =
+ (scClassNonFinal && selClassNonFinal) ||
+ (scClassNonFinal && selFinalClass) ||
+ (scTrait && selFinalClass)
+
+ val happens =
+ (scClassNonFinal && selClassNonFinal) ||
+ (scTrait && selClassNonFinal) ||
+ (scTrait && selTrait)
+
+ val inMatch = s.qualifier.symbol is Case
+
+ if (valueClassesOrAny) tree
+ else if (knownStatically)
+ handleStaticallyKnown(s, scrutinee, selector, inMatch, tree.pos)
+ else if (falseIfUnrelated && scrutinee <:< selector)
+ // scrutinee is a subtype of the selector, safe to rewrite
+ rewrite(s, to = true)
+ else if (falseIfUnrelated && !(selector <:< scrutinee))
+ // selector and scrutinee are unrelated
+ handleFalseUnrelated(s, scrutinee, selector, inMatch)
+ else if (happens) tree
+ else tree
+ }
+
+ case _ => tree
+ }
+
+ evalTypeApply(tree)
+ }
+}
diff --git a/src/dotty/tools/dotc/transform/LambdaLift.scala b/src/dotty/tools/dotc/transform/LambdaLift.scala
index 3ef684e55..19fb3dd0c 100644
--- a/src/dotty/tools/dotc/transform/LambdaLift.scala
+++ b/src/dotty/tools/dotc/transform/LambdaLift.scala
@@ -121,7 +121,10 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisTransform
private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
f.getOrElseUpdate(sym, newSymSet)
- def freeVars(sym: Symbol): List[Symbol] = free.getOrElse(sym, Nil).toList
+ def freeVars(sym: Symbol): List[Symbol] = free get sym match {
+ case Some(set) => set.toList
+ case None => Nil
+ }
def proxyOf(sym: Symbol, fv: Symbol) = proxyMap.getOrElse(sym, Map.empty)(fv)
@@ -249,14 +252,21 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisTransform
else if (sym is Method) markCalled(sym, enclosure)
else if (sym.isTerm) markFree(sym, enclosure)
}
- if (sym.maybeOwner.isClass) narrowTo(sym.owner.asClass)
+ def captureImplicitThis(x: Type): Unit = {
+ x match {
+ case tr@TermRef(x, _) if (!tr.termSymbol.isStatic) => captureImplicitThis(x)
+ case x: ThisType if (!x.tref.typeSymbol.isStaticOwner) => narrowTo(x.tref.typeSymbol.asClass)
+ case _ =>
+ }
+ }
+ captureImplicitThis(tree.tpe)
case tree: Select =>
if (sym.is(Method) && isLocal(sym)) markCalled(sym, enclosure)
case tree: This =>
narrowTo(tree.symbol.asClass)
case tree: DefDef =>
if (sym.owner.isTerm && !sym.is(Label))
- liftedOwner(sym) = sym.enclosingClass.topLevelClass
+ liftedOwner(sym) = sym.enclosingPackageClass
// this will make methods in supercall constructors of top-level classes owned
// by the enclosing package, which means they will be static.
// On the other hand, all other methods will be indirectly owned by their
@@ -357,13 +367,16 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisTransform
if (lOwner is Package) {
val encClass = local.enclosingClass
val topClass = local.topLevelClass
- // member of a static object
- if (encClass.isStatic && encClass.isProperlyContainedIn(topClass)) {
- // though the second condition seems weird, it's not true for symbols which are defined in some
- // weird combinations of super calls.
- (encClass, EmptyFlags)
- } else
- (topClass, JavaStatic)
+ val preferEncClass =
+ encClass.isStatic &&
+ // non-static classes can capture owners, so should be avoided
+ (encClass.isProperlyContainedIn(topClass) ||
+ // can be false for symbols which are defined in some weird combination of supercalls.
+ encClass.is(ModuleClass, butNot = Package)
+ // needed to not cause deadlocks in classloader. see t5375.scala
+ )
+ if (preferEncClass) (encClass, EmptyFlags)
+ else (topClass, JavaStatic)
}
else (lOwner, EmptyFlags)
local.copySymDenotation(
diff --git a/src/dotty/tools/dotc/transform/LazyVals.scala b/src/dotty/tools/dotc/transform/LazyVals.scala
index fc02e68cc..e63a7c3a7 100644
--- a/src/dotty/tools/dotc/transform/LazyVals.scala
+++ b/src/dotty/tools/dotc/transform/LazyVals.scala
@@ -3,7 +3,6 @@ package transform
import dotty.tools.dotc.core.Annotations.Annotation
import dotty.tools.dotc.core.Phases.NeedsCompanions
-import dotty.tools.dotc.typer.Mode
import scala.collection.mutable
import core._
@@ -27,7 +26,7 @@ import dotty.tools.dotc.core.SymDenotations.SymDenotation
import dotty.tools.dotc.core.DenotTransformers.{SymTransformer, IdentityDenotTransformer, DenotTransformer}
import Erasure.Boxing.adaptToType
-class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with NeedsCompanions {
+class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer {
import LazyVals._
import tpd._
@@ -50,11 +49,6 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with Nee
* before this phase starts processing same tree */
override def runsAfter = Set(classOf[Mixin])
- def isCompanionNeeded(cls: ClassSymbol)(implicit ctx: Context): Boolean = {
- def hasLazyVal(cls: ClassSymbol) = cls.info.decls.exists(_.is(Flags.Lazy))
- hasLazyVal(cls) || cls.mixins.exists(hasLazyVal)
- }
-
override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree =
transformLazyVal(tree)
@@ -216,7 +210,7 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with Nee
def transformMemberDefNonVolatile(x: ValOrDefDef)(implicit ctx: Context) = {
val claz = x.symbol.owner.asClass
val tpe = x.tpe.widen.resultType.widen
- assert(!(x.mods is Flags.Mutable))
+ assert(!(x.symbol is Flags.Mutable))
val containerName = ctx.freshName(x.name.asTermName.lazyLocalName).toTermName
val containerSymbol = ctx.newSymbol(claz, containerName,
x.symbol.flags &~ containerFlagsMask | containerFlags | Flags.Private,
@@ -226,14 +220,14 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with Nee
val containerTree = ValDef(containerSymbol, defaultValue(tpe))
if (x.tpe.isNotNull && tpe <:< defn.ObjectType) { // can use 'null' value instead of flag
val slowPath = DefDef(x.symbol.asTerm, mkDefNonThreadSafeNonNullable(containerSymbol, x.rhs))
- Thicket(List(containerTree, slowPath))
+ Thicket(containerTree, slowPath)
}
else {
val flagName = ctx.freshName(x.name ++ StdNames.nme.BITMAP_PREFIX).toTermName
val flagSymbol = ctx.newSymbol(x.symbol.owner, flagName, containerFlags | Flags.Private, defn.BooleanType).enteredAfter(this)
val flag = ValDef(flagSymbol, Literal(Constants.Constant(false)))
val slowPath = DefDef(x.symbol.asTerm, mkNonThreadSafeDef(ref(containerSymbol), ref(flagSymbol), x.rhs))
- Thicket(List(containerTree, flag, slowPath))
+ Thicket(containerTree, flag, slowPath)
}
}
@@ -337,31 +331,33 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with Nee
}
def transformMemberDefVolatile(x: ValOrDefDef)(implicit ctx: Context) = {
- assert(!(x.mods is Flags.Mutable))
+ assert(!(x.symbol is Flags.Mutable))
val tpe = x.tpe.widen.resultType.widen
val claz = x.symbol.owner.asClass
val thizClass = Literal(Constant(claz.info))
- val companion = claz.companionModule
val helperModule = ctx.requiredModule("dotty.runtime.LazyVals")
val getOffset = Select(ref(helperModule), lazyNme.RLazyVals.getOffset)
var offsetSymbol: TermSymbol = null
var flag: Tree = EmptyTree
var ord = 0
+ def offsetName(id: Int) = (StdNames.nme.LAZY_FIELD_OFFSET + (if(x.symbol.owner.is(Flags.Module)) "_m_" else "") + id.toString).toTermName
+
// compute or create appropriate offsetSymol, bitmap and bits used by current ValDef
- appendOffsetDefs.get(companion.moduleClass) match {
+ appendOffsetDefs.get(claz) match {
case Some(info) =>
val flagsPerLong = (64 / dotty.runtime.LazyVals.BITS_PER_LAZY_VAL).toInt
info.ord += 1
ord = info.ord % flagsPerLong
val id = info.ord / flagsPerLong
+ val offsetById = offsetName(id)
if (ord != 0) { // there are unused bits in already existing flag
- offsetSymbol = companion.moduleClass.info.decl((StdNames.nme.LAZY_FIELD_OFFSET + id.toString).toTermName)
+ offsetSymbol = claz.info.decl(offsetById)
.suchThat(sym => (sym is Flags.Synthetic) && sym.isTerm)
.symbol.asTerm
} else { // need to create a new flag
- offsetSymbol = ctx.newSymbol(companion.moduleClass, (StdNames.nme.LAZY_FIELD_OFFSET + id.toString).toTermName, Flags.Synthetic, defn.LongType).enteredAfter(this)
+ offsetSymbol = ctx.newSymbol(claz, offsetById, Flags.Synthetic, defn.LongType).enteredAfter(this)
offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))
val flagName = (StdNames.nme.BITMAP_PREFIX + id.toString).toTermName
val flagSymbol = ctx.newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this)
@@ -371,17 +367,17 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with Nee
}
case None =>
- offsetSymbol = ctx.newSymbol(companion.moduleClass, (StdNames.nme.LAZY_FIELD_OFFSET + "0").toTermName, Flags.Synthetic, defn.LongType).enteredAfter(this)
+ offsetSymbol = ctx.newSymbol(claz, offsetName(0), Flags.Synthetic, defn.LongType).enteredAfter(this)
offsetSymbol.addAnnotation(Annotation(defn.ScalaStaticAnnot))
val flagName = (StdNames.nme.BITMAP_PREFIX + "0").toTermName
val flagSymbol = ctx.newSymbol(claz, flagName, containerFlags, defn.LongType).enteredAfter(this)
flag = ValDef(flagSymbol, Literal(Constants.Constant(0L)))
val offsetTree = ValDef(offsetSymbol, getOffset.appliedTo(thizClass, Literal(Constant(flagName.toString))))
- appendOffsetDefs += (companion.moduleClass -> new OffsetInfo(List(offsetTree), ord))
+ appendOffsetDefs += (claz -> new OffsetInfo(List(offsetTree), ord))
}
val containerName = ctx.freshName(x.name.asTermName.lazyLocalName).toTermName
- val containerSymbol = ctx.newSymbol(claz, containerName, (x.mods &~ containerFlagsMask | containerFlags).flags, tpe, coord = x.symbol.coord).enteredAfter(this)
+ val containerSymbol = ctx.newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this)
val containerTree = ValDef(containerSymbol, defaultValue(tpe))
@@ -394,8 +390,8 @@ class LazyVals extends MiniPhaseTransform with IdentityDenotTransformer with Nee
val accessor = mkThreadSafeDef(x.symbol.asTerm, claz, ord, containerSymbol, x.rhs, tpe, offset, getFlag, state, cas, setFlag, wait)
if (flag eq EmptyTree)
- Thicket(List(containerTree, accessor))
- else Thicket(List(containerTree, flag, accessor))
+ Thicket(containerTree, accessor)
+ else Thicket(containerTree, flag, accessor)
}
}
diff --git a/src/dotty/tools/dotc/transform/Memoize.scala b/src/dotty/tools/dotc/transform/Memoize.scala
index b775496ae..01c240e3a 100644
--- a/src/dotty/tools/dotc/transform/Memoize.scala
+++ b/src/dotty/tools/dotc/transform/Memoize.scala
@@ -36,6 +36,27 @@ import Decorators._
override def phaseName = "memoize"
+  /* Makes sure that, after getter and constructor generation, no
+   * non-deferred definitions are left without an implementation. */
+ override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = {
+ def errorLackImplementation(t: Tree) = {
+ val firstPhaseId = t.symbol.initial.validFor.firstPhaseId
+ val definingPhase = ctx.withPhase(firstPhaseId).phase.prev
+ throw new AssertionError(
+ i"Non-deferred definition introduced by $definingPhase lacks implementation: $t")
+ }
+ tree match {
+ case ddef: DefDef
+ if !ddef.symbol.is(Deferred) && ddef.rhs == EmptyTree =>
+ errorLackImplementation(ddef)
+ case tdef: TypeDef
+ if tdef.symbol.isClass && !tdef.symbol.is(Deferred) && tdef.rhs == EmptyTree =>
+ errorLackImplementation(tdef)
+ case _ =>
+ }
+ super.checkPostCondition(tree)
+ }
+
/** Should run after mixin so that fields get generated in the
* class that contains the concrete getter rather than the trait
* that defines it.
@@ -68,6 +89,10 @@ import Decorators._
}
lazy val field = sym.field.orElse(newField).asTerm
+
+ def adaptToField(tree: Tree) =
+ if (tree.isEmpty) tree else tree.ensureConforms(field.info.widen)
+
if (sym.is(Accessor, butNot = NoFieldNeeded))
if (sym.isGetter) {
def skipBlocks(t: Tree): Tree = t match {
@@ -85,14 +110,15 @@ import Decorators._
case _ =>
var rhs = tree.rhs.changeOwnerAfter(sym, field, thisTransform)
if (isWildcardArg(rhs)) rhs = EmptyTree
- val fieldDef = transformFollowing(ValDef(field, rhs))
+
+ val fieldDef = transformFollowing(ValDef(field, adaptToField(rhs)))
val getterDef = cpy.DefDef(tree)(rhs = transformFollowingDeep(ref(field))(ctx.withOwner(sym), info))
Thicket(fieldDef, getterDef)
}
} else if (sym.isSetter) {
if (!sym.is(ParamAccessor)) { val Literal(Constant(())) = tree.rhs } // this is intended as an assertion
field.setFlag(Mutable) // necessary for vals mixed in from Scala2 traits
- val initializer = Assign(ref(field), ref(tree.vparamss.head.head.symbol))
+ val initializer = Assign(ref(field), adaptToField(ref(tree.vparamss.head.head.symbol)))
cpy.DefDef(tree)(rhs = transformFollowingDeep(initializer)(ctx.withOwner(sym), info))
}
else tree // curiously, some accessors from Scala2 have ' ' suffixes. They count as
diff --git a/src/dotty/tools/dotc/transform/Mixin.scala b/src/dotty/tools/dotc/transform/Mixin.scala
index b0d1e5c5f..27cfc835a 100644
--- a/src/dotty/tools/dotc/transform/Mixin.scala
+++ b/src/dotty/tools/dotc/transform/Mixin.scala
@@ -182,27 +182,28 @@ class Mixin extends MiniPhaseTransform with SymTransformer { thisTransform =>
}
}
- def wasDeferred(sym: Symbol) =
- ctx.atPhase(thisTransform) { implicit ctx => sym is Deferred }
+ def was(sym: Symbol, flags: FlagSet) =
+ ctx.atPhase(thisTransform) { implicit ctx => sym is flags }
def traitInits(mixin: ClassSymbol): List[Tree] = {
var argNum = 0
def nextArgument() = initArgs.get(mixin) match {
case Some(arguments) =>
- try arguments(argNum) finally argNum += 1
+ val result = arguments(argNum)
+ argNum += 1
+ result
case None =>
- val (msg, pos) = impl.parents.find(_.tpe.typeSymbol == mixin) match {
- case Some(parent) => ("lacks argument list", parent.pos)
- case None =>
- ("""is indirectly implemented,
- |needs to be implemented directly so that arguments can be passed""".stripMargin,
- cls.pos)
- }
- ctx.error(i"parameterized $mixin $msg", pos)
+ assert(
+ impl.parents.forall(_.tpe.typeSymbol != mixin),
+ i"missing parameters for $mixin from $impl should have been caught in typer")
+ ctx.error(
+ em"""parameterized $mixin is indirectly implemented,
+ |needs to be implemented directly so that arguments can be passed""",
+ cls.pos)
EmptyTree
}
- for (getter <- mixin.info.decls.filter(getr => getr.isGetter && !wasDeferred(getr)).toList) yield {
+ for (getter <- mixin.info.decls.toList if getter.isGetter && !was(getter, Deferred)) yield {
val isScala2x = mixin.is(Scala2x)
def default = Underscore(getter.info.resultType)
def initial = transformFollowing(superRef(initializer(getter)).appliedToNone)
@@ -220,23 +221,23 @@ class Mixin extends MiniPhaseTransform with SymTransformer { thisTransform =>
if (isCurrent(getter) || getter.is(ExpandedName)) {
val rhs =
- if (ctx.atPhase(thisTransform)(implicit ctx => getter.is(ParamAccessor))) nextArgument()
+ if (was(getter, ParamAccessor)) nextArgument()
else if (isScala2x)
if (getter.is(Lazy, butNot = Module)) lazyGetterCall
else if (getter.is(Module))
New(getter.info.resultType, List(This(cls)))
else Underscore(getter.info.resultType)
- else transformFollowing(superRef(initializer(getter)).appliedToNone)
+ else initial
// transformFollowing call is needed to make memoize & lazy vals run
transformFollowing(DefDef(implementation(getter.asTerm), rhs))
}
- else if (isScala2x) EmptyTree
+ else if (isScala2x || was(getter, ParamAccessor)) EmptyTree
else initial
}
}
def setters(mixin: ClassSymbol): List[Tree] =
- for (setter <- mixin.info.decls.filter(setr => setr.isSetter && !wasDeferred(setr)).toList)
+ for (setter <- mixin.info.decls.filter(setr => setr.isSetter && !was(setr, Deferred)).toList)
yield transformFollowing(DefDef(implementation(setter.asTerm), unitLiteral.withPos(cls.pos)))
cpy.Template(impl)(
diff --git a/src/dotty/tools/dotc/transform/MixinOps.scala b/src/dotty/tools/dotc/transform/MixinOps.scala
index db89f939b..6cebf7197 100644
--- a/src/dotty/tools/dotc/transform/MixinOps.scala
+++ b/src/dotty/tools/dotc/transform/MixinOps.scala
@@ -41,11 +41,21 @@ class MixinOps(cls: ClassSymbol, thisTransform: DenotTransformer)(implicit ctx:
ctx.atPhase(thisTransform) { implicit ctx =>
cls.info.member(sym.name).hasAltWith(_.symbol == sym)
}
-
+
+  /** Does `method` need a forwarder in class `cls`?
+   *  A method needs a forwarder in these cases:
+   *   - there is a class defining a method with the same signature
+   *   - there are multiple traits defining a method with the same signature
+   */
def needsForwarder(meth: Symbol): Boolean = {
- lazy val overridenSymbols = meth.allOverriddenSymbols
- def needsDisambiguation = !overridenSymbols.forall(_ is Deferred)
- def hasNonInterfaceDefinition = overridenSymbols.forall(!_.owner.is(Trait))
+ lazy val competingMethods = cls.baseClasses.iterator
+ .filter(_ ne meth.owner)
+ .map(meth.overriddenSymbol)
+ .filter(_.exists)
+ .toList
+
+ def needsDisambiguation = competingMethods.exists(x=> !(x is Deferred)) // multiple implementations are available
+ def hasNonInterfaceDefinition = competingMethods.exists(!_.owner.is(Trait)) // there is a definition originating from class
meth.is(Method, butNot = PrivateOrAccessorOrDeferred) &&
isCurrent(meth) &&
(needsDisambiguation || hasNonInterfaceDefinition || meth.owner.is(Scala2x))
diff --git a/src/dotty/tools/dotc/transform/MoveStatics.scala b/src/dotty/tools/dotc/transform/MoveStatics.scala
new file mode 100644
index 000000000..5c2cd3145
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/MoveStatics.scala
@@ -0,0 +1,77 @@
+package dotty.tools.dotc.transform
+
+import dotty.tools.dotc.ast.{Trees, tpd}
+import dotty.tools.dotc.core.Annotations.Annotation
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.DenotTransformers.{InfoTransformer, SymTransformer}
+import dotty.tools.dotc.core.SymDenotations.SymDenotation
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.NameOps._
+import dotty.tools.dotc.core.{Flags, Names}
+import dotty.tools.dotc.core.Names.Name
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core.Types.MethodType
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+
+/** Move static methods from companion to the class itself */
+class MoveStatics extends MiniPhaseTransform with SymTransformer { thisTransformer =>
+
+ import tpd._
+ override def phaseName = "moveStatic"
+
+
+ def transformSym(sym: SymDenotation)(implicit ctx: Context): SymDenotation = {
+ if (sym.hasAnnotation(defn.ScalaStaticAnnot) && sym.owner.is(Flags.Module) && sym.owner.companionClass.exists) {
+ sym.owner.asClass.delete(sym.symbol)
+ sym.owner.companionClass.asClass.enter(sym.symbol)
+ val flags = if (sym.is(Flags.Method)) sym.flags else sym.flags | Flags.Mutable
+ sym.copySymDenotation(owner = sym.owner.companionClass, initFlags = flags)
+ }
+ else sym
+ }
+
+ override def transformStats(trees: List[Tree])(implicit ctx: Context, info: TransformerInfo): List[Tree] = {
+ if (ctx.owner.is(Flags.Package)) {
+ val (classes, others) = trees.partition(x => x.isInstanceOf[TypeDef] && x.symbol.isClass)
+ val pairs = classes.groupBy(_.symbol.name.stripModuleClassSuffix).asInstanceOf[Map[Name, List[TypeDef]]]
+
+ def rebuild(orig: TypeDef, newBody: List[Tree]): Tree = {
+ if (orig eq null) return EmptyTree
+
+ val staticFields = newBody.filter(x => x.isInstanceOf[ValDef] && x.symbol.hasAnnotation(defn.ScalaStaticAnnot)).asInstanceOf[List[ValDef]]
+ val newBodyWithStaticConstr =
+ if (staticFields.nonEmpty) {
+ /* do NOT put Flags.JavaStatic here. It breaks .enclosingClass */
+ val staticCostructor = ctx.newSymbol(orig.symbol, Names.STATIC_CONSTRUCTOR, Flags.Synthetic | Flags.Method | Flags.Private, MethodType(Nil, defn.UnitType))
+ staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot))
+ staticCostructor.entered
+
+ val staticAssigns = staticFields.map(x => Assign(ref(x.symbol), x.rhs.changeOwner(x.symbol, staticCostructor)))
+ tpd.DefDef(staticCostructor, Block(staticAssigns, tpd.unitLiteral)) :: newBody
+ } else newBody
+
+ val oldTemplate = orig.rhs.asInstanceOf[Template]
+ cpy.TypeDef(orig)(rhs = cpy.Template(orig.rhs)(oldTemplate.constr, oldTemplate.parents, oldTemplate.self, newBodyWithStaticConstr))
+ }
+
+ def move(module: TypeDef, companion: TypeDef): List[Tree] = {
+ if (!module.symbol.is(Flags.Module)) move(companion, module)
+ else {
+ val allMembers =
+ (if(companion ne null) {companion.rhs.asInstanceOf[Template].body} else Nil) ++
+ module.rhs.asInstanceOf[Template].body
+ val (newModuleBody, newCompanionBody) = allMembers.partition(x => {assert(x.symbol.exists); x.symbol.owner == module.symbol})
+ Trees.flatten(rebuild(companion, newCompanionBody) :: rebuild(module, newModuleBody) :: Nil)
+ }
+ }
+ val newPairs =
+ for ((name, classes) <- pairs)
+ yield
+ if (classes.tail.isEmpty)
+ if (classes.head.symbol.is(Flags.Module)) move(classes.head, null)
+ else List(rebuild(classes.head, classes.head.rhs.asInstanceOf[Template].body))
+ else move(classes.head, classes.tail.head)
+ Trees.flatten(newPairs.toList.flatten ++ others)
+ } else trees
+ }
+}
diff --git a/src/dotty/tools/dotc/transform/PatternMatcher.scala b/src/dotty/tools/dotc/transform/PatternMatcher.scala
index b4e32fa66..49c0eabec 100644
--- a/src/dotty/tools/dotc/transform/PatternMatcher.scala
+++ b/src/dotty/tools/dotc/transform/PatternMatcher.scala
@@ -1,6 +1,8 @@
package dotty.tools.dotc
package transform
+import scala.language.postfixOps
+
import TreeTransforms._
import core.Denotations._
import core.SymDenotations._
@@ -21,7 +23,8 @@ import ast.Trees._
import Applications._
import TypeApplications._
import SymUtils._, core.NameOps._
-import typer.Mode
+import core.Mode
+import patmat._
import dotty.tools.dotc.util.Positions.Position
import dotty.tools.dotc.core.Decorators._
@@ -34,7 +37,7 @@ import scala.reflect.internal.util.Collections
* elimRepeated is required
* TODO: outer tests are not generated yet.
*/
-class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTransformer =>
+class PatternMatcher extends MiniPhaseTransform with DenotTransformer {
import dotty.tools.dotc.ast.tpd._
override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = ref
@@ -50,20 +53,14 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
override def transformMatch(tree: Match)(implicit ctx: Context, info: TransformerInfo): Tree = {
val translated = new Translator()(ctx).translator.translateMatch(tree)
- translated.ensureConforms(tree.tpe)
- }
-
+ // check exhaustivity and unreachability
+ val engine = new SpaceEngine
+ if (engine.checkable(tree)) {
+ engine.checkExhaustivity(tree)
+ engine.checkRedundancy(tree)
+ }
- override def transformTry(tree: tpd.Try)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
- val selector =
- ctx.newSymbol(ctx.owner, ctx.freshName("ex").toTermName, Flags.Synthetic, defn.ThrowableType, coord = tree.pos)
- val sel = Ident(selector.termRef).withPos(tree.pos)
- val rethrow = tpd.CaseDef(EmptyTree, EmptyTree, Throw(ref(selector)))
- val newCases = tpd.CaseDef(
- Bind(selector, Underscore(selector.info).withPos(tree.pos)),
- EmptyTree,
- transformMatch(tpd.Match(sel, tree.cases ::: rethrow :: Nil)))
- cpy.Try(tree)(tree.expr, newCases :: Nil, tree.finalizer)
+ translated.ensureConforms(tree.tpe)
}
class Translator(implicit ctx: Context) {
@@ -80,10 +77,10 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// assert(owner ne null); assert(owner ne NoSymbol)
def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x", owner: Symbol = ctx.owner) = {
ctr += 1
- ctx.newSymbol(owner, ctx.freshName(prefix + ctr).toTermName, Flags.Synthetic, tp, coord = pos)
+ ctx.newSymbol(owner, ctx.freshName(prefix + ctr).toTermName, Flags.Synthetic | Flags.Case, tp, coord = pos)
}
- def newSynthCaseLabel(name: String, tpe:Type, owner: Symbol = ctx.owner) =
+ def newSynthCaseLabel(name: String, tpe: Type, owner: Symbol = ctx.owner) =
ctx.newSymbol(owner, ctx.freshName(name).toTermName, Flags.Label | Flags.Synthetic | Flags.Method, tpe).asTerm
//NoSymbol.newLabel(freshName(name), NoPosition) setFlag treeInfo.SYNTH_CASE_FLAGS
@@ -151,30 +148,28 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
}
+ object Rebindings {
+ def apply(from: Symbol, to: Symbol) = new Rebindings(List(from), List(ref(to)))
+ // requires sameLength(from, to)
+ def apply(from: List[Symbol], to: List[Tree]) =
+ if (from nonEmpty) new Rebindings(from, to) else NoRebindings
+ }
- object Rebindings {
- def apply(from: Symbol, to: Symbol) = new Rebindings(List(from), List(ref(to)))
- // requires sameLength(from, to)
- def apply(from: List[Symbol], to: List[Tree]) =
- if (from nonEmpty) new Rebindings(from, to) else NoRebindings
- }
-
- class Rebindings(val lhs: List[Symbol], val rhs: List[Tree]) {
- def >>(other: Rebindings) = {
- if (other eq NoRebindings) this
- else if (this eq NoRebindings) other
- else {
- assert((lhs.toSet ++ other.lhs.toSet).size == lhs.length + other.lhs.length, "no double assignments")
- new Rebindings(this.lhs ++ other.lhs, this.rhs ++ other.rhs)
- }
- }
-
- def emitValDefs: List[ValDef] = {
- Collections.map2(lhs, rhs)((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info)))
+ class Rebindings(val lhs: List[Symbol], val rhs: List[Tree]) {
+ def >>(other: Rebindings) = {
+ if (other eq NoRebindings) this
+ else if (this eq NoRebindings) other
+ else {
+ assert((lhs.toSet ++ other.lhs.toSet).size == lhs.length + other.lhs.length, "no double assignments")
+ new Rebindings(this.lhs ++ other.lhs, this.rhs ++ other.rhs)
}
}
- object NoRebindings extends Rebindings(Nil, Nil)
+ def emitValDefs: List[ValDef] = {
+ Collections.map2(lhs, rhs)((symbol, tree) => ValDef(symbol.asTerm, tree.ensureConforms(symbol.info)))
+ }
+ }
+ object NoRebindings extends Rebindings(Nil, Nil)
trait OptimizedCodegen extends CodegenCore {
override def codegen: AbsCodegen = optimizedCodegen
@@ -195,12 +190,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
//val matchRes = ctx.newSymbol(NoSymbol, ctx.freshName("matchRes").toTermName, Flags.Synthetic | Flags.Param | Flags.Label | Flags.Method, restpe /*withoutAnnotations*/)
//NoSymbol.newValueParameter(newTermName("x"), NoPosition, newFlags = SYNTHETIC) setInfo restpe.withoutAnnotations
- val caseSyms = cases.scanLeft(ctx.owner.asTerm)((curOwner, nextTree) => newSynthCaseLabel(ctx.freshName("case"), MethodType(Nil, restpe), curOwner)).tail
+
+ val caseSyms: List[TermSymbol] = cases.scanLeft(ctx.owner.asTerm)((curOwner, nextTree) => newSynthCaseLabel(ctx.freshName("case"), MethodType(Nil, restpe), curOwner)).tail
// must compute catchAll after caseLabels (side-effects nextCase)
// catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
// if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
- val catchAllDef = matchFailGen.map { _(scrutSym)}
+ val catchAllDef = matchFailGen.map { _(scrutSym) }
.getOrElse(Throw(New(defn.MatchErrorType, List(ref(scrutSym)))))
val matchFail = newSynthCaseLabel(ctx.freshName("matchFail"), MethodType(Nil, restpe))
@@ -210,14 +206,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val caseDefs = (cases zip caseSyms zip nextCases).foldRight[Tree](catchAllDefBody) {
// dotty deviation
//case (((mkCase, sym), nextCase), acc) =>
- (x:(((Casegen => Tree), TermSymbol), Tree), acc: Tree) => x match {
-
- case ((mkCase, sym), nextCase) =>
- val body = mkCase(new OptimizedCasegen(nextCase)).ensureConforms(restpe)
-
- DefDef(sym, _ => Block(List(acc), body))
- }}
+ (x: (((Casegen => Tree), TermSymbol), Tree), acc: Tree) => x match {
+ case ((mkCase, sym), nextCase) =>
+ val body = mkCase(new OptimizedCasegen(nextCase)).ensureConforms(restpe)
+ DefDef(sym, _ => Block(List(acc), body))
+ }
+ }
// scrutSym == NoSymbol when generating an alternatives matcher
// val scrutDef = scrutSym.fold(List[Tree]())(ValDef(_, scrut) :: Nil) // for alternatives
@@ -258,9 +253,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
)
} else {
assert(defn.isProductSubType(prev.tpe))
- Block(
- List(ValDef(b.asTerm, prev)),
- next //Substitution(b, ref(prevSym))(next)
+ val nullCheck: Tree = prev.select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+ ifThenElseZero(
+ nullCheck,
+ Block(
+ List(ValDef(b.asTerm, prev)),
+ next //Substitution(b, ref(prevSym))(next)
+ )
)
}
}
@@ -288,7 +287,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
next
))
}
-
}
}
final case class Suppression(exhaustive: Boolean, unreachable: Boolean)
@@ -303,8 +301,141 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree])
def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {}
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Symbol => Tree], unchecked: Boolean): Option[Tree] =
- None // todo
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Symbol => Tree], unchecked: Boolean): Option[Tree] = {
+ // TODO Deal with guards?
+
+ def isSwitchableType(tpe: Type): Boolean =
+ (tpe isRef defn.IntClass) ||
+ (tpe isRef defn.ByteClass) ||
+ (tpe isRef defn.ShortClass) ||
+ (tpe isRef defn.CharClass)
+
+ object IntEqualityTestTreeMaker {
+ def unapply(treeMaker: EqualityTestTreeMaker): Option[Int] = treeMaker match {
+ case EqualityTestTreeMaker(`scrutSym`, _, Literal(const), _) =>
+ if (const.isIntRange) Some(const.intValue)
+ else None
+ case _ =>
+ None
+ }
+ }
+
+ def isSwitchCase(treeMakers: List[TreeMaker]): Boolean = treeMakers match {
+ // case 5 =>
+ case List(IntEqualityTestTreeMaker(_), _: BodyTreeMaker) =>
+ true
+
+ // case 5 | 6 =>
+ case List(AlternativesTreeMaker(`scrutSym`, alts, _), _: BodyTreeMaker) =>
+ alts.forall {
+ case List(IntEqualityTestTreeMaker(_)) => true
+ case _ => false
+ }
+
+ // case _ =>
+ case List(_: BodyTreeMaker) =>
+ true
+
+ /* case x @ pat =>
+ * This includes:
+ * case x =>
+ * case x @ 5 =>
+ * case x @ (5 | 6) =>
+ */
+ case (_: SubstOnlyTreeMaker) :: rest =>
+ isSwitchCase(rest)
+
+ case _ =>
+ false
+ }
+
+ /* (Nil, body) means that `body` is the default case
+ * It's a bit hacky but it simplifies manipulations.
+ */
+ def extractSwitchCase(treeMakers: List[TreeMaker]): (List[Int], BodyTreeMaker) = treeMakers match {
+ // case 5 =>
+ case List(IntEqualityTestTreeMaker(intValue), body: BodyTreeMaker) =>
+ (List(intValue), body)
+
+ // case 5 | 6 =>
+ case List(AlternativesTreeMaker(_, alts, _), body: BodyTreeMaker) =>
+ val intValues = alts.map {
+ case List(IntEqualityTestTreeMaker(intValue)) => intValue
+ }
+ (intValues, body)
+
+ // case _ =>
+ case List(body: BodyTreeMaker) =>
+ (Nil, body)
+
+ // case x @ pat =>
+ case (_: SubstOnlyTreeMaker) :: rest =>
+ /* Rebindings have been propagated, so the eventual body in `rest`
+ * contains all the necessary information. The substitution can be
+ * dropped at this point.
+ */
+ extractSwitchCase(rest)
+ }
+
+ def doOverlap(a: List[Int], b: List[Int]): Boolean =
+ a.exists(b.contains _)
+
+ def makeSwitch(valuesToCases: List[(List[Int], BodyTreeMaker)]): Tree = {
+ def genBody(body: BodyTreeMaker): Tree = {
+ val valDefs = body.rebindings.emitValDefs
+ if (valDefs.isEmpty) body.body
+ else Block(valDefs, body.body)
+ }
+
+ val intScrut =
+ if (pt isRef defn.IntClass) ref(scrutSym)
+ else Select(ref(scrutSym), nme.toInt)
+
+ val (normalCases, defaultCaseAndRest) = valuesToCases.span(_._1.nonEmpty)
+
+ val newCases = for {
+ (values, body) <- normalCases
+ } yield {
+ val literals = values.map(v => Literal(Constant(v)))
+ val pat =
+ if (literals.size == 1) literals.head
+ else Alternative(literals)
+ CaseDef(pat, EmptyTree, genBody(body))
+ }
+
+ val catchAllDef = {
+ if (defaultCaseAndRest.isEmpty) {
+ matchFailGenOverride.fold[Tree](
+ Throw(New(defn.MatchErrorType, List(ref(scrutSym)))))(
+ _(scrutSym))
+ } else {
+ /* After the default case, assuming the IR even allows anything,
+ * things are unreachable anyway and can be removed.
+ */
+ genBody(defaultCaseAndRest.head._2)
+ }
+ }
+ val defaultCase = CaseDef(Underscore(defn.IntType), EmptyTree, catchAllDef)
+
+ Match(intScrut, newCases :+ defaultCase)
+ }
+
+ val dealiased = scrut.tpe.widenDealias
+ if (isSwitchableType(dealiased) && cases.forall(isSwitchCase)) {
+ val valuesToCases = cases.map(extractSwitchCase)
+ val values = valuesToCases.map(_._1)
+ if (values.tails.exists { tail => tail.nonEmpty && tail.tail.exists(doOverlap(_, tail.head)) }) {
+ // TODO Deal with overlapping cases (mostly useless without guards)
+ None
+ } else {
+ Some(makeSwitch(valuesToCases))
+ }
+ } else {
+ if (dealiased hasAnnotation defn.SwitchAnnot)
+ ctx.warning("failed to emit switch for `@switch` annotated match", scrut.pos)
+ None
+ }
+ }
// for catch (no need to customize match failure)
def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
@@ -512,7 +643,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val checkedLength: Option[Int],
val prevBinder: Symbol,
val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
+ ) extends FunTreeMaker with PreserveSubPatBinders {
def extraStoredBinders: Set[Symbol] = Set()
@@ -534,9 +665,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
bindSubPats(next)
}
- if (extractorReturnsBoolean) casegen.flatMapCond(extractor, unitLiteral, nextBinder, condAndNext)
- else casegen.flatMap(extractor, nextBinder, condAndNext) // getType?
-
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, unitLiteral, nextBinder, condAndNext)
+ else casegen.flatMap(extractor, nextBinder, condAndNext) // getType?
}
override def toString = "X" + ((extractor, nextBinder.name))
@@ -570,7 +700,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val mutableBinders: List[Symbol],
binderKnownNonNull: Boolean,
val ignoredSubPatBinders: Set[Symbol]
- ) extends FunTreeMaker with PreserveSubPatBinders {
+ ) extends FunTreeMaker with PreserveSubPatBinders {
val nextBinder = prevBinder // just passing through
@@ -579,6 +709,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck: Tree = ref(prevBinder).select(defn.Object_ne).appliedTo(Literal(Constant(null)))
+
val cond: Option[Tree] =
if (binderKnownNonNull) extraCond
else extraCond.map(nullCheck.select(defn.Boolean_&&).appliedTo).orElse(Some(nullCheck))
@@ -641,9 +772,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def outerTest(testedBinder: Symbol, expectedTp: Type): Tree = {
val expectedOuter = expectedTp.normalizedPrefix match {
- //case ThisType(clazz) => This(clazz)
//case NoType => Literal(Constant(true)) // fallback for SI-6183 todo?
- case pre => ref(pre.termSymbol)
+ case pre: SingletonType => singleton(pre)
}
// ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix`
@@ -652,10 +782,11 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val expectedClass = expectedTp.dealias.classSymbol.asClass
val test = codegen._asInstanceOf(testedBinder, expectedTp)
+ // TODO: Use nme.OUTER_SELECT, like the Inliner does?
val outerAccessorTested = ctx.atPhase(ctx.explicitOuterPhase.next) { implicit ctx =>
- ExplicitOuter.ensureOuterAccessors(expectedClass)
- test.select(ExplicitOuter.outerAccessor(expectedClass)).select(defn.Object_eq).appliedTo(expectedOuter)
- }
+ ExplicitOuter.ensureOuterAccessors(expectedClass)
+ test.select(ExplicitOuter.outerAccessor(expectedClass)).select(defn.Object_eq).appliedTo(expectedOuter)
+ }
outerAccessorTested
}
}
@@ -719,7 +850,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val nextBinder = afterTest.asTerm
- def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol) = {
+ def needsOuterTest(patType: Type, selType: Type, currentOwner: Symbol): Boolean = {
// See the test for SI-7214 for motivation for dealias. Later `treeCondStrategy#outerTest`
// generates an outer test based on `patType.prefix` with automatically dealises.
patType.dealias match {
@@ -737,7 +868,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val np = expectedTp.normalizedPrefix
val ts = np.termSymbol
(ts ne NoSymbol) && needsOuterTest(expectedTp, testedBinder.info, ctx.owner)
-
}
// the logic to generate the run-time test that follows from the fact that
@@ -777,7 +907,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
if (isExpectedReferenceType) mkNullTest
else mkTypeTest
)
- )
+ )
// true when called to type-test the argument to an extractor
// don't do any fancy equality checking, just test the type
@@ -791,7 +921,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
and(mkEqualsTest(ref(tref.symbol.companionModule)), mkTypeTest) // must use == to support e.g. List() == Nil
case ConstantType(Constant(null)) if isAnyRef => mkEqTest(expTp(Literal(Constant(null))))
case ConstantType(const) => mkEqualsTest(expTp(Literal(const)))
- case t:SingletonType => mkEqTest(singleton(expectedTp)) // SI-4577, SI-4897
+ case t: SingletonType => mkEqTest(singleton(expectedTp)) // SI-4577, SI-4897
//case ThisType(sym) => mkEqTest(expTp(This(sym)))
case _ => mkDefault
}
@@ -921,7 +1051,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
val (cases, toHoist) = optimizeCases(scrutSym, casesRebindingPropagated, pt)
-
val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases.map(x => combineExtractors(x) _), synthCatchAll)
if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
@@ -961,8 +1090,9 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
*/
object WildcardPattern {
def unapply(pat: Tree): Boolean = pat match {
+ case Typed(_, arg) if arg.tpe.isRepeatedParam => true
case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol!
- case t if (tpd.isWildcardArg(t)) => true
+ case t if (tpd.isWildcardArg(t)) => true
case x: Ident => isVarPattern(x)
case Alternative(ps) => ps forall unapply
case EmptyTree => true
@@ -983,7 +1113,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
object SymbolBound {
def unapply(tree: Tree): Option[(Symbol, Tree)] = tree match {
case Bind(_, expr) if tree.symbol.exists => Some(tree.symbol -> expr)
- case _ => None
+ case _ => None
}
}
@@ -996,13 +1126,13 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
final case class BoundTree(binder: Symbol, tree: Tree) {
private lazy val extractor = ExtractorCall(tree, binder)
- def pos = tree.pos
- def tpe = binder.info.widenDealias
- def pt = unbound match {
- // case Star(tpt) => this glbWith seqType(tpt.tpe) dd todo:
- case TypeBound(tpe) => tpe
- case tree => tree.tpe
- }
+ def pos = tree.pos
+ def tpe = binder.info.widenDealias
+ def pt = unbound match {
+ // case Star(tpt) => this glbWith seqType(tpt.tpe) dd todo:
+ case TypeBound(tpe) => tpe
+ case tree => tree.tpe
+ }
def glbWith(other: Type) = ctx.typeComparer.glb(tpe :: other :: Nil)// .normalize
@@ -1024,8 +1154,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
object TypeBound {
def unapply(tree: Tree): Option[Type] = tree match {
- case Typed(_, _) => Some(tree.typeOpt)
- case _ => None
+ case Typed(_, arg) if !arg.tpe.isRepeatedParam => Some(tree.typeOpt)
+ case _ => None
}
}
@@ -1047,9 +1177,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
private def translatedAlts(alts: List[Tree]) = alts map (alt => rebindTo(alt).translate())
private def noStep() = step()()
- private def unsupportedPatternMsg = sm"""
- |unsupported pattern: ${tree.show} / $this (this is a scalac bug.)
- |""".trim
+ private def unsupportedPatternMsg =
+ i"unsupported pattern: ${tree.show} / $this (this is a scalac bug.)"
// example check: List[Int] <:< ::[Int]
private def extractorStep(): TranslationStep = {
@@ -1071,7 +1200,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// Statically conforms to paramType
if (tpe <:< paramType) treeMaker(binder, false, pos, tpe) :: Nil
else typeTest :: extraction :: Nil
- )
+ )
step(makers: _*)(extractor.subBoundTrees: _*)
}
@@ -1090,7 +1219,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// [7] symbol-less bind patterns - this happens in certain ill-formed programs, there'll be an error later
// don't fail here though (or should we?)
def nextStep(): TranslationStep = tree match {
- case _: UnApply | _: Apply| Typed(_: UnApply | _: Apply, _) => extractorStep()
+ case _: UnApply | _: Apply | Typed(_: UnApply | _: Apply, _) => extractorStep()
case SymbolAndTypeBound(sym, tpe) => typeTestStep(sym, tpe)
case TypeBound(tpe) => typeTestStep(binder, tpe)
case SymbolBound(sym, expr) => bindingStep(sym, expr)
@@ -1101,7 +1230,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
def translate(): List[TreeMaker] = nextStep() merge (_.translate())
-
private def concreteType = tpe.bounds.hi
private def unbound = unbind(tree)
private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)"
@@ -1123,34 +1251,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
case _ => false
}
- def elimAnonymousClass(t: Type) = t match {
- case t:TypeRef if t.symbol.isAnonymousClass =>
- t.symbol.asClass.typeRef.asSeenFrom(t.prefix, t.symbol.owner)
- case _ =>
- t
- }
-
- /** Is this pattern node a catch-all or type-test pattern? */
- def isCatchCase(cdef: CaseDef) = cdef match {
- case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) =>
- isSimpleThrowable(tpt.tpe)
- case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) =>
- isSimpleThrowable(tpt.tpe)
- case _ =>
- isDefaultCase(cdef)
- }
-
- private def isSimpleThrowable(tp: Type)(implicit ctx: Context): Boolean = tp match {
- case tp @ TypeRef(pre, _) =>
- val sym = tp.symbol
- (pre == NoPrefix || pre.widen.typeSymbol.isStatic) &&
- (sym.derivesFrom(defn.ThrowableClass)) && /* bq */ !(sym is Flags.Trait)
- case _ =>
- false
- }
-
-
-
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
*
@@ -1159,20 +1259,21 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
*
* NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed
* thus, you must typecheck the result (and that will in turn translate nested matches)
- * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
+ * this could probably be optimized... (but note that the matchStrategy must be solved for each nested patternmatch)
*/
def translateMatch(match_ : Match): Tree = {
val Match(sel, cases) = match_
- val selectorTp = elimAnonymousClass(sel.tpe.widen/*withoutAnnotations*/)
+ val selectorTp = sel.tpe.widen.deAnonymize/*withoutAnnotations*/
val selectorSym = freshSym(sel.pos, selectorTp, "selector")
val (nonSyntheticCases, defaultOverride) = cases match {
case init :+ last if isSyntheticDefaultCase(last) => (init, Some(((scrut: Symbol) => last.body)))
- case _ => (cases, None)
+ case _ => (cases, None)
}
+
// checkMatchVariablePatterns(nonSyntheticCases) // only used for warnings
// we don't transform after uncurry
@@ -1201,46 +1302,6 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
Block(List(ValDef(selectorSym, sel)), combined)
}
- // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
- // we don't have a global scrutinee -- the caught exception must be bound in each of the casedefs
- // there's no need to check the scrutinee for null -- "throw null" becomes "throw new NullPointerException"
- // try to simplify to a type-based switch, or fall back to a catch-all case that runs a normal pattern match
- // unlike translateMatch, we type our result before returning it
- /*def translateTry(caseDefs: List[CaseDef], pt: Type, pos: Position): List[CaseDef] =
- // if they're already simple enough to be handled by the back-end, we're done
- if (caseDefs forall isCatchCase) caseDefs
- else {
- val swatches = { // switch-catches
- val bindersAndCases = caseDefs map { caseDef =>
- // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
- // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
- val caseScrutSym = freshSym(pos, pureType(defn.ThrowableType))
- (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
- }
-
- for(cases <- emitTypeSwitch(bindersAndCases, pt).toList
- if cases forall isCatchCase; // must check again, since it's not guaranteed -- TODO: can we eliminate this? e.g., a type test could test for a trait or a non-trivial prefix, which are not handled by the back-end
- cse <- cases) yield /*fixerUpper(matchOwner, pos)*/(cse).asInstanceOf[CaseDef]
- }
-
- val catches = if (swatches.nonEmpty) swatches else {
- val scrutSym = freshSym(pos, pureType(defn.ThrowableType))
- val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
-
- val exSym = freshSym(pos, pureType(defn.ThrowableType), "ex")
-
- List(
- CaseDef(
- Bind(exSym, Ident(??? /*nme.WILDCARD*/)), // TODO: does this need fixing upping?
- EmptyTree,
- combineCasesNoSubstOnly(ref(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some((scrut: Symbol) => Throw(ref(exSym))))
- )
- )
- }
-
- /*typer.typedCases(*/catches/*, defn.ThrowableType, WildcardType)*/
- }*/
-
/** The translation of `pat if guard => body` has two aspects:
* 1) the substitution due to the variables bound by patterns
* 2) the combination of the extractor calls using `flatMap`.
@@ -1269,7 +1330,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
* a function that will take care of binding and substitution of the next ast (to the right).
*
*/
- def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef) = {
+ def translateCase(scrutSym: Symbol, pt: Type)(caseDef: CaseDef): List[TreeMaker] = {
val CaseDef(pattern, guard, body) = caseDef
translatePattern(BoundTree(scrutSym, pattern)) ++ translateGuard(guard) :+ translateBody(body, pt)
}
@@ -1338,7 +1399,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
object ExtractorCall {
// TODO: check unargs == args
- def apply(tree: Tree, binder: Symbol): ExtractorCall = {
+ def apply(tree: Tree, binder: Symbol): ExtractorCall = {
tree match {
case UnApply(unfun, implicits, args) =>
val castedBinder = ref(binder).ensureConforms(tree.tpe)
@@ -1417,8 +1478,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
productSelectors(binder.info)
else binder.caseAccessors
val res =
- if (accessors.isDefinedAt(i - 1)) ref(binder).select(accessors(i - 1).name)
- else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
+ if (accessors.isDefinedAt(i - 1)) ref(binder).select(accessors(i - 1).name)
+ else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN
val rsym = res.symbol // just for debugging
res
}
@@ -1430,7 +1491,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
if (!aligner.isStar) Nil
else if (expectedLength == 0) seqTree(binder) :: Nil
else genDrop(binder, expectedLength)
- )
+ )
// this error-condition has already been checked by checkStarPatOK:
// if (isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if (lastIsStar) 1 else 0) == totalArity, "(resultInMonad, ts, subPatTypes, subPats)= " +(resultInMonad, ts, subPatTypes, subPats))
@@ -1441,7 +1502,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
( productElemsToN(binder, firstIndexingBinder)
++ genTake(binder, expectedLength)
++ lastTrees
- ).toList
+ ).toList
}
// the trees that select the subpatterns on the extractor's result, referenced by `binder`
@@ -1449,7 +1510,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
protected def subPatRefs(binder: Symbol): List[Tree] = {
val refs = if (totalArity > 0 && isSeq) subPatRefsSeq(binder)
else if (binder.info.member(nme._1).exists && !isSeq) productElemsToN(binder, totalArity)
- else ref(binder):: Nil
+ else ref(binder) :: Nil
refs
}
@@ -1480,7 +1541,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// (otherwise equality is required)
def compareOp: (Tree, Tree) => Tree =
if (aligner.isStar) _.select(defn.Int_>=).appliedTo(_)
- else _.select(defn.Int_==).appliedTo(_)
+ else _.select(defn.Int_==).appliedTo(_)
// `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero`
(seqTree(binder).select(defn.Any_!=).appliedTo(Literal(Constant(null)))).select(defn.Boolean_&&).appliedTo(compareOp(checkExpectedLength, Literal(Constant(0))))
@@ -1539,7 +1600,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
// can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely
// wrong when isSeq, and resultInMonad should always be correct since it comes
// directly from the extractor's result type
- val binder = freshSym(pos, resultInMonad)
+ val binder = freshSym(pos, resultInMonad)
val spb = subPatBinders
ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)(
spb,
@@ -1757,6 +1818,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def expectedTypes = typedPatterns map (_.tpe)
def unexpandedFormals = extractor.varargsTypes
}
+
trait ScalacPatternExpander extends PatternExpander[Tree, Type] {
def NoPattern = EmptyTree
def NoType = core.Types.NoType
@@ -1774,7 +1836,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
( typeOfMemberNamedHead(seq)
orElse typeOfMemberNamedApply(seq)
orElse seq.elemType
- )
+ )
}
def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor = {
ctx.log(s"newExtractor($whole, $fixed, $repeated")
@@ -1801,7 +1863,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
method.paramTypess.head match {
case init :+ last if last.isRepeatedParam => newExtractor(whole, init, repeatedFromVarargs(last))
- case tps => newExtractor(whole, tps, NoRepeated)
+ case tps => newExtractor(whole, tps, NoRepeated)
}
}
@@ -1812,15 +1874,14 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
* Unfortunately the MethodType does not carry the information of whether
* it was unapplySeq, so we have to funnel that information in separately.
*/
- def unapplyMethodTypes(tree:Tree, fun: Tree, args:List[Tree], resultType:Type, isSeq: Boolean): Extractor = {
+ def unapplyMethodTypes(tree: Tree, fun: Tree, args: List[Tree], resultType: Type, isSeq: Boolean): Extractor = {
_id = _id + 1
- val whole = tree.tpe// see scaladoc for Trees.Unapply
+ val whole = tree.tpe // see scaladoc for Trees.Unapply
// fun.tpe.widen.paramTypess.headOption.flatMap(_.headOption).getOrElse(NoType)//firstParamType(method)
val resultOfGet = extractorMemberType(resultType, nme.get)
- //println(s"${_id}unapplyArgs(${result.widen}")
- val expanded:List[Type] = /*(
+ val expanded: List[Type] = /*(
if (result =:= defn.BooleanType) Nil
else if (defn.isProductSubType(result)) productSelectorTypes(result)
else if (result.classSymbol is Flags.CaseClass) result.decls.filter(x => x.is(Flags.CaseAccessor) && x.is(Flags.Method)).map(_.info).toList
@@ -1855,7 +1916,7 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
def offering = extractor.offeringString
def symString = tree.symbol.showLocated
def offerString = if (extractor.isErroneous) "" else s" offering $offering"
- def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + prodArity
+ def arityExpected = (if (extractor.hasSeq) "at least " else "") + prodArity
def err(msg: String) = ctx.error(msg, tree.pos)
def warn(msg: String) = ctx.warning(msg, tree.pos)
@@ -1882,10 +1943,10 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
}
- def apply(tree:Tree, sel: Tree, args: List[Tree], resultType: Type): Aligned = {
+ def apply(tree: Tree, sel: Tree, args: List[Tree], resultType: Type): Aligned = {
val fn = sel match {
case Applied(fn) => fn
- case _ => sel
+ case _ => sel
}
val patterns = newPatterns(args)
val isSeq = sel.symbol.name == nme.unapplySeq
@@ -1915,8 +1976,8 @@ class PatternMatcher extends MiniPhaseTransform with DenotTransformer {thisTrans
}
def apply(tree: Tree, resultType: Type): Aligned = tree match {
- case Typed(tree, _) => apply(tree, resultType)
- case Apply(fn, args) => apply(tree, fn, args, resultType)
+ case Typed(tree, _) => apply(tree, resultType)
+ case Apply(fn, args) => apply(tree, fn, args, resultType)
case UnApply(fn, implicits, args) => apply(tree, fn, args, resultType)
}
}
diff --git a/src/dotty/tools/dotc/transform/Pickler.scala b/src/dotty/tools/dotc/transform/Pickler.scala
index c5b223d53..90e62b65c 100644
--- a/src/dotty/tools/dotc/transform/Pickler.scala
+++ b/src/dotty/tools/dotc/transform/Pickler.scala
@@ -11,7 +11,6 @@ import Periods._
import Phases._
import Symbols._
import Flags.Module
-import util.SourceFile
import collection.mutable
/** This phase pickles trees */
@@ -46,12 +45,11 @@ class Pickler extends Phase {
unit.picklers += (cls -> pickler)
val treePkl = pickler.treePkl
treePkl.pickle(tree :: Nil)
+ treePkl.compactify()
pickler.addrOfTree = treePkl.buf.addrOfTree
pickler.addrOfSym = treePkl.addrOfSym
- if (unit.source.exists)
- pickleSourcefile(pickler, unit.source)
if (tree.pos.exists)
- new PositionPickler(pickler, treePkl.buf.addrOfTree).picklePositions(tree :: Nil, tree.pos)
+ new PositionPickler(pickler, treePkl.buf.addrOfTree).picklePositions(tree :: Nil)
def rawBytes = // not needed right now, but useful to print raw format.
pickler.assembleParts().iterator.grouped(10).toList.zipWithIndex.map {
@@ -65,12 +63,6 @@ class Pickler extends Phase {
}
}
- private def pickleSourcefile(pickler: TastyPickler, source: SourceFile): Unit = {
- val buf = new TastyBuffer(10)
- pickler.newSection("Sourcefile", buf)
- buf.writeNat(pickler.nameBuffer.nameIndex(source.file.path).index)
- }
-
override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
val result = super.runOn(units)
if (ctx.settings.YtestPickler.value)
@@ -89,17 +81,17 @@ class Pickler extends Phase {
}
pickling.println("************* entered toplevel ***********")
for ((cls, unpickler) <- unpicklers) {
- val (unpickled, source) = unpickler.body(readPositions = true)
- testSame(i"$unpickled%\n%", beforePickling(cls), cls, source)
+ val unpickled = unpickler.body(ctx.addMode(Mode.ReadPositions))
+ testSame(i"$unpickled%\n%", beforePickling(cls), cls)
}
}
- private def testSame(unpickled: String, previous: String, cls: ClassSymbol, source: SourceFile)(implicit ctx: Context) =
+ private def testSame(unpickled: String, previous: String, cls: ClassSymbol)(implicit ctx: Context) =
if (previous != unpickled) {
output("before-pickling.txt", previous)
output("after-pickling.txt", unpickled)
- ctx.error(s"""pickling difference for ${cls.fullName} in $source, for details:
+ ctx.error(i"""pickling difference for ${cls.fullName} in ${cls.sourceFile}, for details:
|
- | diff before-pickling.txt after-pickling.txt""".stripMargin)
+ | diff before-pickling.txt after-pickling.txt""")
}
}
diff --git a/src/dotty/tools/dotc/transform/PostTyper.scala b/src/dotty/tools/dotc/transform/PostTyper.scala
index 01f9f6317..51851a589 100644
--- a/src/dotty/tools/dotc/transform/PostTyper.scala
+++ b/src/dotty/tools/dotc/transform/PostTyper.scala
@@ -13,6 +13,7 @@ import Types._, Contexts._, Constants._, Names._, NameOps._, Flags._, DenotTrans
import SymDenotations._, Symbols._, StdNames._, Annotations._, Trees._, Scopes._, Denotations._
import util.Positions._
import Decorators._
+import config.Printers.typr
import Symbols._, TypeUtils._
/** A macro transform that runs immediately after typer and that performs the following functions:
@@ -36,6 +37,10 @@ import Symbols._, TypeUtils._
*
* (8) Replaces self references by name with `this`
*
+ * (9) Adds SourceFile annotations to all top-level classes and objects
+ *
+ * (10) Adds Child annotations to all sealed classes
+ *
* The reason for making this a macro transform is that some functions (in particular
* super and protected accessors and instantiation checks) are naturally top-down and
* don't lend themselves to the bottom-up approach of a mini phase. The other two functions
@@ -88,7 +93,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
*
* should behave differently.
*
- * O1.x should have the same effect as { println("43"; 42 }
+ * O1.x should have the same effect as { println("43"); 42 }
*
* whereas
*
@@ -98,10 +103,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
* purity of the prefix unless the selection goes to an inline val.
*/
private def normalizeTree(tree: Tree)(implicit ctx: Context): Tree = tree match {
- case tree: TypeTree => tree
- case TypeApply(fn, args) =>
- Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType])
- tree
+ case _: TypeTree | _: TypeApply => tree
case _ =>
if (tree.isType) {
Checking.typeChecker.traverse(tree)
@@ -113,6 +115,17 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
}
}
+ /** If the type of `tree` is a TermRefWithSignature with an underdefined
+ * signature, narrow the type by re-computing the signature (which should
+ * be fully-defined by now).
+ */
+ private def fixSignature[T <: Tree](tree: T)(implicit ctx: Context): T = tree.tpe match {
+ case tpe: TermRefWithSignature if tpe.signature.isUnderDefined =>
+ typr.println(i"fixing $tree with type ${tree.tpe.widen.toString} with sig ${tpe.signature} to ${tpe.widen.signature}")
+ tree.withType(TermRef.withSig(tpe.prefix, tpe.name, tpe.widen.signature)).asInstanceOf[T]
+ case _ => tree
+ }
+
class PostTyperTransformer extends Transformer {
private var inJavaAnnot: Boolean = false
@@ -176,9 +189,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
}
val (tycon, args) = decompose(tree)
tycon.tpe.widen match {
- case PolyType(pnames) =>
+ case tp: PolyType =>
val (namedArgs, otherArgs) = args.partition(isNamedArg)
- val args1 = reorderArgs(pnames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs)
+ val args1 = reorderArgs(tp.paramNames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs)
TypeApply(tycon, args1).withPos(tree.pos).withType(tree.tpe)
case _ =>
tree
@@ -190,10 +203,14 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
case tree: Ident =>
tree.tpe match {
case tpe: ThisType => This(tpe.cls).withPos(tree.pos)
- case _ => paramFwd.adaptRef(tree)
+ case _ => paramFwd.adaptRef(fixSignature(tree))
}
case tree: Select =>
- transformSelect(paramFwd.adaptRef(tree), Nil)
+ transformSelect(paramFwd.adaptRef(fixSignature(tree)), Nil)
+ case tree: Super =>
+ if (ctx.owner.enclosingMethod.isInlineMethod)
+ ctx.error(em"super not allowed in inline ${ctx.owner}", tree.pos)
+ super.transform(tree)
case tree: TypeApply =>
val tree1 @ TypeApply(fn, args) = normalizeTypeArgs(tree)
Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType])
@@ -224,7 +241,20 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
transformMemberDef(tree)
val sym = tree.symbol
val tree1 =
- if (sym.isClass) tree
+ if (sym.isClass) {
+ if (sym.owner.is(Package) &&
+ ctx.compilationUnit.source.exists &&
+ sym != defn.SourceFileAnnot)
+ sym.addAnnotation(Annotation.makeSourceFile(ctx.compilationUnit.source.file.path))
+
+ if (!sym.isAnonymousClass) // ignore anonymous class
+ for (parent <- sym.asClass.classInfo.classParents) {
+ val pclazz = parent.classSymbol
+ if (pclazz.is(Sealed)) pclazz.addAnnotation(Annotation.makeChild(sym))
+ }
+
+ tree
+ }
else {
Checking.typeChecker.traverse(tree.rhs)
cpy.TypeDef(tree)(rhs = TypeTree(tree.symbol.info))
@@ -236,8 +266,8 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisTran
case tree: New if !inJavaAnnot && !parentNews.contains(tree) =>
Checking.checkInstantiable(tree.tpe, tree.pos)
super.transform(tree)
- case tree @ Annotated(annot, annotated) =>
- cpy.Annotated(tree)(transformAnnot(annot), transform(annotated))
+ case tree @ Annotated(annotated, annot) =>
+ cpy.Annotated(tree)(transform(annotated), transformAnnot(annot))
case tree: TypeTree =>
tree.withType(
tree.tpe match {
diff --git a/src/dotty/tools/dotc/transform/RestoreScopes.scala b/src/dotty/tools/dotc/transform/RestoreScopes.scala
index 41da05691..8b9d2be0d 100644
--- a/src/dotty/tools/dotc/transform/RestoreScopes.scala
+++ b/src/dotty/tools/dotc/transform/RestoreScopes.scala
@@ -11,7 +11,6 @@ import TreeTransforms.MiniPhaseTransform
import SymDenotations._
import ast.Trees._
import NameOps._
-import typer.Mode
import TreeTransforms.TransformerInfo
import StdNames._
diff --git a/src/dotty/tools/dotc/transform/SelectStatic.scala b/src/dotty/tools/dotc/transform/SelectStatic.scala
new file mode 100644
index 000000000..5d60bb984
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/SelectStatic.scala
@@ -0,0 +1,56 @@
+package dotty.tools.dotc
+package transform
+
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.DenotTransformers.IdentityDenotTransformer
+import dotty.tools.dotc.core.Flags._
+import dotty.tools.dotc.core.Symbols._
+import dotty.tools.dotc.core._
+import dotty.tools.dotc.transform.TreeTransforms._
+
+/** Removes selects that would be compiled into GetStatic
+ * otherwise backend needs to be aware that some qualifiers need to be dropped.
+ *  A similar transformation seems to be performed by `flatten` in nsc.
+ * @author Dmytro Petrashko
+ */
+class SelectStatic extends MiniPhaseTransform with IdentityDenotTransformer { thisTransform =>
+ import ast.tpd._
+
+ override def phaseName: String = "selectStatic"
+
+ override def transformSelect(tree: tpd.Select)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ val sym = tree.symbol
+ def isStaticMember =
+ (sym is Flags.Module) && sym.initial.maybeOwner.initial.isStaticOwner ||
+ (sym is Flags.JavaStatic) ||
+ (sym.maybeOwner is Flags.ImplClass) ||
+ sym.hasAnnotation(ctx.definitions.ScalaStaticAnnot)
+ val isStaticRef = !sym.is(Package) && !sym.maybeOwner.is(Package) && isStaticMember
+ val tree1 =
+ if (isStaticRef && !tree.qualifier.symbol.is(JavaModule) && !tree.qualifier.isType)
+ Block(List(tree.qualifier), ref(sym))
+ else tree
+
+ normalize(tree1)
+ }
+
+ private def normalize(t: Tree)(implicit ctx: Context) = t match {
+ case Select(Block(stats, qual), nm) =>
+ Block(stats, cpy.Select(t)(qual, nm))
+ case Apply(Block(stats, qual), nm) =>
+ Block(stats, Apply(qual, nm))
+ case TypeApply(Block(stats, qual), nm) =>
+ Block(stats, TypeApply(qual, nm))
+ case _ => t
+ }
+
+ override def transformApply(tree: tpd.Apply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ normalize(tree)
+ }
+
+ override def transformTypeApply(tree: tpd.TypeApply)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
+ normalize(tree)
+ }
+}
diff --git a/src/dotty/tools/dotc/transform/Splitter.scala b/src/dotty/tools/dotc/transform/Splitter.scala
index 410b412e0..d62be1a82 100644
--- a/src/dotty/tools/dotc/transform/Splitter.scala
+++ b/src/dotty/tools/dotc/transform/Splitter.scala
@@ -6,25 +6,34 @@ import ast.Trees._
import core._
import Contexts._, Types._, Decorators._, Denotations._, Symbols._, SymDenotations._, Names._
-/** This transform makes sure every identifier and select node
- * carries a symbol. To do this, certain qualifiers with a union type
- * have to be "splitted" with a type test.
- *
- * For now, only self references are treated.
+/** Distribute applications into Block and If nodes
*/
class Splitter extends MiniPhaseTransform { thisTransform =>
import ast.tpd._
override def phaseName: String = "splitter"
- /** Replace self referencing idents with ThisTypes. */
- override def transformIdent(tree: Ident)(implicit ctx: Context, info: TransformerInfo) = tree.tpe match {
- case tp: ThisType =>
- ctx.debuglog(s"owner = ${ctx.owner}, context = ${ctx}")
- This(tp.cls) withPos tree.pos
- case _ => tree
+  /** Distribute arguments among split branches */
+ def distribute(tree: GenericApply[Type], rebuild: (Tree, List[Tree]) => Context => Tree)(implicit ctx: Context) = {
+ def recur(fn: Tree): Tree = fn match {
+ case Block(stats, expr) => Block(stats, recur(expr))
+ case If(cond, thenp, elsep) => If(cond, recur(thenp), recur(elsep))
+ case _ => rebuild(fn, tree.args)(ctx) withPos tree.pos
+ }
+ recur(tree.fun)
}
+ override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo) =
+ distribute(tree, typeApply)
+
+ override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) =
+ distribute(tree, apply)
+
+ private val typeApply = (fn: Tree, args: List[Tree]) => (ctx: Context) => TypeApply(fn, args)(ctx)
+ private val apply = (fn: Tree, args: List[Tree]) => (ctx: Context) => Apply(fn, args)(ctx)
+
+/* The following is no longer necessary, since we select members on the join of an or type:
+ *
/** If we select a name, make sure the node has a symbol.
* If necessary, split the qualifier with type tests.
* Example: Assume:
@@ -46,7 +55,7 @@ class Splitter extends MiniPhaseTransform { thisTransform =>
val mbr = tp.member(name)
if (!mbr.isOverloaded) mbr.asSingleDenotation
else tree.tpe match {
- case tref: TermRefWithSignature => mbr.atSignature(tref.sig)
+ case tref: TermRefWithSignature => mbr.atSignature(tref.sig).checkUnique
case _ =>
def alts = mbr.alternatives.map(alt => i"$alt: ${alt.info}").mkString(", ")
ctx.error(s"cannot disambiguate overloaded members $alts", tree.pos)
@@ -108,23 +117,5 @@ class Splitter extends MiniPhaseTransform { thisTransform =>
evalOnce(qual)(qual => choose(qual, candidates(qual.tpe)))
}
}
-
- /** Distribute arguments among splitted branches */
- def distribute(tree: GenericApply[Type], rebuild: (Tree, List[Tree]) => Context => Tree)(implicit ctx: Context) = {
- def recur(fn: Tree): Tree = fn match {
- case Block(stats, expr) => Block(stats, recur(expr))
- case If(cond, thenp, elsep) => If(cond, recur(thenp), recur(elsep))
- case _ => rebuild(fn, tree.args)(ctx) withPos tree.pos
- }
- recur(tree.fun)
- }
-
- override def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo) =
- distribute(tree, typeApply)
-
- override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo) =
- distribute(tree, apply)
-
- private val typeApply = (fn: Tree, args: List[Tree]) => (ctx: Context) => TypeApply(fn, args)(ctx)
- private val apply = (fn: Tree, args: List[Tree]) => (ctx: Context) => Apply(fn, args)(ctx)
+*/
}
diff --git a/src/dotty/tools/dotc/transform/SuperAccessors.scala b/src/dotty/tools/dotc/transform/SuperAccessors.scala
index ae9c493ae..10be6db65 100644
--- a/src/dotty/tools/dotc/transform/SuperAccessors.scala
+++ b/src/dotty/tools/dotc/transform/SuperAccessors.scala
@@ -148,7 +148,7 @@ class SuperAccessors(thisTransformer: DenotTransformer) {
*/
private def ensureProtectedAccessOK(sel: Select, targs: List[Tree])(implicit ctx: Context) = {
val sym = sel.symbol
- if (sym.exists && needsProtectedAccessor(sym, sel.pos)) {
+ if (sym.isTerm && !sel.name.isOuterSelect && needsProtectedAccessor(sym, sel.pos)) {
ctx.debuglog("Adding protected accessor for " + sel)
protectedAccessorCall(sel, targs)
} else sel
@@ -167,12 +167,6 @@ class SuperAccessors(thisTransformer: DenotTransformer) {
val accName = sym.name.protectedAccessorName
- def isThisType(tpe: Type): Boolean = tpe match {
- case tpe: ThisType => !tpe.cls.is(PackageClass)
- case tpe: TypeProxy => isThisType(tpe.underlying)
- case _ => false
- }
-
// if the result type depends on the this type of an enclosing class, the accessor
// has to take an object of exactly this type, otherwise it's more general
val receiverType =
diff --git a/src/dotty/tools/dotc/transform/SyntheticMethods.scala b/src/dotty/tools/dotc/transform/SyntheticMethods.scala
index a496f80ce..9dfd92fe9 100644
--- a/src/dotty/tools/dotc/transform/SyntheticMethods.scala
+++ b/src/dotty/tools/dotc/transform/SyntheticMethods.scala
@@ -10,6 +10,7 @@ import DenotTransformers._
import ast.Trees._
import ast.untpd
import Decorators._
+import NameOps._
import ValueClasses.isDerivedValueClass
import scala.collection.mutable.ListBuffer
import scala.language.postfixOps
@@ -79,14 +80,17 @@ class SyntheticMethods(thisTransformer: DenotTransformer) {
def forwardToRuntime(vrefss: List[List[Tree]]): Tree =
ref(defn.runtimeMethodRef("_" + sym.name.toString)).appliedToArgs(This(clazz) :: vrefss.head)
+ def ownName(vrefss: List[List[Tree]]): Tree =
+ Literal(Constant(clazz.name.stripModuleClassSuffix.decode.toString))
+
def syntheticRHS(implicit ctx: Context): List[List[Tree]] => Tree = synthetic.name match {
case nme.hashCode_ if isDerivedValueClass(clazz) => vrefss => valueHashCodeBody
case nme.hashCode_ => vrefss => caseHashCodeBody
- case nme.toString_ => forwardToRuntime
+ case nme.toString_ => if (clazz.is(ModuleClass)) ownName else forwardToRuntime
case nme.equals_ => vrefss => equalsBody(vrefss.head.head)
case nme.canEqual_ => vrefss => canEqualBody(vrefss.head.head)
case nme.productArity => vrefss => Literal(Constant(accessors.length))
- case nme.productPrefix => vrefss => Literal(Constant(clazz.name.decode.toString))
+ case nme.productPrefix => ownName
}
ctx.log(s"adding $synthetic to $clazz at ${ctx.phase}")
DefDef(synthetic, syntheticRHS(ctx.withOwner(synthetic)))
diff --git a/src/dotty/tools/dotc/transform/TailRec.scala b/src/dotty/tools/dotc/transform/TailRec.scala
index 58fe7a6c9..d99a48af3 100644
--- a/src/dotty/tools/dotc/transform/TailRec.scala
+++ b/src/dotty/tools/dotc/transform/TailRec.scala
@@ -1,7 +1,7 @@
package dotty.tools.dotc.transform
import dotty.tools.dotc.ast.Trees._
-import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.ast.{TreeTypeMap, tpd}
import dotty.tools.dotc.core.Contexts.Context
import dotty.tools.dotc.core.Decorators._
import dotty.tools.dotc.core.DenotTransformers.DenotTransformer
@@ -10,13 +10,12 @@ import dotty.tools.dotc.core.Symbols._
import dotty.tools.dotc.core.Types._
import dotty.tools.dotc.core._
import dotty.tools.dotc.transform.TailRec._
-import dotty.tools.dotc.transform.TreeTransforms.{TransformerInfo, MiniPhaseTransform}
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
/**
* A Tail Rec Transformer
- *
* @author Erik Stenman, Iulian Dragos,
- * ported to dotty by Dmitry Petrashko
+ * ported and heavily modified for dotty by Dmitry Petrashko
* @version 1.1
*
* What it does:
@@ -74,10 +73,26 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
final val labelPrefix = "tailLabel"
final val labelFlags = Flags.Synthetic | Flags.Label
+  /** Symbols of methods that have @tailrec annotations inside */
+ private val methodsWithInnerAnnots = new collection.mutable.HashSet[Symbol]()
+
+ override def transformUnit(tree: Tree)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ methodsWithInnerAnnots.clear()
+ tree
+ }
+
+ override def transformTyped(tree: Typed)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ if (tree.tpt.tpe.hasAnnotation(defn.TailrecAnnot))
+ methodsWithInnerAnnots += ctx.owner.enclosingMethod
+ tree
+ }
+
private def mkLabel(method: Symbol, abstractOverClass: Boolean)(implicit c: Context): TermSymbol = {
val name = c.freshName(labelPrefix)
- c.newSymbol(method, name.toTermName, labelFlags, fullyParameterizedType(method.info, method.enclosingClass.asClass, abstractOverClass))
+ if (method.owner.isClass)
+ c.newSymbol(method, name.toTermName, labelFlags, fullyParameterizedType(method.info, method.enclosingClass.asClass, abstractOverClass, liftThisType = false))
+ else c.newSymbol(method, name.toTermName, labelFlags, method.info)
}
override def transformDefDef(tree: tpd.DefDef)(implicit ctx: Context, info: TransformerInfo): tpd.Tree = {
@@ -103,7 +118,7 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
// and second one will actually apply,
// now this speculatively transforms tree and throws away result in many cases
val rhsSemiTransformed = {
- val transformer = new TailRecElimination(origMeth, owner, thisTpe, mandatory, label, abstractOverClass = defIsTopLevel)
+ val transformer = new TailRecElimination(origMeth, dd.tparams, owner, thisTpe, mandatory, label, abstractOverClass = defIsTopLevel)
val rhs = atGroupEnd(transformer.transform(dd.rhs)(_))
rewrote = transformer.rewrote
rhs
@@ -111,28 +126,48 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
if (rewrote) {
val dummyDefDef = cpy.DefDef(tree)(rhs = rhsSemiTransformed)
- val res = fullyParameterizedDef(label, dummyDefDef, abstractOverClass = defIsTopLevel)
- val call = forwarder(label, dd, abstractOverClass = defIsTopLevel)
- Block(List(res), call)
- } else {
- if (mandatory)
- ctx.error("TailRec optimisation not applicable, method not tail recursive", dd.pos)
+ if (tree.symbol.owner.isClass) {
+ val labelDef = fullyParameterizedDef(label, dummyDefDef, abstractOverClass = defIsTopLevel)
+ val call = forwarder(label, dd, abstractOverClass = defIsTopLevel, liftThisType = true)
+ Block(List(labelDef), call)
+ } else { // inner method. Tail recursion does not change `this`
+ val labelDef = polyDefDef(label, trefs => vrefss => {
+ val origMeth = tree.symbol
+ val origTParams = tree.tparams.map(_.symbol)
+ val origVParams = tree.vparamss.flatten map (_.symbol)
+ new TreeTypeMap(
+ typeMap = identity(_)
+ .substDealias(origTParams, trefs)
+ .subst(origVParams, vrefss.flatten.map(_.tpe)),
+ oldOwners = origMeth :: Nil,
+ newOwners = label :: Nil
+ ).transform(rhsSemiTransformed)
+ })
+ Block(List(labelDef), ref(label).appliedToArgss(vparamss0.map(_.map(x=> ref(x.symbol)))))
+ }} else {
+ if (mandatory) ctx.error(
+ "TailRec optimisation not applicable, method not tail recursive",
+ // FIXME: want to report this error on `dd.namePos`, but
+ // because of extension method getting a weird pos, it is
+ // better to report on symbol so there's no overlap
+ sym.pos
+ )
dd.rhs
}
})
}
- case d: DefDef if d.symbol.hasAnnotation(defn.TailrecAnnot) =>
- ctx.error("TailRec optimisation not applicable, method is neither private nor final so can be overridden", d.pos)
+ case d: DefDef if d.symbol.hasAnnotation(defn.TailrecAnnot) || methodsWithInnerAnnots.contains(d.symbol) =>
+ ctx.error("TailRec optimisation not applicable, method is neither private nor final so can be overridden", sym.pos)
d
- case d if d.symbol.hasAnnotation(defn.TailrecAnnot) =>
- ctx.error("TailRec optimisation not applicable, not a method", d.pos)
+ case d if d.symbol.hasAnnotation(defn.TailrecAnnot) || methodsWithInnerAnnots.contains(d.symbol) =>
+ ctx.error("TailRec optimisation not applicable, not a method", sym.pos)
d
case _ => tree
}
}
- class TailRecElimination(method: Symbol, enclosingClass: Symbol, thisType: Type, isMandatory: Boolean, label: Symbol, abstractOverClass: Boolean) extends tpd.TreeMap {
+ class TailRecElimination(method: Symbol, methTparams: List[Tree], enclosingClass: Symbol, thisType: Type, isMandatory: Boolean, label: Symbol, abstractOverClass: Boolean) extends tpd.TreeMap {
import dotty.tools.dotc.ast.tpd._
@@ -164,7 +199,7 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
override def transform(tree: Tree)(implicit c: Context): Tree = {
/* A possibly polymorphic apply to be considered for tail call transformation. */
- def rewriteApply(tree: Tree, sym: Symbol): Tree = {
+ def rewriteApply(tree: Tree, sym: Symbol, required: Boolean = false): Tree = {
def receiverArgumentsAndSymbol(t: Tree, accArgs: List[List[Tree]] = Nil, accT: List[Tree] = Nil):
(Tree, Tree, List[List[Tree]], List[Tree], Symbol) = t match {
case TypeApply(fun, targs) if fun.symbol eq t.symbol => receiverArgumentsAndSymbol(fun, accArgs, targs)
@@ -175,8 +210,9 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
case x => (x, x, accArgs, accT, x.symbol)
}
- val (reciever, call, arguments, typeArguments, symbol) = receiverArgumentsAndSymbol(tree)
- val recv = noTailTransform(reciever)
+ val (prefix, call, arguments, typeArguments, symbol) = receiverArgumentsAndSymbol(tree)
+ val hasConformingTargs = (typeArguments zip methTparams).forall{x => x._1.tpe <:< x._2.tpe}
+ val recv = noTailTransform(prefix)
val targs = typeArguments.map(noTailTransform)
val argumentss = arguments.map(noTailTransforms)
@@ -199,7 +235,7 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
}
}
def fail(reason: String) = {
- if (isMandatory) c.error(s"Cannot rewrite recursive call: $reason", tree.pos)
+ if (isMandatory || required) c.error(s"Cannot rewrite recursive call: $reason", tree.pos)
else c.debuglog("Cannot rewrite recursive call at: " + tree.pos + " because: " + reason)
continue
}
@@ -215,20 +251,24 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
targs ::: classTypeArgs.map(x => ref(x.typeSymbol))
} else targs
- val method = Apply(if (callTargs.nonEmpty) TypeApply(Ident(label.termRef), callTargs) else Ident(label.termRef),
- List(receiver))
+ val method = if (callTargs.nonEmpty) TypeApply(Ident(label.termRef), callTargs) else Ident(label.termRef)
+ val thisPassed =
+ if (this.method.owner.isClass)
+ method.appliedTo(receiver.ensureConforms(method.tpe.widen.firstParamTypes.head))
+ else method
val res =
- if (method.tpe.widen.isParameterless) method
- else argumentss.foldLeft(method) {
- (met, ar) => Apply(met, ar) // Dotty deviation no auto-detupling yet.
- }
+ if (thisPassed.tpe.widen.isParameterless) thisPassed
+ else argumentss.foldLeft(thisPassed) {
+ (met, ar) => Apply(met, ar) // Dotty deviation no auto-detupling yet.
+ }
res
}
if (isRecursiveCall) {
if (ctx.tailPos) {
- if (recv eq EmptyTree) rewriteTailCall(This(enclosingClass.asClass))
+ if (!hasConformingTargs) fail("it changes type arguments on a polymorphic recursive call")
+ else if (recv eq EmptyTree) rewriteTailCall(This(enclosingClass.asClass))
else if (receiverIsSame || receiverIsThis) rewriteTailCall(recv)
else fail("it changes type of 'this' on a polymorphic recursive call")
}
@@ -281,7 +321,8 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
noTailTransforms(stats),
transform(expr)
)
-
+ case tree @ Typed(t: Apply, tpt) if tpt.tpe.hasAnnotation(defn.TailrecAnnot) =>
+ tpd.Typed(rewriteApply(t, t.fun.symbol, required = true), tpt)
case tree@If(cond, thenp, elsep) =>
tpd.cpy.If(tree)(
noTailTransform(cond),
@@ -305,14 +346,16 @@ class TailRec extends MiniPhaseTransform with DenotTransformer with FullParamete
assert(false, "We should never have gotten inside a pattern")
tree
+ case t @ DefDef(_, _, _, _, _) =>
+ t // todo: could improve to handle DefDef's with a label flag calls to which are in tail position
+
case ValDef(_, _, _) | EmptyTree | Super(_, _) | This(_) |
- Literal(_) | TypeTree(_) | DefDef(_, _, _, _, _) | TypeDef(_, _) =>
+ Literal(_) | TypeTree(_) | TypeDef(_, _) =>
tree
case Return(expr, from) =>
tpd.cpy.Return(tree)(noTailTransform(expr), from)
- case t: DefDef =>
- t // todo: could improve to handle DefDef's with a label flag calls to which are in tail position
+
case _ =>
super.transform(tree)
}
diff --git a/src/dotty/tools/dotc/transform/TreeChecker.scala b/src/dotty/tools/dotc/transform/TreeChecker.scala
index a260963e9..808178369 100644
--- a/src/dotty/tools/dotc/transform/TreeChecker.scala
+++ b/src/dotty/tools/dotc/transform/TreeChecker.scala
@@ -15,6 +15,7 @@ import core.StdNames._
import core.Decorators._
import core.TypeErasure.isErasedType
import core.Phases.Phase
+import core.Mode
import typer._
import typer.ErrorReporting._
import reporting.ThrowingReporter
@@ -25,6 +26,9 @@ import collection.mutable
import ProtoTypes._
import config.Printers
import java.lang.AssertionError
+
+import dotty.tools.dotc.core.Names
+
import scala.util.control.NonFatal
/** Run by -Ycheck option after a given phase, this class retypes all syntax trees
@@ -51,7 +55,7 @@ class TreeChecker extends Phase with SymTransformer {
!name.exists(c => c == '.' || c == ';' || c =='[' || c == '/' || c == '<' || c == '>')
def printError(str: String)(implicit ctx: Context) = {
- ctx.println(Console.RED + "[error] " + Console.WHITE + str)
+ ctx.echo(Console.RED + "[error] " + Console.WHITE + str)
}
val NoSuperClass = Trait | Package
@@ -117,17 +121,19 @@ class TreeChecker extends Phase with SymTransformer {
def check(phasesToRun: Seq[Phase], ctx: Context) = {
val prevPhase = ctx.phase.prev // can be a mini-phase
val squahsedPhase = ctx.squashed(prevPhase)
- ctx.println(s"checking ${ctx.compilationUnit} after phase ${squahsedPhase}")
- val checkingCtx = ctx.fresh.setReporter(new ThrowingReporter(ctx.reporter))
+ ctx.echo(s"checking ${ctx.compilationUnit} after phase ${squahsedPhase}")
+
+ val checkingCtx = ctx
+ .fresh
+ .setMode(Mode.ImplicitsEnabled)
+ .setReporter(new ThrowingReporter(ctx.reporter))
+
val checker = new Checker(previousPhases(phasesToRun.toList)(ctx))
try checker.typedExpr(ctx.compilationUnit.tpdTree)(checkingCtx)
catch {
case NonFatal(ex) => //TODO CHECK. Check that we are bootstrapped
implicit val ctx: Context = checkingCtx
- ctx.println(i"*** error while checking ${ctx.compilationUnit} after phase ${checkingCtx.phase.prev} ***")
- ctx.println(ex.toString)
- ctx.println(ex.getStackTrace.take(30).deep.mkString("\n"))
- ctx.println("<<<")
+ println(i"*** error while checking ${ctx.compilationUnit} after phase ${checkingCtx.phase.prev} ***")
throw ex
}
}
@@ -155,17 +161,16 @@ class TreeChecker extends Phase with SymTransformer {
tree match {
case t: MemberDef =>
if (t.name ne sym.name) ctx.warning(s"symbol ${sym.fullName} name doesn't correspond to AST: ${t}")
- if (sym.flags != t.mods.flags) ctx.warning(s"symbol ${sym.fullName} flags ${sym.flags} doesn't match AST definition flags ${t.mods.flags}")
// todo: compare trees inside annotations
case _ =>
}
}
nowDefinedSyms += tree.symbol
- //ctx.println(i"defined: ${tree.symbol}")
+ //ctx.echo(i"defined: ${tree.symbol}")
val res = op
nowDefinedSyms -= tree.symbol
- //ctx.println(i"undefined: ${tree.symbol}")
+ //ctx.echo(i"undefined: ${tree.symbol}")
res
case _ => op
}
@@ -180,6 +185,76 @@ class TreeChecker extends Phase with SymTransformer {
if (tree.symbol.maybeOwner.isTerm)
assert(nowDefinedSyms contains tree.symbol, i"undefined symbol ${tree.symbol}")
+ /** assert Java classes are not used as objects */
+ def assertIdentNotJavaClass(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case _ : untpd.Ident =>
+ assert(!tree.symbol.is(JavaModule), "Java class can't be used as value: " + tree)
+ case _ =>
+ }
+
+ /** check Java classes are not used as objects */
+ def checkIdentNotJavaClass(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ // case tree: untpd.Ident =>
+ // case tree: untpd.Select =>
+ // case tree: untpd.Bind =>
+ case vd : ValDef =>
+ assertIdentNotJavaClass(vd.forceIfLazy)
+ case dd : DefDef =>
+ assertIdentNotJavaClass(dd.forceIfLazy)
+ // case tree: untpd.TypeDef =>
+ case Apply(fun, args) =>
+ assertIdentNotJavaClass(fun)
+ args.foreach(assertIdentNotJavaClass _)
+ // case tree: untpd.This =>
+ // case tree: untpd.Literal =>
+ // case tree: untpd.New =>
+ case Typed(expr, _) =>
+ assertIdentNotJavaClass(expr)
+ case NamedArg(_, arg) =>
+ assertIdentNotJavaClass(arg)
+ case Assign(_, rhs) =>
+ assertIdentNotJavaClass(rhs)
+ case Block(stats, expr) =>
+ stats.foreach(assertIdentNotJavaClass _)
+ assertIdentNotJavaClass(expr)
+ case If(_, thenp, elsep) =>
+ assertIdentNotJavaClass(thenp)
+ assertIdentNotJavaClass(elsep)
+ // case tree: untpd.Closure =>
+ case Match(selector, cases) =>
+ assertIdentNotJavaClass(selector)
+ cases.foreach(caseDef => assertIdentNotJavaClass(caseDef.body))
+ case Return(expr, _) =>
+ assertIdentNotJavaClass(expr)
+ case Try(expr, cases, finalizer) =>
+ assertIdentNotJavaClass(expr)
+ cases.foreach(caseDef => assertIdentNotJavaClass(caseDef.body))
+ assertIdentNotJavaClass(finalizer)
+ // case tree: TypeApply =>
+ // case tree: Super =>
+ case SeqLiteral(elems, _) =>
+ elems.foreach(assertIdentNotJavaClass)
+ // case tree: TypeTree =>
+ // case tree: SingletonTypeTree =>
+ // case tree: AndTypeTree =>
+ // case tree: OrTypeTree =>
+ // case tree: RefinedTypeTree =>
+ // case tree: AppliedTypeTree =>
+ // case tree: ByNameTypeTree =>
+ // case tree: TypeBoundsTree =>
+ // case tree: Alternative =>
+ // case tree: PackageDef =>
+ case Annotated(arg, _) =>
+ assertIdentNotJavaClass(arg)
+ case _ =>
+ }
+
+ override def typed(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree = {
+ val tpdTree = super.typed(tree, pt)
+ checkIdentNotJavaClass(tpdTree)
+ tpdTree
+ }
+
override def typedUnadapted(tree: untpd.Tree, pt: Type)(implicit ctx: Context): tpd.Tree = {
val res = tree match {
case _: untpd.UnApply =>
@@ -232,19 +307,51 @@ class TreeChecker extends Phase with SymTransformer {
}.apply(tp)
def checkNotRepeated(tree: Tree)(implicit ctx: Context): tree.type = {
- assert(!tree.tpe.widen.isRepeatedParam, i"repeated parameter type not allowed here: $tree")
+ def allowedRepeated = (tree.symbol.flags is Case) && tree.tpe.widen.isRepeatedParam
+
+ assert(!tree.tpe.widen.isRepeatedParam || allowedRepeated, i"repeated parameter type not allowed here: $tree")
tree
}
+ /** Check that all methods have MethodicType */
+ def isMethodType(pt: Type)(implicit ctx: Context): Boolean = pt match {
+ case at: AnnotatedType => isMethodType(at.tpe)
+ case _: MethodicType => true // MethodType, ExprType, PolyType
+ case _ => false
+ }
+
override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context): Tree = {
assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase)
assert(tree.isType || !needsSelect(tree.tpe), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}")
assertDefined(tree)
+
checkNotRepeated(super.typedIdent(tree, pt))
}
+ /** Makes sure the symbol in the tree can be approximately reconstructed by
+ * calling `member` on the qualifier type.
+ * Approximately means: The two symbols might be different but one still overrides the other.
+ */
override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase)
+ val tpe = tree.typeOpt
+ val sym = tree.symbol
+ if (!tpe.isInstanceOf[WithFixedSym] && sym.exists && !sym.is(Private)) {
+ val qualTpe = tree.qualifier.typeOpt
+ val member =
+ if (sym.is(Private)) qualTpe.member(tree.name)
+ else qualTpe.nonPrivateMember(tree.name)
+ val memberSyms = member.alternatives.map(_.symbol)
+ assert(memberSyms.exists(mbr =>
+ sym == mbr ||
+ sym.overriddenSymbol(mbr.owner.asClass) == mbr ||
+ mbr.overriddenSymbol(sym.owner.asClass) == sym),
+ ex"""symbols differ for $tree
+ |was : $sym
+ |alternatives by type: $memberSyms%, % of types ${memberSyms.map(_.info)}%, %
+ |qualifier type : ${tree.qualifier.typeOpt}
+ |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""")
+ }
checkNotRepeated(super.typedSelect(tree, pt))
}
@@ -277,8 +384,7 @@ class TreeChecker extends Phase with SymTransformer {
def isNonMagicalMethod(x: Symbol) =
x.is(Method) &&
!x.isCompanionMethod &&
- !x.isValueClassConvertMethod &&
- x != defn.newRefArrayMethod
+ !x.isValueClassConvertMethod
val symbolsNotDefined = cls.classInfo.decls.toSet.filter(isNonMagicalMethod) -- impl.body.map(_.symbol) - constr.symbol
@@ -293,8 +399,10 @@ class TreeChecker extends Phase with SymTransformer {
override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) =
withDefinedSyms(ddef.tparams) {
withDefinedSymss(ddef.vparamss) {
- if (!sym.isClassConstructor) assert(isValidJVMMethodName(sym.name), s"${sym.fullName} name is invalid on jvm")
- super.typedDefDef(ddef, sym)
+ if (!sym.isClassConstructor && !(sym.name eq Names.STATIC_CONSTRUCTOR)) assert(isValidJVMMethodName(sym.name), s"${sym.fullName} name is invalid on jvm")
+ val tpdTree = super.typedDefDef(ddef, sym)
+ assert(isMethodType(sym.info), i"wrong type, expect a method type for ${sym.fullName}, but found: ${sym.info}")
+ tpdTree
}
}
@@ -307,6 +415,9 @@ class TreeChecker extends Phase with SymTransformer {
override def typedBlock(tree: untpd.Block, pt: Type)(implicit ctx: Context) =
withDefinedSyms(tree.stats) { super.typedBlock(tree, pt) }
+ override def typedInlined(tree: untpd.Inlined, pt: Type)(implicit ctx: Context) =
+ withDefinedSyms(tree.bindings) { super.typedInlined(tree, pt) }
+
/** Check that all defined symbols have legal owners.
* An owner is legal if it is either the same as the context's owner
* or there's an owner chain of valdefs starting at the context's owner and
@@ -333,9 +444,11 @@ class TreeChecker extends Phase with SymTransformer {
!tree.isEmpty &&
!isPrimaryConstructorReturn &&
!pt.isInstanceOf[FunProto])
- assert(tree.tpe <:< pt,
- s"error at ${sourcePos(tree.pos)}\n" +
- err.typeMismatchStr(tree.tpe, pt) + "\ntree = " + tree)
+ assert(tree.tpe <:< pt, {
+ val mismatch = err.typeMismatchMsg(tree.tpe, pt)
+ i"""|${mismatch.msg}
+ |tree = $tree""".stripMargin
+ })
tree
}
}
diff --git a/src/dotty/tools/dotc/transform/TreeTransform.scala b/src/dotty/tools/dotc/transform/TreeTransform.scala
index 7fe003388..45fa3d607 100644
--- a/src/dotty/tools/dotc/transform/TreeTransform.scala
+++ b/src/dotty/tools/dotc/transform/TreeTransform.scala
@@ -11,7 +11,7 @@ import dotty.tools.dotc.core.Phases.Phase
import dotty.tools.dotc.core.SymDenotations.SymDenotation
import dotty.tools.dotc.core.Symbols.Symbol
import dotty.tools.dotc.core.Flags.PackageVal
-import dotty.tools.dotc.typer.Mode
+import dotty.tools.dotc.core.Mode
import dotty.tools.dotc.ast.Trees._
import dotty.tools.dotc.core.Decorators._
import dotty.tools.dotc.util.DotClass
@@ -42,7 +42,7 @@ object TreeTransforms {
* the general dispatch overhead as opposed to the concrete work done in transformations. So that leaves us with
* 0.2sec, or roughly 600M processor cycles.
*
- * Now, to the amount of work that needs to be done. The codebase produces of about 250'000 trees after typechecking.
+ * Now, to the amount of work that needs to be done. The codebase produces an average of about 250'000 trees after typechecking.
* Transformations are likely to make this bigger so let's assume 300K trees on average. We estimate to have about 100
* micro-transformations. Let's say 5 transformation groups of 20 micro-transformations each. (by comparison,
* scalac has in excess of 20 phases, and most phases do multiple transformations). There are then 30M visits
@@ -70,7 +70,6 @@ object TreeTransforms {
def prepareForApply(tree: Apply)(implicit ctx: Context) = this
def prepareForTypeApply(tree: TypeApply)(implicit ctx: Context) = this
def prepareForLiteral(tree: Literal)(implicit ctx: Context) = this
- def prepareForPair(tree: Pair)(implicit ctx: Context) = this
def prepareForNew(tree: New)(implicit ctx: Context) = this
def prepareForTyped(tree: Typed)(implicit ctx: Context) = this
def prepareForAssign(tree: Assign)(implicit ctx: Context) = this
@@ -82,8 +81,8 @@ object TreeTransforms {
def prepareForReturn(tree: Return)(implicit ctx: Context) = this
def prepareForTry(tree: Try)(implicit ctx: Context) = this
def prepareForSeqLiteral(tree: SeqLiteral)(implicit ctx: Context) = this
+ def prepareForInlined(tree: Inlined)(implicit ctx: Context) = this
def prepareForTypeTree(tree: TypeTree)(implicit ctx: Context) = this
- def prepareForSelectFromTypeTree(tree: SelectFromTypeTree)(implicit ctx: Context) = this
def prepareForBind(tree: Bind)(implicit ctx: Context) = this
def prepareForAlternative(tree: Alternative)(implicit ctx: Context) = this
def prepareForTypeDef(tree: TypeDef)(implicit ctx: Context) = this
@@ -104,7 +103,6 @@ object TreeTransforms {
def transformTypeApply(tree: TypeApply)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformLiteral(tree: Literal)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformNew(tree: New)(implicit ctx: Context, info: TransformerInfo): Tree = tree
- def transformPair(tree: Pair)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformTyped(tree: Typed)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformAssign(tree: Assign)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformBlock(tree: Block)(implicit ctx: Context, info: TransformerInfo): Tree = tree
@@ -115,8 +113,8 @@ object TreeTransforms {
def transformReturn(tree: Return)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformTry(tree: Try)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformSeqLiteral(tree: SeqLiteral)(implicit ctx: Context, info: TransformerInfo): Tree = tree
+ def transformInlined(tree: Inlined)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformTypeTree(tree: TypeTree)(implicit ctx: Context, info: TransformerInfo): Tree = tree
- def transformSelectFromTypeTree(tree: SelectFromTypeTree)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformBind(tree: Bind)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformAlternative(tree: Alternative)(implicit ctx: Context, info: TransformerInfo): Tree = tree
def transformUnApply(tree: UnApply)(implicit ctx: Context, info: TransformerInfo): Tree = tree
@@ -174,25 +172,22 @@ object TreeTransforms {
}
/** A helper trait to transform annotations on MemberDefs */
- trait AnnotationTransformer extends MiniPhaseTransform with InfoTransformer {
+ trait AnnotationTransformer extends MiniPhaseTransform with DenotTransformer {
val annotationTransformer = mkTreeTransformer
override final def treeTransformPhase = this
// need to run at own phase because otherwise we get ahead of ourselves in transforming denotations
- override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation = {
- val info1 = transformInfo(ref.info, ref.symbol)
-
- ref match {
- case ref: SymDenotation =>
- val annotTrees = ref.annotations.map(_.tree)
+ abstract override def transform(ref: SingleDenotation)(implicit ctx: Context): SingleDenotation =
+ super.transform(ref) match {
+ case ref1: SymDenotation if ref1.symbol.isDefinedInCurrentRun =>
+ val annotTrees = ref1.annotations.map(_.tree)
val annotTrees1 = annotTrees.mapConserve(annotationTransformer.macroTransform)
- val annots1 = if (annotTrees eq annotTrees1) ref.annotations else annotTrees1.map(new ConcreteAnnotation(_))
- if ((info1 eq ref.info) && (annots1 eq ref.annotations)) ref
- else ref.copySymDenotation(info = info1, annotations = annots1)
- case _ => if (info1 eq ref.info) ref else ref.derivedSingleDenotation(ref.symbol, info1)
+ if (annotTrees eq annotTrees1) ref1
+ else ref1.copySymDenotation(annotations = annotTrees1.map(new ConcreteAnnotation(_)))
+ case ref1 =>
+ ref1
}
- }
}
@sharable val NoTransform = new TreeTransform {
@@ -213,7 +208,7 @@ object TreeTransforms {
if (cls.getDeclaredMethods.exists(_.getName == name)) cls != classOf[TreeTransform]
else hasRedefinedMethod(cls.getSuperclass, name)
- /** Create an index array `next` of size one larger than teh size of `transforms` such that
+ /** Create an index array `next` of size one larger than the size of `transforms` such that
* for each index i, `next(i)` is the smallest index j such that
*
* i <= j
@@ -270,7 +265,6 @@ object TreeTransforms {
nxPrepTypeApply = index(transformations, "prepareForTypeApply")
nxPrepLiteral = index(transformations, "prepareForLiteral")
nxPrepNew = index(transformations, "prepareForNew")
- nxPrepPair = index(transformations, "prepareForPair")
nxPrepTyped = index(transformations, "prepareForTyped")
nxPrepAssign = index(transformations, "prepareForAssign")
nxPrepBlock = index(transformations, "prepareForBlock")
@@ -281,8 +275,8 @@ object TreeTransforms {
nxPrepReturn = index(transformations, "prepareForReturn")
nxPrepTry = index(transformations, "prepareForTry")
nxPrepSeqLiteral = index(transformations, "prepareForSeqLiteral")
+ nxPrepInlined = index(transformations, "prepareForInlined")
nxPrepTypeTree = index(transformations, "prepareForTypeTree")
- nxPrepSelectFromTypeTree = index(transformations, "prepareForSelectFromTypeTree")
nxPrepBind = index(transformations, "prepareForBind")
nxPrepAlternative = index(transformations, "prepareForAlternative")
nxPrepUnApply = index(transformations, "prepareForUnApply")
@@ -302,7 +296,6 @@ object TreeTransforms {
nxTransTypeApply = index(transformations, "transformTypeApply")
nxTransLiteral = index(transformations, "transformLiteral")
nxTransNew = index(transformations, "transformNew")
- nxTransPair = index(transformations, "transformPair")
nxTransTyped = index(transformations, "transformTyped")
nxTransAssign = index(transformations, "transformAssign")
nxTransBlock = index(transformations, "transformBlock")
@@ -313,8 +306,8 @@ object TreeTransforms {
nxTransReturn = index(transformations, "transformReturn")
nxTransTry = index(transformations, "transformTry")
nxTransSeqLiteral = index(transformations, "transformSeqLiteral")
+ nxTransInlined = index(transformations, "transformInlined")
nxTransTypeTree = index(transformations, "transformTypeTree")
- nxTransSelectFromTypeTree = index(transformations, "transformSelectFromTypeTree")
nxTransBind = index(transformations, "transformBind")
nxTransAlternative = index(transformations, "transformAlternative")
nxTransUnApply = index(transformations, "transformUnApply")
@@ -344,7 +337,6 @@ object TreeTransforms {
nxPrepTypeApply = indexUpdate(prev.nxPrepTypeApply, changedTransformationClass, transformationIndex, "prepareForTypeApply", copy)
nxPrepLiteral = indexUpdate(prev.nxPrepLiteral, changedTransformationClass, transformationIndex, "prepareForLiteral", copy)
nxPrepNew = indexUpdate(prev.nxPrepNew, changedTransformationClass, transformationIndex, "prepareForNew", copy)
- nxPrepPair = indexUpdate(prev.nxPrepPair, changedTransformationClass, transformationIndex, "prepareForPair", copy)
nxPrepTyped = indexUpdate(prev.nxPrepTyped, changedTransformationClass, transformationIndex, "prepareForTyped", copy)
nxPrepAssign = indexUpdate(prev.nxPrepAssign, changedTransformationClass, transformationIndex, "prepareForAssign", copy)
nxPrepBlock = indexUpdate(prev.nxPrepBlock, changedTransformationClass, transformationIndex, "prepareForBlock", copy)
@@ -355,8 +347,8 @@ object TreeTransforms {
nxPrepReturn = indexUpdate(prev.nxPrepReturn, changedTransformationClass, transformationIndex, "prepareForReturn", copy)
nxPrepTry = indexUpdate(prev.nxPrepTry, changedTransformationClass, transformationIndex, "prepareForTry", copy)
nxPrepSeqLiteral = indexUpdate(prev.nxPrepSeqLiteral, changedTransformationClass, transformationIndex, "prepareForSeqLiteral", copy)
+ nxPrepInlined = indexUpdate(prev.nxPrepInlined, changedTransformationClass, transformationIndex, "prepareForInlined", copy)
nxPrepTypeTree = indexUpdate(prev.nxPrepTypeTree, changedTransformationClass, transformationIndex, "prepareForTypeTree", copy)
- nxPrepSelectFromTypeTree = indexUpdate(prev.nxPrepSelectFromTypeTree, changedTransformationClass, transformationIndex, "prepareForSelectFromTypeTree", copy)
nxPrepBind = indexUpdate(prev.nxPrepBind, changedTransformationClass, transformationIndex, "prepareForBind", copy)
nxPrepAlternative = indexUpdate(prev.nxPrepAlternative, changedTransformationClass, transformationIndex, "prepareForAlternative", copy)
nxPrepUnApply = indexUpdate(prev.nxPrepUnApply, changedTransformationClass, transformationIndex, "prepareForUnApply", copy)
@@ -375,7 +367,6 @@ object TreeTransforms {
nxTransTypeApply = indexUpdate(prev.nxTransTypeApply, changedTransformationClass, transformationIndex, "transformTypeApply", copy)
nxTransLiteral = indexUpdate(prev.nxTransLiteral, changedTransformationClass, transformationIndex, "transformLiteral", copy)
nxTransNew = indexUpdate(prev.nxTransNew, changedTransformationClass, transformationIndex, "transformNew", copy)
- nxTransPair = indexUpdate(prev.nxTransPair, changedTransformationClass, transformationIndex, "transformPair", copy)
nxTransTyped = indexUpdate(prev.nxTransTyped, changedTransformationClass, transformationIndex, "transformTyped", copy)
nxTransAssign = indexUpdate(prev.nxTransAssign, changedTransformationClass, transformationIndex, "transformAssign", copy)
nxTransBlock = indexUpdate(prev.nxTransBlock, changedTransformationClass, transformationIndex, "transformBlock", copy)
@@ -386,8 +377,8 @@ object TreeTransforms {
nxTransReturn = indexUpdate(prev.nxTransReturn, changedTransformationClass, transformationIndex, "transformReturn", copy)
nxTransTry = indexUpdate(prev.nxTransTry, changedTransformationClass, transformationIndex, "transformTry", copy)
nxTransSeqLiteral = indexUpdate(prev.nxTransSeqLiteral, changedTransformationClass, transformationIndex, "transformSeqLiteral", copy)
+ nxTransInlined = indexUpdate(prev.nxTransInlined, changedTransformationClass, transformationIndex, "transformInlined", copy)
nxTransTypeTree = indexUpdate(prev.nxTransTypeTree, changedTransformationClass, transformationIndex, "transformTypeTree", copy)
- nxTransSelectFromTypeTree = indexUpdate(prev.nxTransSelectFromTypeTree, changedTransformationClass, transformationIndex, "transformSelectFromTypeTree", copy)
nxTransBind = indexUpdate(prev.nxTransBind, changedTransformationClass, transformationIndex, "transformBind", copy)
nxTransAlternative = indexUpdate(prev.nxTransAlternative, changedTransformationClass, transformationIndex, "transformAlternative", copy)
nxTransUnApply = indexUpdate(prev.nxTransUnApply, changedTransformationClass, transformationIndex, "transformUnApply", copy)
@@ -412,7 +403,6 @@ object TreeTransforms {
var nxPrepTypeApply: Array[Int] = _
var nxPrepLiteral: Array[Int] = _
var nxPrepNew: Array[Int] = _
- var nxPrepPair: Array[Int] = _
var nxPrepTyped: Array[Int] = _
var nxPrepAssign: Array[Int] = _
var nxPrepBlock: Array[Int] = _
@@ -423,8 +413,8 @@ object TreeTransforms {
var nxPrepReturn: Array[Int] = _
var nxPrepTry: Array[Int] = _
var nxPrepSeqLiteral: Array[Int] = _
+ var nxPrepInlined: Array[Int] = _
var nxPrepTypeTree: Array[Int] = _
- var nxPrepSelectFromTypeTree: Array[Int] = _
var nxPrepBind: Array[Int] = _
var nxPrepAlternative: Array[Int] = _
var nxPrepUnApply: Array[Int] = _
@@ -444,7 +434,6 @@ object TreeTransforms {
var nxTransTypeApply: Array[Int] = _
var nxTransLiteral: Array[Int] = _
var nxTransNew: Array[Int] = _
- var nxTransPair: Array[Int] = _
var nxTransTyped: Array[Int] = _
var nxTransAssign: Array[Int] = _
var nxTransBlock: Array[Int] = _
@@ -455,8 +444,8 @@ object TreeTransforms {
var nxTransReturn: Array[Int] = _
var nxTransTry: Array[Int] = _
var nxTransSeqLiteral: Array[Int] = _
+ var nxTransInlined: Array[Int] = _
var nxTransTypeTree: Array[Int] = _
- var nxTransSelectFromTypeTree: Array[Int] = _
var nxTransBind: Array[Int] = _
var nxTransAlternative: Array[Int] = _
var nxTransUnApply: Array[Int] = _
@@ -523,7 +512,6 @@ object TreeTransforms {
val prepForApply: Mutator[Apply] = (trans, tree, ctx) => trans.prepareForApply(tree)(ctx)
val prepForTypeApply: Mutator[TypeApply] = (trans, tree, ctx) => trans.prepareForTypeApply(tree)(ctx)
val prepForNew: Mutator[New] = (trans, tree, ctx) => trans.prepareForNew(tree)(ctx)
- val prepForPair: Mutator[Pair] = (trans, tree, ctx) => trans.prepareForPair(tree)(ctx)
val prepForTyped: Mutator[Typed] = (trans, tree, ctx) => trans.prepareForTyped(tree)(ctx)
val prepForAssign: Mutator[Assign] = (trans, tree, ctx) => trans.prepareForAssign(tree)(ctx)
val prepForLiteral: Mutator[Literal] = (trans, tree, ctx) => trans.prepareForLiteral(tree)(ctx)
@@ -535,8 +523,8 @@ object TreeTransforms {
val prepForReturn: Mutator[Return] = (trans, tree, ctx) => trans.prepareForReturn(tree)(ctx)
val prepForTry: Mutator[Try] = (trans, tree, ctx) => trans.prepareForTry(tree)(ctx)
val prepForSeqLiteral: Mutator[SeqLiteral] = (trans, tree, ctx) => trans.prepareForSeqLiteral(tree)(ctx)
+ val prepForInlined: Mutator[Inlined] = (trans, tree, ctx) => trans.prepareForInlined(tree)(ctx)
val prepForTypeTree: Mutator[TypeTree] = (trans, tree, ctx) => trans.prepareForTypeTree(tree)(ctx)
- val prepForSelectFromTypeTree: Mutator[SelectFromTypeTree] = (trans, tree, ctx) => trans.prepareForSelectFromTypeTree(tree)(ctx)
val prepForBind: Mutator[Bind] = (trans, tree, ctx) => trans.prepareForBind(tree)(ctx)
val prepForAlternative: Mutator[Alternative] = (trans, tree, ctx) => trans.prepareForAlternative(tree)(ctx)
val prepForUnApply: Mutator[UnApply] = (trans, tree, ctx) => trans.prepareForUnApply(tree)(ctx)
@@ -641,17 +629,6 @@ object TreeTransforms {
}
@tailrec
- final private[TreeTransforms] def goPair(tree: Pair, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
- if (cur < info.transformers.length) {
- val trans = info.transformers(cur)
- trans.transformPair(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
- case t: Pair => goPair(t, info.nx.nxTransPair(cur + 1))
- case t => transformSingle(t, cur + 1)
- }
- } else tree
- }
-
- @tailrec
final private[TreeTransforms] def goTyped(tree: Typed, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
if (cur < info.transformers.length) {
val trans = info.transformers(cur)
@@ -773,22 +750,22 @@ object TreeTransforms {
}
@tailrec
- final private[TreeTransforms] def goTypeTree(tree: TypeTree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ final private[TreeTransforms] def goInlined(tree: Inlined, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
if (cur < info.transformers.length) {
val trans = info.transformers(cur)
- trans.transformTypeTree(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
- case t: TypeTree => goTypeTree(t, info.nx.nxTransTypeTree(cur + 1))
+ trans.transformInlined(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: Inlined => goInlined(t, info.nx.nxTransInlined(cur + 1))
case t => transformSingle(t, cur + 1)
}
} else tree
}
@tailrec
- final private[TreeTransforms] def goSelectFromTypeTree(tree: SelectFromTypeTree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ final private[TreeTransforms] def goTypeTree(tree: TypeTree, cur: Int)(implicit ctx: Context, info: TransformerInfo): Tree = {
if (cur < info.transformers.length) {
val trans = info.transformers(cur)
- trans.transformSelectFromTypeTree(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
- case t: SelectFromTypeTree => goSelectFromTypeTree(t, info.nx.nxTransSelectFromTypeTree(cur + 1))
+ trans.transformTypeTree(tree)(ctx.withPhase(trans.treeTransformPhase), info) match {
+ case t: TypeTree => goTypeTree(t, info.nx.nxTransTypeTree(cur + 1))
case t => transformSingle(t, cur + 1)
}
} else tree
@@ -903,8 +880,6 @@ object TreeTransforms {
tree match {
case tree: Ident => goIdent(tree, info.nx.nxTransIdent(cur))
case tree: Select => goSelect(tree, info.nx.nxTransSelect(cur))
- case tree: SelectFromTypeTree =>
- goSelectFromTypeTree(tree, info.nx.nxTransSelectFromTypeTree(cur))
case tree: Bind => goBind(tree, cur)
case tree: ValDef if !tree.isEmpty => goValDef(tree, info.nx.nxTransValDef(cur))
case tree: DefDef => goDefDef(tree, info.nx.nxTransDefDef(cur))
@@ -920,7 +895,6 @@ object TreeTransforms {
case tree: TypeApply => goTypeApply(tree, info.nx.nxTransTypeApply(cur))
case tree: Literal => goLiteral(tree, info.nx.nxTransLiteral(cur))
case tree: New => goNew(tree, info.nx.nxTransNew(cur))
- case tree: Pair => goPair(tree, info.nx.nxTransPair(cur))
case tree: Typed => goTyped(tree, info.nx.nxTransTyped(cur))
case tree: Assign => goAssign(tree, info.nx.nxTransAssign(cur))
case tree: Block => goBlock(tree, info.nx.nxTransBlock(cur))
@@ -930,7 +904,8 @@ object TreeTransforms {
case tree: CaseDef => goCaseDef(tree, info.nx.nxTransCaseDef(cur))
case tree: Return => goReturn(tree, info.nx.nxTransReturn(cur))
case tree: Try => goTry(tree, info.nx.nxTransTry(cur))
- case tree: SeqLiteral => goSeqLiteral(tree, info.nx.nxTransLiteral(cur))
+ case tree: SeqLiteral => goSeqLiteral(tree, info.nx.nxTransSeqLiteral(cur))
+ case tree: Inlined => goInlined(tree, info.nx.nxTransInlined(cur))
case tree: TypeTree => goTypeTree(tree, info.nx.nxTransTypeTree(cur))
case tree: Alternative => goAlternative(tree, info.nx.nxTransAlternative(cur))
case tree: UnApply => goUnApply(tree, info.nx.nxTransUnApply(cur))
@@ -970,13 +945,6 @@ object TreeTransforms {
val qual = transform(tree.qualifier, mutatedInfo, cur)
goSelect(cpy.Select(tree)(qual, tree.name), mutatedInfo.nx.nxTransSelect(cur))
}
- case tree: SelectFromTypeTree =>
- implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForSelectFromTypeTree, info.nx.nxPrepSelectFromTypeTree, tree, cur)
- if (mutatedInfo eq null) tree
- else {
- val qual = transform(tree.qualifier, mutatedInfo, cur)
- goSelectFromTypeTree(cpy.SelectFromTypeTree(tree)(qual, tree.name), mutatedInfo.nx.nxTransSelectFromTypeTree(cur))
- }
case tree: Bind =>
implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForBind, info.nx.nxPrepBind, tree, cur)
if (mutatedInfo eq null) tree
@@ -1055,14 +1023,6 @@ object TreeTransforms {
val tpt = transform(tree.tpt, mutatedInfo, cur)
goNew(cpy.New(tree)(tpt), mutatedInfo.nx.nxTransNew(cur))
}
- case tree: Pair =>
- implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForPair, info.nx.nxPrepPair, tree, cur)
- if (mutatedInfo eq null) tree
- else {
- val left = transform(tree.left, mutatedInfo, cur)
- val right = transform(tree.right, mutatedInfo, cur)
- goPair(cpy.Pair(tree)(left, right), mutatedInfo.nx.nxTransPair(cur))
- }
case tree: Typed =>
implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTyped, info.nx.nxPrepTyped, tree, cur)
if (mutatedInfo eq null) tree
@@ -1151,6 +1111,14 @@ object TreeTransforms {
val elemtpt = transform(tree.elemtpt, mutatedInfo, cur)
goSeqLiteral(cpy.SeqLiteral(tree)(elems, elemtpt), mutatedInfo.nx.nxTransSeqLiteral(cur))
}
+ case tree: Inlined =>
+ implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForInlined, info.nx.nxPrepInlined, tree, cur)
+ if (mutatedInfo eq null) tree
+ else {
+ val bindings = transformSubTrees(tree.bindings, mutatedInfo, cur)
+ val expansion = transform(tree.expansion, mutatedInfo, cur)(inlineContext(tree))
+ goInlined(cpy.Inlined(tree)(tree.call, bindings, expansion), mutatedInfo.nx.nxTransInlined(cur))
+ }
case tree: TypeTree =>
implicit val mutatedInfo: TransformerInfo = mutateTransformers(info, prepForTypeTree, info.nx.nxPrepTypeTree, tree, cur)
if (mutatedInfo eq null) tree
diff --git a/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
new file mode 100644
index 000000000..9a6ecef51
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/TryCatchPatterns.scala
@@ -0,0 +1,99 @@
+package dotty.tools.dotc
+package transform
+
+import core.Symbols._
+import core.StdNames._
+import ast.Trees._
+import core.Types._
+import dotty.tools.dotc.core.Decorators._
+import dotty.tools.dotc.core.Flags
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.transform.TreeTransforms.{MiniPhaseTransform, TransformerInfo}
+import dotty.tools.dotc.util.Positions.Position
+
+/** Compiles the cases that cannot be handled by primitive catch cases as a common pattern match.
+ *
+ * The following code:
+ * ```
+ * try { <code> }
+ * catch {
+ * <tryCases> // Cases that can be handled by catch
+ * <patternMatchCases> // Cases starting with the first one that can't be handled by catch
+ * }
+ * ```
+ * will become:
+ * ```
+ * try { <code> }
+ * catch {
+ * <tryCases>
+ * case e => e match {
+ * <patternMatchCases>
+ * }
+ * }
+ * ```
+ *
+ * Cases that are not supported include:
+ * - Applies and unapplies
+ * - Idents
+ * - Alternatives
+ * - `case _: T =>` where `T` is not `Throwable`
+ *
+ */
+class TryCatchPatterns extends MiniPhaseTransform {
+ import dotty.tools.dotc.ast.tpd._
+
+ def phaseName: String = "tryCatchPatterns"
+
+ override def runsAfter = Set(classOf[ElimRepeated])
+
+ override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match {
+ case Try(_, cases, _) =>
+ cases.foreach {
+ case CaseDef(Typed(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.")
+ case CaseDef(Bind(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.")
+ case c =>
+ assert(isDefaultCase(c), "Pattern in Try should be Bind, Typed or default case.")
+ }
+ case _ =>
+ }
+
+ override def transformTry(tree: Try)(implicit ctx: Context, info: TransformerInfo): Tree = {
+ val (tryCases, patternMatchCases) = tree.cases.span(isCatchCase)
+ val fallbackCase = mkFallbackPatternMatchCase(patternMatchCases, tree.pos)
+ cpy.Try(tree)(cases = tryCases ++ fallbackCase)
+ }
+
+ /** Is this pattern node a catch-all or type-test pattern? */
+ private def isCatchCase(cdef: CaseDef)(implicit ctx: Context): Boolean = cdef match {
+ case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => isSimpleThrowable(tpt.tpe)
+ case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => isSimpleThrowable(tpt.tpe)
+ case _ => isDefaultCase(cdef)
+ }
+
+ private def isSimpleThrowable(tp: Type)(implicit ctx: Context): Boolean = tp match {
+ case tp @ TypeRef(pre, _) =>
+ (pre == NoPrefix || pre.widen.typeSymbol.isStatic) && // Does not require outer class check
+ !tp.symbol.is(Flags.Trait) && // Traits not supported by JVM
+ tp.derivesFrom(defn.ThrowableClass)
+ case _ =>
+ false
+ }
+
+ private def mkFallbackPatternMatchCase(patternMatchCases: List[CaseDef], pos: Position)(
+ implicit ctx: Context, info: TransformerInfo): Option[CaseDef] = {
+ if (patternMatchCases.isEmpty) None
+ else {
+ val exName = ctx.freshName("ex").toTermName
+ val fallbackSelector =
+ ctx.newSymbol(ctx.owner, exName, Flags.Synthetic | Flags.Case, defn.ThrowableType, coord = pos)
+ val sel = Ident(fallbackSelector.termRef).withPos(pos)
+ val rethrow = CaseDef(EmptyTree, EmptyTree, Throw(ref(fallbackSelector)))
+ Some(CaseDef(
+ Bind(fallbackSelector, Underscore(fallbackSelector.info).withPos(pos)),
+ EmptyTree,
+ transformFollowing(Match(sel, patternMatchCases ::: rethrow :: Nil)))
+ )
+ }
+ }
+
+}
diff --git a/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
index c57d6fd1a..3774127fa 100644
--- a/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
+++ b/src/dotty/tools/dotc/transform/TypeTestsCasts.scala
@@ -1,16 +1,12 @@
package dotty.tools.dotc
package transform
-import TreeTransforms._
-import core.Denotations._
-import core.SymDenotations._
import core.Contexts._
import core.Symbols._
import core.Types._
import core.Constants._
import core.StdNames._
import core.TypeErasure.isUnboundedGeneric
-import typer.ErrorReporting._
import ast.Trees._
import Erasure.Boxing._
import core.TypeErasure._
@@ -92,14 +88,33 @@ trait TypeTestsCasts {
unbox(qual.ensureConforms(defn.ObjectType), argType)
else if (isDerivedValueClass(argCls)) {
qual // adaptToType in Erasure will do the necessary type adaptation
- } else
+ }
+ else
derivedTree(qual, defn.Any_asInstanceOf, argType)
}
- def erasedArg = erasure(tree.args.head.tpe)
+
+ /** Transform isInstanceOf OrType
+ *
+ * expr.isInstanceOf[A | B] ~~> expr.isInstanceOf[A] | expr.isInstanceOf[B]
+ *
+ * The transform happens before erasure of `argType`, thus cannot be merged
+ * with `transformIsInstanceOf`, which depends on erased type of `argType`.
+ */
+ def transformOrTypeTest(qual: Tree, argType: Type): Tree = argType.dealias match {
+ case OrType(tp1, tp2) =>
+ evalOnce(qual) { fun =>
+ transformOrTypeTest(fun, tp1)
+ .select(nme.OR)
+ .appliedTo(transformOrTypeTest(fun, tp2))
+ }
+ case _ =>
+ transformIsInstanceOf(qual, erasure(argType))
+ }
+
if (sym eq defn.Any_isInstanceOf)
- transformIsInstanceOf(qual, erasedArg)
+ transformOrTypeTest(qual, tree.args.head.tpe)
else if (sym eq defn.Any_asInstanceOf)
- transformAsInstanceOf(erasedArg)
+ transformAsInstanceOf(erasure(tree.args.head.tpe))
else tree
case _ =>
diff --git a/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/src/dotty/tools/dotc/transform/VCInlineMethods.scala
index 1c2b015a1..ddd414417 100644
--- a/src/dotty/tools/dotc/transform/VCInlineMethods.scala
+++ b/src/dotty/tools/dotc/transform/VCInlineMethods.scala
@@ -90,7 +90,7 @@ class VCInlineMethods extends MiniPhaseTransform with IdentityDenotTransformer {
tree // The rewiring will be handled by a fully-applied parent node
case _ =>
if (isMethodWithExtension(tree.symbol))
- rewire(tree)
+ rewire(tree).ensureConforms(tree.tpe)
else
tree
}
diff --git a/src/dotty/tools/dotc/transform/patmat/Space.scala b/src/dotty/tools/dotc/transform/patmat/Space.scala
new file mode 100644
index 000000000..830d0f938
--- /dev/null
+++ b/src/dotty/tools/dotc/transform/patmat/Space.scala
@@ -0,0 +1,619 @@
+package dotty.tools.dotc
+package transform
+package patmat
+
+import core.Types._
+import core.Contexts._
+import core.Flags._
+import ast.Trees._
+import ast.tpd
+import core.Decorators._
+import core.Symbols._
+import core.StdNames._
+import core.NameOps._
+import core.Constants._
+
+/** Space logic for checking exhaustivity and unreachability of pattern matching
+ *
+ * Space can be thought of as a set of possible values. A type or a pattern
+ * both refer to spaces. The space of a type is the values that inhabit the
+ * type. The space of a pattern is the values that can be covered by the
+ * pattern.
+ *
+ * Space is recursively defined as follows:
+ *
+ * 1. `Empty` is a space
+ * 2. For a type T, `Typ(T)` is a space
+ * 3. A union of spaces `S1 | S2 | ...` is a space
+ * 4. For a case class Kon(x1: T1, x2: T2, .., xn: Tn), if S1, S2, ..., Sn
+ * are spaces, then `Kon(S1, S2, ..., Sn)` is a space.
+ * 5. A constant `Const(value, T)` is a point in space
+ * 6. A stable identifier `Var(sym, T)` is a space
+ *
+ * For the problem of exhaustivity check, its formulation in terms of space is as follows:
+ *
+ * Is the space Typ(T) a subspace of the union of space covered by all the patterns?
+ *
+ * The problem of unreachable patterns can be formulated as follows:
+ *
+ * Is the space covered by a pattern a subspace of the space covered by previous patterns?
+ *
+ * Assumption:
+ * (1) One case class cannot be inherited directly or indirectly by another
+ * case class.
+ * (2) Inheritance of a case class cannot be well handled by the algorithm.
+ *
+ */
+
+
+/** space definition */
+sealed trait Space
+
+/** Empty space */
+case object Empty extends Space
+
+/** Space representing the set of all values of a type
+ *
+ * @param tp: the type this space represents
+ * @param decomposed: does the space result from decomposition? Used for pretty print
+ *
+ */
+case class Typ(tp: Type, decomposed: Boolean) extends Space
+
+/** Space representing a constructor pattern */
+case class Kon(tp: Type, params: List[Space]) extends Space
+
+/** Union of spaces */
+case class Or(spaces: List[Space]) extends Space
+
+/** Point in space */
+sealed trait Point extends Space
+
+/** Point representing variables (stable identifiers) in patterns */
+case class Var(sym: Symbol, tp: Type) extends Point
+
+/** Point representing literal constants in patterns */
+case class Const(value: Constant, tp: Type) extends Point
+
+/** abstract space logic */
+trait SpaceLogic {
+ /** Is `tp1` a subtype of `tp2`? */
+ def isSubType(tp1: Type, tp2: Type): Boolean
+
+ /** Is `tp1` the same type as `tp2`? */
+ def isEqualType(tp1: Type, tp2: Type): Boolean
+
+ /** Is the type `tp` decomposable? i.e. all values of the type can be covered
+ * by its decomposed types.
+ *
+ * Abstract sealed class, OrType, Boolean and Java enums can be decomposed.
+ */
+ def canDecompose(tp: Type): Boolean
+
+ /** Return term parameter types of the case class `tp` */
+ def signature(tp: Type): List[Type]
+
+ /** Get components of decomposable types */
+ def decompose(tp: Type): List[Space]
+
+ /** Simplify space using the laws, there's no nested union after simplify */
+ def simplify(space: Space): Space = space match {
+ case Kon(tp, spaces) =>
+ val sp = Kon(tp, spaces.map(simplify _))
+ if (sp.params.contains(Empty)) Empty
+ else sp
+ case Or(spaces) =>
+ val set = spaces.map(simplify _).flatMap {
+ case Or(ss) => ss
+ case s => Seq(s)
+ } filter (_ != Empty)
+
+ if (set.isEmpty) Empty
+ else if (set.size == 1) set.toList(0)
+ else Or(set)
+ case Typ(tp, _) =>
+ if (canDecompose(tp) && decompose(tp).isEmpty) Empty
+ else space
+ case _ => space
+ }
+
+ /** Flatten space to get rid of `Or` for pretty print */
+ def flatten(space: Space): List[Space] = space match {
+ case Kon(tp, spaces) =>
+ val flats = spaces.map(flatten _)
+
+ flats.foldLeft(List[Kon]()) { (acc, flat) =>
+ if (acc.isEmpty) flat.map(s => Kon(tp, Nil :+ s))
+ else for (Kon(tp, ss) <- acc; s <- flat) yield Kon(tp, ss :+ s)
+ }
+ case Or(spaces) =>
+ spaces.flatMap(flatten _)
+ case _ => List(space)
+ }
+
+ /** Is `a` a subspace of `b`? Equivalent to `a - b == Empty`, but faster */
+ def isSubspace(a: Space, b: Space): Boolean = {
+ def tryDecompose1(tp: Type) = canDecompose(tp) && isSubspace(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = canDecompose(tp) && isSubspace(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) => true
+ case (_, Empty) => false
+ case (Or(ss), _) => ss.forall(isSubspace(_, b))
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ isSubType(tp1, tp2) || tryDecompose1(tp1) || tryDecompose2(tp2)
+ case (Typ(tp1, _), Or(ss)) =>
+ ss.exists(isSubspace(a, _)) || tryDecompose1(tp1)
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ isSubType(tp1, tp2) && isSubspace(Kon(tp2, signature(tp2).map(Typ(_, false))), b) ||
+ tryDecompose1(tp1)
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ isSubType(tp1, tp2) ||
+ simplify(a) == Empty ||
+ (isSubType(tp2, tp1) && tryDecompose1(tp1)) ||
+ tryDecompose2(tp2)
+ case (Kon(_, _), Or(_)) =>
+ simplify(minus(a, b)) == Empty
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ isEqualType(tp1, tp2) && ss1.zip(ss2).forall((isSubspace _).tupled)
+ case (Const(v1, _), Const(v2, _)) => v1 == v2
+ case (Const(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2)
+ case (Const(_, _), Or(ss)) => ss.exists(isSubspace(a, _))
+ case (Const(_, _), _) => false
+ case (_, Const(_, _)) => false
+ case (Var(x, _), Var(y, _)) => x == y
+ case (Var(_, tp1), Typ(tp2, _)) => isSubType(tp1, tp2) || tryDecompose2(tp2)
+ case (Var(_, _), Or(ss)) => ss.exists(isSubspace(a, _))
+ case (Var(_, _), _) => false
+ case (_, Var(_, _)) => false
+ }
+ }
+
+ /** Intersection of two spaces */
+ def intersect(a: Space, b: Space): Space = {
+ def tryDecompose1(tp: Type) = intersect(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = intersect(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) | (_, Empty) => Empty
+ case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filterConserve(_ ne Empty))
+ case (Or(ss), _) => Or(ss.map(intersect(_, b)).filterConserve(_ ne Empty))
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (isSubType(tp1, tp2)) a // problematic corner case: inheriting a case class
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (isSubType(tp2, tp1)) a // problematic corner case: inheriting a case class
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ if (!isEqualType(tp1, tp2)) Empty
+ else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) Empty
+ else Kon(tp1, ss1.zip(ss2).map((intersect _).tupled))
+ case (Const(v1, _), Const(v2, _)) =>
+ if (v1 == v2) a else Empty
+ case (Const(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Const(_, _), _) => Empty
+ case (Typ(tp1, _), Const(_, tp2)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (_, Const(_, _)) => Empty
+ case (Var(x, _), Var(y, _)) =>
+ if (x == y) a else Empty
+ case (Var(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) a
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else Empty
+ case (Var(_, _), _) => Empty
+ case (Typ(tp1, _), Var(_, tp2)) =>
+ if (isSubType(tp2, tp1)) b
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else Empty
+ case (_, Var(_, _)) => Empty
+ }
+ }
+
+ /** The space of a not covered by b */
+ def minus(a: Space, b: Space): Space = {
+ def tryDecompose1(tp: Type) = minus(Or(decompose(tp)), b)
+ def tryDecompose2(tp: Type) = minus(a, Or(decompose(tp)))
+
+ (a, b) match {
+ case (Empty, _) => Empty
+ case (_, Empty) => a
+ case (Typ(tp1, _), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Typ(tp1, _), Kon(tp2, ss)) =>
+ // corner case: inheriting a case class
+ // rationale: every instance of `tp1` is covered by `tp2(_)`
+ if (isSubType(tp1, tp2)) minus(Kon(tp2, signature(tp2).map(Typ(_, false))), b)
+ else if (canDecompose(tp1)) tryDecompose1(tp1)
+ else a
+ case (_, Or(ss)) =>
+ ss.foldLeft(a)(minus)
+ case (Or(ss), _) =>
+ Or(ss.map(minus(_, b)))
+ case (Kon(tp1, ss), Typ(tp2, _)) =>
+        // uncovered corner case: tp2 <: tp1
+ if (isSubType(tp1, tp2)) Empty
+ else if (simplify(a) == Empty) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Kon(tp1, ss1), Kon(tp2, ss2)) =>
+ if (!isEqualType(tp1, tp2)) a
+ else if (ss1.zip(ss2).exists(p => simplify(intersect(p._1, p._2)) == Empty)) a
+ else if (ss1.zip(ss2).forall((isSubspace _).tupled)) Empty
+ else
+ // `(_, _, _) - (Some, None, _)` becomes `(None, _, _) | (_, Some, _) | (_, _, Empty)`
+ Or(ss1.zip(ss2).map((minus _).tupled).zip(0 to ss2.length - 1).map {
+ case (ri, i) => Kon(tp1, ss1.updated(i, ri))
+ })
+ case (Const(v1, _), Const(v2, _)) =>
+ if (v1 == v2) Empty else a
+ case (Const(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Const(_, _), _) => a
+ case (Typ(tp1, _), Const(_, tp2)) => // Boolean & Java enum
+ if (canDecompose(tp1)) tryDecompose1(tp1)
+ else a
+ case (_, Const(_, _)) => a
+ case (Var(x, _), Var(y, _)) =>
+ if (x == y) Empty else a
+ case (Var(_, tp1), Typ(tp2, _)) =>
+ if (isSubType(tp1, tp2)) Empty
+ else if (canDecompose(tp2)) tryDecompose2(tp2)
+ else a
+ case (Var(_, _), _) => a
+ case (_, Var(_, _)) => a
+ }
+ }
+}
+
+/** Scala implementation of space logic */
+class SpaceEngine(implicit ctx: Context) extends SpaceLogic {
+ import tpd._
+
+ /** Return the space that represents the pattern `pat`
+ *
+   *  If roundUp is true, approximate each extractor to its type,
+ * otherwise approximate extractors to Empty
+ */
+ def project(pat: Tree, roundUp: Boolean = true)(implicit ctx: Context): Space = pat match {
+ case Literal(c) => Const(c, c.tpe)
+ case _: BackquotedIdent => Var(pat.symbol, pat.tpe)
+ case Ident(_) | Select(_, _) =>
+ pat.tpe.stripAnnots match {
+ case tp: TermRef =>
+ if (pat.symbol.is(Enum))
+ Const(Constant(pat.symbol), tp)
+ else if (tp.underlyingIterator.exists(_.classSymbol.is(Module)))
+ Typ(tp.widenTermRefExpr.stripAnnots, false)
+ else
+ Var(pat.symbol, tp)
+ case tp => Typ(tp, false)
+ }
+ case Alternative(trees) => Or(trees.map(project(_, roundUp)))
+ case Bind(_, pat) => project(pat)
+ case UnApply(_, _, pats) =>
+ if (pat.tpe.classSymbol.is(CaseClass))
+ Kon(pat.tpe.stripAnnots, pats.map(pat => project(pat, roundUp)))
+ else if (roundUp) Typ(pat.tpe.stripAnnots, false)
+ else Empty
+ case Typed(pat @ UnApply(_, _, _), _) => project(pat)
+ case Typed(expr, _) => Typ(expr.tpe.stripAnnots, true)
+ case _ =>
+ Empty
+ }
+
+ /* Erase a type binding according to erasure semantics in pattern matching */
+ def erase(tp: Type): Type = {
+ def doErase(tp: Type): Type = tp match {
+ case tp: HKApply => erase(tp.superType)
+ case tp: RefinedType => erase(tp.parent)
+ case _ => tp
+ }
+
+ tp match {
+ case OrType(tp1, tp2) =>
+ OrType(erase(tp1), erase(tp2))
+ case AndType(tp1, tp2) =>
+ AndType(erase(tp1), erase(tp2))
+ case _ =>
+ val origin = doErase(tp)
+ if (origin =:= defn.ArrayType) tp else origin
+ }
+ }
+
+ /** Is `tp1` a subtype of `tp2`? */
+ def isSubType(tp1: Type, tp2: Type): Boolean = {
+ // check SI-9657 and tests/patmat/gadt.scala
+ erase(tp1) <:< erase(tp2)
+ }
+
+ def isEqualType(tp1: Type, tp2: Type): Boolean = tp1 =:= tp2
+
+ /** Parameter types of the case class type `tp` */
+ def signature(tp: Type): List[Type] = {
+ val ktor = tp.classSymbol.primaryConstructor.info
+
+ val meth = ktor match {
+ case ktor: PolyType =>
+ ktor.instantiate(tp.classSymbol.typeParams.map(_.typeRef)).asSeenFrom(tp, tp.classSymbol)
+ case _ => ktor
+ }
+
+ // refine path-dependent type in params. refer to t9672
+ meth.firstParamTypes.map(_.asSeenFrom(tp, tp.classSymbol))
+ }
+
+ /** Decompose a type into subspaces -- assume the type can be decomposed */
+ def decompose(tp: Type): List[Space] = {
+ val children = tp.classSymbol.annotations.filter(_.symbol == ctx.definitions.ChildAnnot).map { annot =>
+ // refer to definition of Annotation.makeChild
+ annot.tree match {
+ case Apply(TypeApply(_, List(tpTree)), _) => tpTree.symbol
+ }
+ }
+
+ tp match {
+ case OrType(tp1, tp2) => List(Typ(tp1, true), Typ(tp2, true))
+ case _ if tp =:= ctx.definitions.BooleanType =>
+ List(
+ Const(Constant(true), ctx.definitions.BooleanType),
+ Const(Constant(false), ctx.definitions.BooleanType)
+ )
+ case _ if tp.classSymbol.is(Enum) =>
+ children.map(sym => Const(Constant(sym), tp))
+ case _ =>
+ val parts = children.map { sym =>
+ if (sym.is(ModuleClass))
+ sym.asClass.classInfo.selfType
+ else if (sym.info.typeParams.length > 0 || tp.isInstanceOf[TypeRef])
+ refine(tp, sym.typeRef)
+ else
+ sym.typeRef
+ } filter { tpe =>
+ // Child class may not always be subtype of parent:
+ // GADT & path-dependent types
+ tpe <:< expose(tp)
+ }
+
+ parts.map(Typ(_, true))
+ }
+ }
+
+ /** Refine tp2 based on tp1
+ *
+ * E.g. if `tp1` is `Option[Int]`, `tp2` is `Some`, then return
+ * `Some[Int]`.
+ *
+ * If `tp1` is `path1.A`, `tp2` is `path2.B`, and `path1` is subtype of
+ * `path2`, then return `path1.B`.
+ */
+ def refine(tp1: Type, tp2: Type): Type = (tp1, tp2) match {
+ case (tp1: RefinedType, _) => tp1.wrapIfMember(refine(tp1.parent, tp2))
+ case (tp1: HKApply, _) => refine(tp1.superType, tp2)
+ case (TypeRef(ref1: TypeProxy, _), tp2 @ TypeRef(ref2: TypeProxy, name)) =>
+ if (ref1.underlying <:< ref2.underlying) TypeRef(ref1, name) else tp2
+ case _ => tp2
+ }
+
+ /** Abstract sealed types, or-types, Boolean and Java enums can be decomposed */
+ def canDecompose(tp: Type): Boolean = {
+ tp.classSymbol.is(allOf(Abstract, Sealed)) ||
+ tp.classSymbol.is(allOf(Trait, Sealed)) ||
+ tp.isInstanceOf[OrType] ||
+ tp =:= ctx.definitions.BooleanType ||
+ tp.classSymbol.is(Enum)
+ }
+
+ /** Show friendly type name with current scope in mind
+ *
+ * E.g. C.this.B --> B if current owner is C
+ * C.this.x.T --> x.T if current owner is C
+ * X[T] --> X
+ * C --> C if current owner is C !!!
+ *
+ */
+ def showType(tp: Type): String = {
+ val enclosingCls = ctx.owner.enclosingClass.asClass.classInfo.symbolicTypeRef
+
+ def isOmittable(sym: Symbol) =
+ sym.isEffectiveRoot || sym.isAnonymousClass || sym.name.isReplWrapperName ||
+ ctx.definitions.UnqualifiedOwnerTypes.exists(_.symbol == sym) ||
+ sym.showFullName.startsWith("scala.") ||
+ sym == enclosingCls.typeSymbol
+
+ def refinePrefix(tp: Type): String = tp match {
+ case NoPrefix => ""
+ case tp: NamedType if isOmittable(tp.symbol) => ""
+ case tp: ThisType => refinePrefix(tp.tref)
+ case tp: RefinedType => refinePrefix(tp.parent)
+ case tp: NamedType => tp.name.show.stripSuffix("$")
+ }
+
+ def refine(tp: Type): String = tp match {
+ case tp: RefinedType => refine(tp.parent)
+ case tp: ThisType => refine(tp.tref)
+ case tp: NamedType =>
+ val pre = refinePrefix(tp.prefix)
+ if (tp.name == tpnme.higherKinds) pre
+ else if (pre.isEmpty) tp.name.show.stripSuffix("$")
+ else pre + "." + tp.name.show.stripSuffix("$")
+ case _ => tp.show.stripSuffix("$")
+ }
+
+ val text = tp.stripAnnots match {
+ case tp: OrType => showType(tp.tp1) + " | " + showType(tp.tp2)
+ case tp => refine(tp)
+ }
+
+ if (text.isEmpty) enclosingCls.show.stripSuffix("$")
+ else text
+ }
+
+ /** Display spaces */
+ def show(s: Space): String = {
+ def doShow(s: Space, mergeList: Boolean = false): String = s match {
+ case Empty => ""
+ case Const(v, _) => v.show
+ case Var(x, _) => x.show
+ case Typ(tp, decomposed) =>
+ val sym = tp.widen.classSymbol
+
+ if (sym.is(ModuleClass))
+ showType(tp)
+ else if (ctx.definitions.isTupleType(tp))
+ signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (sym.showFullName == "scala.collection.immutable.::")
+ if (mergeList) "_" else "List(_)"
+ else if (tp.classSymbol.is(CaseClass))
+ // use constructor syntax for case class
+ showType(tp) + signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (signature(tp).nonEmpty)
+ tp.classSymbol.name + signature(tp).map(_ => "_").mkString("(", ", ", ")")
+ else if (decomposed) "_: " + showType(tp)
+ else "_"
+ case Kon(tp, params) =>
+ if (ctx.definitions.isTupleType(tp))
+ "(" + params.map(doShow(_)).mkString(", ") + ")"
+ else if (tp.widen.classSymbol.showFullName == "scala.collection.immutable.::")
+ if (mergeList) params.map(doShow(_, mergeList)).mkString(", ")
+ else params.map(doShow(_, true)).filter(_ != "Nil").mkString("List(", ", ", ")")
+ else
+ showType(tp) + params.map(doShow(_)).mkString("(", ", ", ")")
+ case Or(_) =>
+ throw new Exception("incorrect flatten result " + s)
+ }
+
+ flatten(s).map(doShow(_, false)).distinct.mkString(", ")
+ }
+
+ def checkable(tree: Match): Boolean = {
+ def isCheckable(tp: Type): Boolean = tp match {
+ case AnnotatedType(tp, annot) =>
+ (ctx.definitions.UncheckedAnnot != annot.symbol) && isCheckable(tp)
+ case _ =>
+ // Possible to check everything, but be compatible with scalac by default
+ ctx.settings.YcheckAllPatmat.value ||
+ tp.typeSymbol.is(Sealed) ||
+ tp.isInstanceOf[OrType] ||
+ tp.typeSymbol == ctx.definitions.BooleanType.typeSymbol ||
+ tp.typeSymbol.is(Enum) ||
+ canDecompose(tp) ||
+ (defn.isTupleType(tp) && tp.dealias.argInfos.exists(isCheckable(_)))
+ }
+
+ val Match(sel, cases) = tree
+ isCheckable(sel.tpe.widen.deAnonymize.dealiasKeepAnnots)
+ }
+
+
+ /** Expose refined type to eliminate reference to type variables
+ *
+ * A = B M { type T = A } ~~> M { type T = B }
+ *
+ * A <: X :> Y M { type T = A } ~~> M { type T <: X :> Y }
+ *
+ * A <: X :> Y B <: U :> V M { type T <: A :> B } ~~> M { type T <: X :> V }
+ *
+ * A = X B = Y M { type T <: A :> B } ~~> M { type T <: X :> Y }
+ */
+ def expose(tp: Type): Type = {
+ def follow(tp: Type, up: Boolean): Type = tp match {
+ case tp: TypeProxy =>
+ tp.underlying match {
+ case TypeBounds(lo, hi) =>
+ follow(if (up) hi else lo, up)
+ case _ =>
+ tp
+ }
+ case OrType(tp1, tp2) =>
+ OrType(follow(tp1, up), follow(tp2, up))
+ case AndType(tp1, tp2) =>
+ AndType(follow(tp1, up), follow(tp2, up))
+ }
+
+ tp match {
+ case tp: RefinedType =>
+ tp.refinedInfo match {
+ case tpa : TypeAlias =>
+ val hi = follow(tpa.alias, true)
+ val lo = follow(tpa.alias, false)
+ val refined = if (hi =:= lo)
+ tpa.derivedTypeAlias(hi)
+ else
+ tpa.derivedTypeBounds(lo, hi)
+
+ tp.derivedRefinedType(
+ expose(tp.parent),
+ tp.refinedName,
+ refined
+ )
+ case tpb @ TypeBounds(lo, hi) =>
+ tp.derivedRefinedType(
+ expose(tp.parent),
+ tp.refinedName,
+ tpb.derivedTypeBounds(follow(lo, false), follow(hi, true))
+ )
+ }
+ case _ => tp
+ }
+ }
+
+ def checkExhaustivity(_match: Match): Unit = {
+ val Match(sel, cases) = _match
+ val selTyp = sel.tpe.widen.deAnonymize.dealias
+
+
+ val patternSpace = cases.map(x => project(x.pat)).reduce((a, b) => Or(List(a, b)))
+ val uncovered = simplify(minus(Typ(selTyp, true), patternSpace))
+
+ if (uncovered != Empty) {
+ ctx.warning(
+ "match may not be exhaustive.\n" +
+ s"It would fail on the following input: " +
+ show(uncovered), _match.pos
+ )
+ }
+ }
+
+ def checkRedundancy(_match: Match): Unit = {
+ val Match(sel, cases) = _match
+ // ignore selector type for now
+ // val selTyp = sel.tpe.widen.deAnonymize.dealias
+
+ // starts from the second, the first can't be redundant
+ (1 until cases.length).foreach { i =>
+ // in redundancy check, take guard as false, take extractor as match
+ // nothing in order to soundly approximate
+ val prevs = cases.take(i).map { x =>
+ if (x.guard.isEmpty) project(x.pat, false)
+ else Empty
+ }.reduce((a, b) => Or(List(a, b)))
+
+ val curr = project(cases(i).pat)
+
+ if (isSubspace(curr, prevs)) {
+ ctx.warning("unreachable code", cases(i).body.pos)
+ }
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/typer/Applications.scala b/src/dotty/tools/dotc/typer/Applications.scala
index f3903e539..56595a637 100644
--- a/src/dotty/tools/dotc/typer/Applications.scala
+++ b/src/dotty/tools/dotc/typer/Applications.scala
@@ -17,20 +17,22 @@ import Types._
import Decorators._
import ErrorReporting._
import Trees._
+import config.Config
import Names._
import StdNames._
import ProtoTypes._
import EtaExpansion._
import Inferencing._
import collection.mutable
-import config.Printers._
+import config.Printers.{typr, unapp, overload}
import TypeApplications._
import language.implicitConversions
+import reporting.diagnostic.Message
object Applications {
import tpd._
- def extractorMemberType(tp: Type, name: Name, errorPos: Position = NoPosition)(implicit ctx:Context) = {
+ def extractorMemberType(tp: Type, name: Name, errorPos: Position = NoPosition)(implicit ctx: Context) = {
val ref = tp.member(name).suchThat(_.info.isParameterless)
if (ref.isOverloaded)
errorType(i"Overloaded reference to $ref is not allowed in extractor", errorPos)
@@ -40,12 +42,12 @@ object Applications {
ref.info.widenExpr.dealias
}
- def productSelectorTypes(tp: Type, errorPos: Position = NoPosition)(implicit ctx:Context): List[Type] = {
+ def productSelectorTypes(tp: Type, errorPos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
val sels = for (n <- Iterator.from(0)) yield extractorMemberType(tp, nme.selectorName(n), errorPos)
sels.takeWhile(_.exists).toList
}
- def productSelectors(tp: Type)(implicit ctx:Context): List[Symbol] = {
+ def productSelectors(tp: Type)(implicit ctx: Context): List[Symbol] = {
val sels = for (n <- Iterator.from(0)) yield tp.member(nme.selectorName(n)).symbol
sels.takeWhile(_.exists).toList
}
@@ -57,7 +59,7 @@ object Applications {
else tp :: Nil
} else tp :: Nil
- def unapplyArgs(unapplyResult: Type, unapplyFn:Tree, args:List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
+ def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: Position = NoPosition)(implicit ctx: Context): List[Type] = {
def seqSelector = defn.RepeatedParamType.appliedTo(unapplyResult.elemType :: Nil)
def getTp = extractorMemberType(unapplyResult, nme.get, pos)
@@ -66,7 +68,7 @@ object Applications {
if (extractorMemberType(unapplyResult, nme.isDefined, pos) isRef defn.BooleanClass) {
if (getTp.exists)
if (unapplyFn.symbol.name == nme.unapplySeq) {
- val seqArg = boundsToHi(getTp.firstBaseArgInfo(defn.SeqClass))
+ val seqArg = boundsToHi(getTp.elemType)
if (seqArg.exists) return args map Function.const(seqArg)
}
else return getUnapplySelectors(getTp, args, pos)
@@ -86,11 +88,12 @@ object Applications {
import Applications._
-trait Applications extends Compatibility { self: Typer =>
+trait Applications extends Compatibility { self: Typer with Dynamic =>
import Applications._
import tpd.{ cpy => _, _ }
import untpd.cpy
+ import Dynamic.isDynamicMethod
/** @tparam Arg the type of arguments, could be tpd.Tree, untpd.Tree, or Type
* @param methRef the reference to the method of the application
@@ -130,10 +133,10 @@ trait Applications extends Compatibility { self: Typer =>
protected def harmonizeArgs(args: List[TypedArg]): List[TypedArg]
/** Signal failure with given message at position of given argument */
- protected def fail(msg: => String, arg: Arg): Unit
+ protected def fail(msg: => Message, arg: Arg): Unit
/** Signal failure with given message at position of the application itself */
- protected def fail(msg: => String): Unit
+ protected def fail(msg: => Message): Unit
protected def appPos: Position
@@ -184,7 +187,7 @@ trait Applications extends Compatibility { self: Typer =>
// it might be healed by an implicit conversion
assert(ctx.typerState.constraint eq savedConstraint)
else
- fail(err.typeMismatchStr(methType.resultType, resultType))
+ fail(err.typeMismatchMsg(methType.resultType, resultType))
}
// match all arguments with corresponding formal parameters
matchArgs(orderedArgs, methType.paramTypes, 0)
@@ -386,9 +389,9 @@ trait Applications extends Compatibility { self: Typer =>
def addArg(arg: TypedArg, formal: Type) =
ok = ok & isCompatible(argType(arg, formal), formal)
def makeVarArg(n: Int, elemFormal: Type) = {}
- def fail(msg: => String, arg: Arg) =
+ def fail(msg: => Message, arg: Arg) =
ok = false
- def fail(msg: => String) =
+ def fail(msg: => Message) =
ok = false
def appPos = NoPosition
lazy val normalizedFun = ref(methRef)
@@ -453,12 +456,12 @@ trait Applications extends Compatibility { self: Typer =>
override def appPos = app.pos
- def fail(msg: => String, arg: Trees.Tree[T]) = {
+ def fail(msg: => Message, arg: Trees.Tree[T]) = {
ctx.error(msg, arg.pos)
ok = false
}
- def fail(msg: => String) = {
+ def fail(msg: => Message) = {
ctx.error(msg, app.pos)
ok = false
}
@@ -531,43 +534,79 @@ trait Applications extends Compatibility { self: Typer =>
def treeToArg(arg: Tree): Tree = arg
}
+ /** If `app` is a `this(...)` constructor call, the this-call argument context,
+ * otherwise the current context.
+ */
+ def argCtx(app: untpd.Tree)(implicit ctx: Context): Context =
+ if (untpd.isSelfConstrCall(app)) ctx.thisCallArgContext else ctx
+
def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
def realApply(implicit ctx: Context): Tree = track("realApply") {
- def argCtx(implicit ctx: Context) =
- if (untpd.isSelfConstrCall(tree)) ctx.thisCallArgContext else ctx
- var proto = new FunProto(tree.args, IgnoredProto(pt), this)(argCtx)
- val fun1 = typedExpr(tree.fun, proto)
+ val originalProto = new FunProto(tree.args, IgnoredProto(pt), this)(argCtx(tree))
+ val fun1 = typedExpr(tree.fun, originalProto)
- // Warning: The following line is dirty and fragile. We record that auto-tupling was demanded as
- // a side effect in adapt. If it was, we assume the tupled proto-type in the rest of the application.
+ // Warning: The following lines are dirty and fragile. We record that auto-tupling was demanded as
+ // a side effect in adapt. If it was, we assume the tupled proto-type in the rest of the application,
+ // until, possibly, we have to fall back to insert an implicit on the qualifier.
// This crucially relies on he fact that `proto` is used only in a single call of `adapt`,
// otherwise we would get possible cross-talk between different `adapt` calls using the same
// prototype. A cleaner alternative would be to return a modified prototype from `adapt` together with
// a modified tree but this would be more convoluted and less efficient.
- if (proto.isTupled) proto = proto.tupled
-
- fun1.tpe match {
- case ErrorType => tree.withType(ErrorType)
- case _ => methPart(fun1).tpe match {
+ val proto = if (originalProto.isTupled) originalProto.tupled else originalProto
+
+ // If some of the application's arguments are function literals without explicitly declared
+ // parameter types, relate the normalized result type of the application with the
+ // expected type through `constrainResult`. This can add more constraints which
+ // help sharpen the inferred parameter types for the argument function literal(s).
+ // This tweak is needed to make i1378 compile.
+ if (tree.args.exists(untpd.isFunctionWithUnknownParamType(_)))
+ if (!constrainResult(fun1.tpe.widen, proto.derivedFunProto(resultType = pt)))
+ typr.println(i"result failure for $tree with type ${fun1.tpe.widen}, expected = $pt")
+
+ /** Type application where arguments come from prototype, and no implicits are inserted */
+ def simpleApply(fun1: Tree, proto: FunProto)(implicit ctx: Context): Tree =
+ methPart(fun1).tpe match {
case funRef: TermRef =>
- tryEither { implicit ctx =>
- val app =
- if (proto.argsAreTyped) new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt)
- else new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx)
- val result = app.result
- convertNewArray(ConstFold(result))
- } { (failedVal, failedState) =>
- val fun2 = tryInsertImplicitOnQualifier(fun1, proto)
- if (fun1 eq fun2) {
- failedState.commit()
- failedVal
- } else typedApply(
- cpy.Apply(tree)(untpd.TypedSplice(fun2), proto.typedArgs map untpd.TypedSplice), pt)
- }
+ val app =
+ if (proto.allArgTypesAreCurrent())
+ new ApplyToTyped(tree, fun1, funRef, proto.typedArgs, pt)
+ else
+ new ApplyToUntyped(tree, fun1, funRef, proto, pt)(argCtx(tree))
+ convertNewGenericArray(ConstFold(app.result))
case _ =>
handleUnexpectedFunType(tree, fun1)
}
+
+ /** Try same application with an implicit inserted around the qualifier of the function
+ * part. Return an optional value to indicate success.
+ */
+ def tryWithImplicitOnQualifier(fun1: Tree, proto: FunProto)(implicit ctx: Context): Option[Tree] =
+ tryInsertImplicitOnQualifier(fun1, proto) flatMap { fun2 =>
+ tryEither {
+ implicit ctx => Some(simpleApply(fun2, proto)): Option[Tree]
+ } {
+ (_, _) => None
+ }
+ }
+
+ fun1.tpe match {
+ case ErrorType => tree.withType(ErrorType)
+ case TryDynamicCallType => typedDynamicApply(tree, pt)
+ case _ =>
+ tryEither {
+ implicit ctx => simpleApply(fun1, proto)
+ } {
+ (failedVal, failedState) =>
+ def fail = { failedState.commit(); failedVal }
+ // Try once with original prototype and once (if different) with tupled one.
+ // The reason we need to try both is that the decision whether to use tupled
+ // or not was already taken but might have to be revised when an implicit
+ // is inserted on the qualifier.
+ tryWithImplicitOnQualifier(fun1, originalProto).getOrElse(
+ if (proto eq originalProto) fail
+ else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail))
+ }
}
}
@@ -579,7 +618,7 @@ trait Applications extends Compatibility { self: Typer =>
*
* { val xs = es; e' = e' + args }
*/
- def typedOpAssign: Tree = track("typedOpAssign") {
+ def typedOpAssign: Tree = track("typedOpAssign") {
val Apply(Select(lhs, name), rhss) = tree
val lhs1 = typedExpr(lhs)
val liftedDefs = new mutable.ListBuffer[Tree]
@@ -599,12 +638,22 @@ trait Applications extends Compatibility { self: Typer =>
failedVal
}
}
- else realApply
+ else {
+ val app = realApply
+ app match {
+ case Apply(fn @ Select(left, _), right :: Nil) if fn.hasType =>
+ val op = fn.symbol
+ if (op == defn.Any_== || op == defn.Any_!=)
+ checkCanEqual(left.tpe.widen, right.tpe.widen, app.pos)
+ case _ =>
+ }
+ app
+ }
}
/** Overridden in ReTyper to handle primitive operations that can be generated after erasure */
protected def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(implicit ctx: Context): Tree =
- throw new Error(s"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}")
+ throw new Error(i"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}")
def typedNamedArgs(args: List[untpd.Tree])(implicit ctx: Context) =
for (arg @ NamedArg(id, argtpt) <- args) yield {
@@ -614,29 +663,41 @@ trait Applications extends Compatibility { self: Typer =>
def typedTypeApply(tree: untpd.TypeApply, pt: Type)(implicit ctx: Context): Tree = track("typedTypeApply") {
val isNamed = hasNamedArg(tree.args)
- var typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_))
+ val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_))
val typedFn = typedExpr(tree.fun, PolyProto(typedArgs.tpes, pt))
typedFn.tpe.widen match {
case pt: PolyType =>
if (typedArgs.length <= pt.paramBounds.length && !isNamed)
- typedArgs = typedArgs.zipWithConserve(pt.paramBounds)(adaptTypeArg)
if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) {
val arg = typedArgs.head
checkClassType(arg.tpe, arg.pos, traitReq = false, stablePrefixReq = false)
}
case _ =>
}
- assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)
+ def tryDynamicTypeApply(): Tree = typedFn match {
+ case typedFn: Select if !pt.isInstanceOf[FunProto] => typedDynamicSelect(typedFn, typedArgs, pt)
+ case _ => tree.withType(TryDynamicCallType)
+ }
+ if (typedFn.tpe eq TryDynamicCallType) tryDynamicTypeApply()
+ else assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs)
}
- def adaptTypeArg(tree: tpd.Tree, bound: Type)(implicit ctx: Context): tpd.Tree =
- tree.withType(tree.tpe.etaExpandIfHK(bound))
-
- /** Rewrite `new Array[T](....)` trees to calls of newXYZArray methods. */
- def convertNewArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
- case Apply(TypeApply(tycon, targ :: Nil), args) if tycon.symbol == defn.ArrayConstructor =>
+ /** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray.
+ * It is performed during typer as creation of generic arrays needs a classTag.
+ * we rely on implicit search to find one.
+ */
+ def convertNewGenericArray(tree: tpd.Tree)(implicit ctx: Context): tpd.Tree = tree match {
+ case Apply(TypeApply(tycon, targs@(targ :: Nil)), args) if tycon.symbol == defn.ArrayConstructor =>
fullyDefinedType(tree.tpe, "array", tree.pos)
- tpd.cpy.Apply(tree)(newArray(targ, tree.pos), args)
+
+ def newGenericArrayCall =
+ ref(defn.DottyArraysModule)
+ .select(defn.newGenericArrayMethod).withPos(tree.pos)
+ .appliedToTypeTrees(targs).appliedToArgs(args)
+
+ if (TypeErasure.isUnboundedGeneric(targ.tpe))
+ newGenericArrayCall
+ else tree
case _ =>
tree
}
@@ -684,11 +745,11 @@ trait Applications extends Compatibility { self: Typer =>
// try first for non-overloaded, then for overloaded ocurrences
def tryWithName(name: TermName)(fallBack: Tree => Tree)(implicit ctx: Context): Tree =
tryEither {
- implicit ctx => typedExpr(untpd.Select(qual, name), genericProto)
+ implicit ctx => typedExpr(untpd.Select(qual, name), specificProto)
} {
(sel, _) =>
tryEither {
- implicit ctx => typedExpr(untpd.Select(qual, name), specificProto)
+ implicit ctx => typedExpr(untpd.Select(qual, name), genericProto)
} {
(_, _) => fallBack(sel)
}
@@ -710,34 +771,36 @@ trait Applications extends Compatibility { self: Typer =>
def fromScala2x = unapplyFn.symbol.exists && (unapplyFn.symbol.owner is Scala2x)
- /** Can `subtp` be made to be a subtype of `tp`, possibly by dropping some
- * refinements in `tp`?
+ /** Is `subtp` a subtype of `tp` or of some generalization of `tp`?
+ * The generalizations of a type T are the smallest set G such that
+ *
+ * - T is in G
+ * - If a typeref R in G represents a class or trait, R's superclass is in G.
+ * - If a type proxy P is not a reference to a class, P's supertype is in G
*/
def isSubTypeOfParent(subtp: Type, tp: Type)(implicit ctx: Context): Boolean =
if (subtp <:< tp) true
else tp match {
- case RefinedType(parent, _) => isSubTypeOfParent(subtp, parent)
+ case tp: TypeRef if tp.symbol.isClass => isSubTypeOfParent(subtp, tp.firstParent)
+ case tp: TypeProxy => isSubTypeOfParent(subtp, tp.superType)
case _ => false
}
unapplyFn.tpe.widen match {
- case mt: MethodType if mt.paramTypes.length == 1 && !mt.isDependent =>
- val m = mt
+ case mt: MethodType if mt.paramTypes.length == 1 =>
val unapplyArgType = mt.paramTypes.head
unapp.println(i"unapp arg tpe = $unapplyArgType, pt = $selType")
- def wpt = widenForMatchSelector(selType) // needed?
val ownType =
if (selType <:< unapplyArgType) {
- //fullyDefinedType(unapplyArgType, "extractor argument", tree.pos)
unapp.println(i"case 1 $unapplyArgType ${ctx.typerState.constraint}")
selType
- } else if (isSubTypeOfParent(unapplyArgType, wpt)(ctx.addMode(Mode.GADTflexible))) {
+ } else if (isSubTypeOfParent(unapplyArgType, selType)(ctx.addMode(Mode.GADTflexible))) {
maximizeType(unapplyArgType) match {
case Some(tvar) =>
def msg =
- d"""There is no best instantiation of pattern type $unapplyArgType
- |that makes it a subtype of selector type $selType.
- |Non-variant type variable ${tvar.origin} cannot be uniquely instantiated.""".stripMargin
+ ex"""There is no best instantiation of pattern type $unapplyArgType
+ |that makes it a subtype of selector type $selType.
+ |Non-variant type variable ${tvar.origin} cannot be uniquely instantiated."""
if (fromScala2x) {
// We can't issue an error here, because in Scala 2, ::[B] is invariant
// whereas List[+T] is covariant. According to the strict rule, a pattern
@@ -757,13 +820,13 @@ trait Applications extends Compatibility { self: Typer =>
unapplyArgType
} else {
unapp.println("Neither sub nor super")
- unapp.println(TypeComparer.explained(implicit ctx => unapplyArgType <:< wpt))
+ unapp.println(TypeComparer.explained(implicit ctx => unapplyArgType <:< selType))
errorType(
- d"Pattern type $unapplyArgType is neither a subtype nor a supertype of selector type $wpt",
+ ex"Pattern type $unapplyArgType is neither a subtype nor a supertype of selector type $selType",
tree.pos)
}
- val dummyArg = dummyTreeOfType(unapplyArgType)
+ val dummyArg = dummyTreeOfType(ownType)
val unapplyApp = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil)))
val unapplyImplicits = unapplyApp match {
case Apply(Apply(unapply, `dummyArg` :: Nil), args2) => assert(args2.nonEmpty); args2
@@ -780,7 +843,7 @@ trait Applications extends Compatibility { self: Typer =>
case _ => args
}
if (argTypes.length != bunchedArgs.length) {
- ctx.error(d"wrong number of argument patterns for $qual; expected: ($argTypes%, %)", tree.pos)
+ ctx.error(em"wrong number of argument patterns for $qual; expected: ($argTypes%, %)", tree.pos)
argTypes = argTypes.take(args.length) ++
List.fill(argTypes.length - args.length)(WildcardType)
}
@@ -799,7 +862,7 @@ trait Applications extends Compatibility { self: Typer =>
/** A typed unapply hook, can be overridden by re any-typers between frontend
* and pattern matcher.
*/
- def typedUnApply(tree: untpd.UnApply, selType: Type)(implicit ctx: Context) =
+ def typedUnApply(tree: untpd.UnApply, selType: Type)(implicit ctx: Context): UnApply =
throw new UnsupportedOperationException("cannot type check an UnApply node")
/** Is given method reference applicable to type arguments `targs` and argument trees `args`?
@@ -901,17 +964,60 @@ trait Applications extends Compatibility { self: Typer =>
{
implicit val ctx: Context = nestedCtx
- isCompatible(tp1, constrained(tp2).resultType)
+ isAsSpecificValueType(tp1, constrained(tp2).resultType)
}
case _ => // (3b)
- isCompatible(tp1, tp2)
+ isAsSpecificValueType(tp1, tp2)
}
}}
+ /** Test whether value type `tp1` is as specific as value type `tp2`.
+ * Let's abbreviate this to `tp1 <:s tp2`.
+ * Previously, `<:s` was the same as `<:`. This behavior is still
+ * available under mode `Mode.OldOverloadingResolution`. The new behavior
+ * is different, however. Here, `T <:s U` iff
+ *
+ * flip(T) <: flip(U)
+ *
+ * where `flip` changes top-level contravariant type aliases to covariant ones.
+ * Intuitively `<:s` means subtyping `<:`, except that all top-level arguments
+ * to contravariant parameters are compared as if they were covariant. E.g. given class
+ *
+ * class Cmp[-X]
+ *
+ * `Cmp[T] <:s Cmp[U]` if `T <: U`. On the other hand, nested occurrences
+ * of parameters are not affected.
+ * So `T <: U` would imply `List[Cmp[U]] <:s List[Cmp[T]]`, as usual.
+ *
+ * This relation might seem strange, but it models closely what happens for methods.
+ * Indeed, if we integrate the existing rules for methods into `<:s` we have now that
+ *
+ * (T)R <:s (U)R
+ *
+ * iff
+ *
+ * T => R <:s U => R
+ */
+ def isAsSpecificValueType(tp1: Type, tp2: Type)(implicit ctx: Context) =
+ if (ctx.mode.is(Mode.OldOverloadingResolution))
+ isCompatible(tp1, tp2)
+ else {
+ val flip = new TypeMap {
+ def apply(t: Type) = t match {
+ case t: TypeAlias if variance > 0 && t.variance < 0 => t.derivedTypeAlias(t.alias, 1)
+ case t: TypeBounds => t
+ case _ => mapOver(t)
+ }
+ }
+ isCompatible(flip(tp1), flip(tp2))
+ }
+
/** Drop any implicit parameter section */
def stripImplicit(tp: Type): Type = tp match {
case mt: ImplicitMethodType if !mt.isDependent =>
- mt.resultType // todo: make sure implicit method types are not dependent
+ mt.resultType
+ // todo: make sure implicit method types are not dependent?
+ // but check test case in /tests/pos/depmet_implicit_chaining_zw.scala
case pt: PolyType =>
pt.derivedPolyType(pt.paramNames, pt.paramBounds, stripImplicit(pt.resultType))
case _ =>
@@ -975,31 +1081,7 @@ trait Applications extends Compatibility { self: Typer =>
* to form the method type.
* todo: use techniques like for implicits to pick candidates quickly?
*/
- def resolveOverloaded(alts: List[TermRef], pt: Type, targs: List[Type] = Nil)(implicit ctx: Context): List[TermRef] = track("resolveOverloaded") {
-
- def isDetermined(alts: List[TermRef]) = alts.isEmpty || alts.tail.isEmpty
-
- /** The shape of given tree as a type; cannot handle named arguments. */
- def typeShape(tree: untpd.Tree): Type = tree match {
- case untpd.Function(args, body) =>
- defn.FunctionOf(args map Function.const(defn.AnyType), typeShape(body))
- case _ =>
- defn.NothingType
- }
-
- /** The shape of given tree as a type; is more expensive than
- * typeShape but can can handle named arguments.
- */
- def treeShape(tree: untpd.Tree): Tree = tree match {
- case NamedArg(name, arg) =>
- val argShape = treeShape(arg)
- cpy.NamedArg(tree)(name, argShape).withType(argShape.tpe)
- case _ =>
- dummyTreeOfType(typeShape(tree))
- }
-
- def narrowByTypes(alts: List[TermRef], argTypes: List[Type], resultType: Type): List[TermRef] =
- alts filter (isApplicable(_, argTypes, resultType))
+ def resolveOverloaded(alts: List[TermRef], pt: Type)(implicit ctx: Context): List[TermRef] = track("resolveOverloaded") {
/** Is `alt` a method or polytype whose result type after the first value parameter
* section conforms to the expected type `resultType`? If `resultType`
@@ -1028,23 +1110,63 @@ trait Applications extends Compatibility { self: Typer =>
* probability of pruning the search. result type comparisons are neither cheap nor
* do they prune much, on average.
*/
- def adaptByResult(alts: List[TermRef], chosen: TermRef) = {
- def nestedCtx = ctx.fresh.setExploreTyperState
- pt match {
- case pt: FunProto if !resultConforms(chosen, pt.resultType)(nestedCtx) =>
- alts.filter(alt =>
- (alt ne chosen) && resultConforms(alt, pt.resultType)(nestedCtx)) match {
- case Nil => chosen
- case alt2 :: Nil => alt2
- case alts2 =>
- resolveOverloaded(alts2, pt) match {
- case alt2 :: Nil => alt2
- case _ => chosen
- }
- }
- case _ => chosen
- }
+ def adaptByResult(chosen: TermRef) = {
+ def nestedCtx = ctx.fresh.setExploreTyperState
+ pt match {
+ case pt: FunProto if !resultConforms(chosen, pt.resultType)(nestedCtx) =>
+ alts.filter(alt =>
+ (alt ne chosen) && resultConforms(alt, pt.resultType)(nestedCtx)) match {
+ case Nil => chosen
+ case alt2 :: Nil => alt2
+ case alts2 =>
+ resolveOverloaded(alts2, pt) match {
+ case alt2 :: Nil => alt2
+ case _ => chosen
+ }
+ }
+ case _ => chosen
}
+ }
+
+ var found = resolveOverloaded(alts, pt, Nil)(ctx.retractMode(Mode.ImplicitsEnabled))
+ if (found.isEmpty && ctx.mode.is(Mode.ImplicitsEnabled))
+ found = resolveOverloaded(alts, pt, Nil)
+ found match {
+ case alt :: Nil => adaptByResult(alt) :: Nil
+ case _ => found
+ }
+ }
+
+ /** This private version of `resolveOverloaded` does the bulk of the work of
+ * overloading resolution, but does not do result adaptation. It might be
+ * called twice from the public `resolveOverloaded` method, once with
+ * implicits enabled, and once without.
+ */
+ private def resolveOverloaded(alts: List[TermRef], pt: Type, targs: List[Type])(implicit ctx: Context): List[TermRef] = track("resolveOverloaded") {
+
+ def isDetermined(alts: List[TermRef]) = alts.isEmpty || alts.tail.isEmpty
+
+ /** The shape of given tree as a type; cannot handle named arguments. */
+ def typeShape(tree: untpd.Tree): Type = tree match {
+ case untpd.Function(args, body) =>
+ defn.FunctionOf(args map Function.const(defn.AnyType), typeShape(body))
+ case _ =>
+ defn.NothingType
+ }
+
+ /** The shape of given tree as a type; is more expensive than
+ * typeShape but can can handle named arguments.
+ */
+ def treeShape(tree: untpd.Tree): Tree = tree match {
+ case NamedArg(name, arg) =>
+ val argShape = treeShape(arg)
+ cpy.NamedArg(tree)(name, argShape).withType(argShape.tpe)
+ case _ =>
+ dummyTreeOfType(typeShape(tree))
+ }
+
+ def narrowByTypes(alts: List[TermRef], argTypes: List[Type], resultType: Type): List[TermRef] =
+ alts filter (isApplicable(_, argTypes, resultType))
val candidates = pt match {
case pt @ FunProto(args, resultType, _) =>
@@ -1079,11 +1201,17 @@ trait Applications extends Compatibility { self: Typer =>
alts
}
- def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] =
- alts filter ( alt =>
- if (!ctx.isAfterTyper) isApplicable(alt, targs, args, resultType)
- else isDirectlyApplicable(alt, targs, args, resultType)
+ def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] = {
+ val alts2 = alts.filter(alt =>
+ isDirectlyApplicable(alt, targs, args, resultType)
)
+ if (alts2.isEmpty && !ctx.isAfterTyper)
+ alts.filter(alt =>
+ isApplicable(alt, targs, args, resultType)
+ )
+ else
+ alts2
+ }
val alts1 = narrowBySize(alts)
//ctx.log(i"narrowed by size: ${alts1.map(_.symbol.showDcl)}%, %")
@@ -1098,9 +1226,10 @@ trait Applications extends Compatibility { self: Typer =>
}
}
- case pt @ PolyProto(targs, pt1) =>
+ case pt @ PolyProto(targs1, pt1) =>
+ assert(targs.isEmpty)
val alts1 = alts filter pt.isMatchedBy
- resolveOverloaded(alts1, pt1, targs)
+ resolveOverloaded(alts1, pt1, targs1)
case defn.FunctionOf(args, resultType) =>
narrowByTypes(alts, args, resultType)
@@ -1108,23 +1237,16 @@ trait Applications extends Compatibility { self: Typer =>
case pt =>
alts filter (normalizedCompatible(_, pt))
}
- narrowMostSpecific(candidates) match {
- case Nil => Nil
- case alt :: Nil =>
- adaptByResult(alts, alt) :: Nil
- // why `alts` and not `candidates`? pos/array-overload.scala gives a test case.
- // Here, only the Int-apply is a candidate, but it is not compatible with the result
- // type. Picking the Byte-apply as the only result-compatible solution then forces
- // the arguments (which are constants) to be adapted to Byte. If we had picked
- // `candidates` instead, no solution would have been found.
- case alts =>
- val noDefaults = alts.filter(!_.symbol.hasDefaultParams)
- if (noDefaults.length == 1) noDefaults // return unique alternative without default parameters if it exists
- else {
- val deepPt = pt.deepenProto
- if (deepPt ne pt) resolveOverloaded(alts, deepPt, targs)
- else alts
- }
+ val found = narrowMostSpecific(candidates)
+ if (found.length <= 1) found
+ else {
+ val noDefaults = alts.filter(!_.symbol.hasDefaultParams)
+ if (noDefaults.length == 1) noDefaults // return unique alternative without default parameters if it exists
+ else {
+ val deepPt = pt.deepenProto
+ if (deepPt ne pt) resolveOverloaded(alts, deepPt, targs)
+ else alts
+ }
}
}
@@ -1227,11 +1349,3 @@ trait Applications extends Compatibility { self: Typer =>
harmonizeWith(tpes)(identity, (tp, pt) => pt)
}
-/*
- def typedApply(app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Tree], resultType: Type)(implicit ctx: Context): Tree = track("typedApply") {
- new ApplyToTyped(app, fun, methRef, args, resultType).result
- }
-
- def typedApply(fun: Tree, methRef: TermRef, args: List[Tree], resultType: Type)(implicit ctx: Context): Tree =
- typedApply(untpd.Apply(untpd.TypedSplice(fun), args), fun, methRef, args, resultType)
-*/
diff --git a/src/dotty/tools/dotc/typer/Checking.scala b/src/dotty/tools/dotc/typer/Checking.scala
index 9b1f756b7..7899174f5 100644
--- a/src/dotty/tools/dotc/typer/Checking.scala
+++ b/src/dotty/tools/dotc/typer/Checking.scala
@@ -25,8 +25,8 @@ import util.common._
import transform.SymUtils._
import Decorators._
import Uniques._
-import ErrorReporting.{err, errorType, DiagnosticString}
-import config.Printers._
+import ErrorReporting.{err, errorType}
+import config.Printers.typr
import collection.mutable
import SymDenotations.NoCompleter
@@ -34,13 +34,19 @@ object Checking {
import tpd._
/** A general checkBounds method that can be used for TypeApply nodes as
- * well as for AppliedTypeTree nodes.
+ * well as for AppliedTypeTree nodes. Also checks that type arguments to
+ * *-type parameters are fully applied.
*/
- def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context) =
+ def checkBounds(args: List[tpd.Tree], boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type)(implicit ctx: Context): Unit = {
+ (args, boundss).zipped.foreach { (arg, bound) =>
+ if (!bound.isHK && arg.tpe.isHK)
+ ctx.error(ex"missing type parameter(s) for $arg", arg.pos)
+ }
for ((arg, which, bound) <- ctx.boundsViolations(args, boundss, instantiate))
ctx.error(
- d"Type argument ${arg.tpe} does not conform to $which bound $bound ${err.whyNoMatchStr(arg.tpe, bound)}",
- arg.pos)
+ ex"Type argument ${arg.tpe} does not conform to $which bound $bound ${err.whyNoMatchStr(arg.tpe, bound)}",
+ arg.pos.focus)
+ }
/** Check that type arguments `args` conform to corresponding bounds in `poly`
* Note: This does not check the bounds of AppliedTypeTrees. These
@@ -49,6 +55,24 @@ object Checking {
def checkBounds(args: List[tpd.Tree], poly: PolyType)(implicit ctx: Context): Unit =
checkBounds(args, poly.paramBounds, _.substParams(poly, _))
+ /** If type is a higher-kinded application with wildcard arguments,
+ * check that it or one of its supertypes can be reduced to a normal application.
+ * Unreducible applications correspond to general existentials, and we
+ * cannot handle those.
+ */
+ def checkWildcardHKApply(tp: Type, pos: Position)(implicit ctx: Context): Unit = tp match {
+ case tp @ HKApply(tycon, args) if args.exists(_.isInstanceOf[TypeBounds]) =>
+ tycon match {
+ case tycon: PolyType =>
+ ctx.errorOrMigrationWarning(
+ ex"unreducible application of higher-kinded type $tycon to wildcard arguments",
+ pos)
+ case _ =>
+ checkWildcardHKApply(tp.superType, pos)
+ }
+ case _ =>
+ }
+
/** Traverse type tree, performing the following checks:
* 1. All arguments of applied type trees must conform to their bounds.
* 2. Prefixes of type selections and singleton types must be realizable.
@@ -59,21 +83,24 @@ object Checking {
case AppliedTypeTree(tycon, args) =>
// If `args` is a list of named arguments, return corresponding type parameters,
// otherwise return type parameters unchanged
- def matchNamed(tparams: List[TypeSymbol], args: List[Tree]): List[Symbol] =
- if (hasNamedArg(args))
- for (NamedArg(name, _) <- args) yield tycon.tpe.member(name).symbol
- else
- tparams
- val tparams = matchNamed(tycon.tpe.typeSymbol.typeParams, args)
- val bounds = tparams.map(tparam =>
- tparam.info.asSeenFrom(tycon.tpe.normalizedPrefix, tparam.owner.owner).bounds)
- checkBounds(args, bounds, _.substDealias(tparams, _))
+ val tparams = tycon.tpe.typeParams
+ def argNamed(tparam: TypeParamInfo) = args.find {
+ case NamedArg(name, _) => name == tparam.paramName
+ case _ => false
+ }.getOrElse(TypeTree(tparam.paramRef))
+ val orderedArgs = if (hasNamedArg(args)) tparams.map(argNamed) else args
+ val bounds = tparams.map(_.paramBoundsAsSeenFrom(tycon.tpe))
+ def instantiate(bound: Type, args: List[Type]) =
+ bound.LambdaAbstract(tparams).appliedTo(args)
+ checkBounds(orderedArgs, bounds, instantiate)
+
+ def checkValidIfHKApply(implicit ctx: Context): Unit =
+ checkWildcardHKApply(tycon.tpe.appliedTo(args.map(_.tpe)), tree.pos)
+ checkValidIfHKApply(ctx.addMode(Mode.AllowLambdaWildcardApply))
case Select(qual, name) if name.isTypeName =>
- checkRealizable(qual.tpe, qual.pos)
- case SelectFromTypeTree(qual, name) if name.isTypeName =>
- checkRealizable(qual.tpe, qual.pos)
+ checkRealizable(qual.tpe, qual.pos.focus)
case SingletonTypeTree(ref) =>
- checkRealizable(ref.tpe, ref.pos)
+ checkRealizable(ref.tpe, ref.pos.focus)
case _ =>
}
traverseChildren(tree)
@@ -88,14 +115,14 @@ object Checking {
case tref: TypeRef =>
val cls = tref.symbol
if (cls.is(AbstractOrTrait))
- ctx.error(d"$cls is abstract; cannot be instantiated", pos)
+ ctx.error(em"$cls is abstract; cannot be instantiated", pos)
if (!cls.is(Module)) {
// Create a synthetic singleton type instance, and check whether
// it conforms to the self type of the class as seen from that instance.
val stp = SkolemType(tp)
val selfType = tref.givenSelfType.asSeenFrom(stp, cls)
if (selfType.exists && !(stp <:< selfType))
- ctx.error(d"$tp does not conform to its self type $selfType; cannot be instantiated")
+ ctx.error(ex"$tp does not conform to its self type $selfType; cannot be instantiated")
}
case _ =>
}
@@ -104,7 +131,7 @@ object Checking {
def checkRealizable(tp: Type, pos: Position)(implicit ctx: Context): Unit = {
val rstatus = realizability(tp)
if (rstatus ne Realizable) {
- def msg = d"$tp is not a legal path\n since it${rstatus.msg}"
+ def msg = em"$tp is not a legal path\n since it${rstatus.msg}"
if (ctx.scala2Mode) ctx.migrationWarning(msg, pos) else ctx.error(msg, pos)
}
}
@@ -156,16 +183,28 @@ object Checking {
tp
}
- def apply(tp: Type) = tp match {
+ private def apply(tp: Type, cycleOK: Boolean, nestedCycleOK: Boolean): Type = {
+ val savedCycleOK = this.cycleOK
+ val savedNestedCycleOK = this.nestedCycleOK
+ this.cycleOK = cycleOK
+ this.nestedCycleOK = nestedCycleOK
+ try apply(tp)
+ finally {
+ this.cycleOK = savedCycleOK
+ this.nestedCycleOK = savedNestedCycleOK
+ }
+ }
+
+ def apply(tp: Type): Type = tp match {
case tp: TermRef =>
this(tp.info)
mapOver(tp)
- case tp @ RefinedType(parent, name) =>
- val parent1 = this(parent)
- val saved = cycleOK
- cycleOK = nestedCycleOK
- try tp.derivedRefinedType(parent1, name, this(tp.refinedInfo))
- finally cycleOK = saved
+ case tp @ RefinedType(parent, name, rinfo) =>
+ tp.derivedRefinedType(this(parent), name, this(rinfo, nestedCycleOK, nestedCycleOK))
+ case tp: RecType =>
+ tp.rebind(this(tp.parent))
+ case tp @ HKApply(tycon, args) =>
+ tp.derivedAppliedType(this(tycon), args.map(this(_, nestedCycleOK, nestedCycleOK)))
case tp @ TypeRef(pre, name) =>
try {
// A prefix is interesting if it might contain (transitively) a reference
@@ -179,22 +218,15 @@ object Checking {
case SuperType(thistp, _) => isInteresting(thistp)
case AndType(tp1, tp2) => isInteresting(tp1) || isInteresting(tp2)
case OrType(tp1, tp2) => isInteresting(tp1) && isInteresting(tp2)
- case _: RefinedType => true
+ case _: RefinedOrRecType | _: HKApply => true
case _ => false
}
- // If prefix is interesting, check info of typeref recursively, marking the referred symbol
- // with NoCompleter. This provokes a CyclicReference when the symbol
- // is hit again. Without this precaution we could stackoverflow here.
if (isInteresting(pre)) {
- val info = tp.info
- val sym = tp.symbol
- if (sym.infoOrCompleter == SymDenotations.NoCompleter) throw CyclicReference(sym)
- val symInfo = sym.info
- if (sym.exists) sym.info = SymDenotations.NoCompleter
- try checkInfo(info)
- finally if (sym.exists) sym.info = symInfo
+ val pre1 = this(pre, false, false)
+ checkInfo(tp.info)
+ if (pre1 eq pre) tp else tp.newLikeThis(pre1)
}
- tp
+ else tp
} catch {
case ex: CyclicReference =>
ctx.debuglog(i"cycle detected for $tp, $nestedCycleOK, $cycleOK")
@@ -210,9 +242,6 @@ object Checking {
* @pre sym is not yet initialized (i.e. its type is a Completer).
* @return `info` where every legal F-bounded reference is proctected
* by a `LazyRef`, or `ErrorType` if a cycle was detected and reported.
- * Furthermore: Add an #Apply to a fully instantiated type lambda, if none was
- * given before. This is necessary here because sometimes type lambdas are not
- * recognized when they are first formed.
*/
def checkNonCyclic(sym: Symbol, info: Type, reportErrors: Boolean)(implicit ctx: Context): Type = {
val checker = new CheckNonCyclicMap(sym, reportErrors)(ctx.addMode(Mode.CheckCyclic))
@@ -315,6 +344,7 @@ object Checking {
fail(i"only classes can have declared but undefined members$varNote")
checkWithDeferred(Private)
checkWithDeferred(Final)
+ checkWithDeferred(Inline)
}
if (sym.isValueClass && sym.is(Trait) && !sym.isRefinementClass)
fail(i"$sym cannot extend AnyVal")
@@ -347,8 +377,8 @@ object Checking {
var tp1 =
if (tp.symbol.is(Private) &&
!accessBoundary(sym).isContainedIn(tp.symbol.owner)) {
- errors = (d"non-private $sym refers to private ${tp.symbol}\n in its type signature ${sym.info}",
- pos) :: errors
+ errors = (em"non-private $sym refers to private ${tp.symbol}\n in its type signature ${sym.info}",
+ sym.pos) :: errors
tp
}
else mapOver(tp)
@@ -356,7 +386,7 @@ object Checking {
// try to dealias to avoid a leak error
val savedErrors = errors
errors = prevErrors
- val tp2 = apply(tp.info.bounds.hi)
+ val tp2 = apply(tp.superType)
if (errors eq prevErrors) tp1 = tp2
else errors = savedErrors
}
@@ -391,20 +421,20 @@ trait Checking {
val sym = tree.tpe.termSymbol
// The check is avoided inside Java compilation units because it always fails
// on the singleton type Module.type.
- if ((sym is Package) || ((sym is JavaModule) && !ctx.compilationUnit.isJava)) ctx.error(d"$sym is not a value", tree.pos)
+ if ((sym is Package) || ((sym is JavaModule) && !ctx.compilationUnit.isJava)) ctx.error(em"$sym is not a value", tree.pos)
}
tree
}
/** Check that type `tp` is stable. */
def checkStable(tp: Type, pos: Position)(implicit ctx: Context): Unit =
- if (!tp.isStable) ctx.error(d"$tp is not stable", pos)
+ if (!tp.isStable) ctx.error(ex"$tp is not stable", pos)
/** Check that all type members of `tp` have realizable bounds */
def checkRealizableBounds(tp: Type, pos: Position)(implicit ctx: Context): Unit = {
val rstatus = boundsRealizability(tp)
if (rstatus ne Realizable)
- ctx.error(i"$tp cannot be instantiated since it${rstatus.msg}", pos)
+ ctx.error(ex"$tp cannot be instantiated since it${rstatus.msg}", pos)
}
/** Check that `tp` is a class type.
@@ -416,11 +446,11 @@ trait Checking {
def checkClassType(tp: Type, pos: Position, traitReq: Boolean, stablePrefixReq: Boolean)(implicit ctx: Context): Type =
tp.underlyingClassRef(refinementOK = false) match {
case tref: TypeRef =>
- if (traitReq && !(tref.symbol is Trait)) ctx.error(d"$tref is not a trait", pos)
+ if (traitReq && !(tref.symbol is Trait)) ctx.error(ex"$tref is not a trait", pos)
if (stablePrefixReq && ctx.phase <= ctx.refchecksPhase) checkStable(tref.prefix, pos)
tp
case _ =>
- ctx.error(d"$tp is not a class type", pos)
+ ctx.error(ex"$tp is not a class type", pos)
defn.ObjectType
}
@@ -435,19 +465,29 @@ trait Checking {
}
/** Check that any top-level type arguments in this type are feasible, i.e. that
- * their lower bound conforms to their upper cound. If a type argument is
+ * their lower bound conforms to their upper bound. If a type argument is
* infeasible, issue and error and continue with upper bound.
*/
def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp match {
case tp: RefinedType =>
tp.derivedRefinedType(tp.parent, tp.refinedName, checkFeasible(tp.refinedInfo, pos, where))
+ case tp: RecType =>
+ tp.rebind(tp.parent)
case tp @ TypeBounds(lo, hi) if !(lo <:< hi) =>
- ctx.error(d"no type exists between low bound $lo and high bound $hi$where", pos)
+ ctx.error(ex"no type exists between low bound $lo and high bound $hi$where", pos)
TypeAlias(hi)
case _ =>
tp
}
+ /** Check that `tree` is a pure expression of constant type */
+ def checkInlineConformant(tree: Tree, what: => String)(implicit ctx: Context): Unit =
+ tree.tpe.widenTermRefExpr match {
+ case tp: ConstantType if isPureExpr(tree) => // ok
+ case tp if defn.isFunctionType(tp) && isPureExpr(tree) => // ok
+ case _ => ctx.error(em"$what must be a constant expression or a function", tree.pos)
+ }
+
/** Check that class does not define same symbol twice */
def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = {
val seen = new mutable.HashMap[Name, List[Symbol]] {
@@ -460,17 +500,17 @@ trait Checking {
typr.println(i"conflict? $decl $other")
if (decl.matches(other)) {
def doubleDefError(decl: Symbol, other: Symbol): Unit = {
- def ofType = if (decl.isType) "" else d": ${other.info}"
+ def ofType = if (decl.isType) "" else em": ${other.info}"
def explanation =
if (!decl.isRealMethod) ""
else "\n (the definitions have matching type signatures)"
- ctx.error(d"$decl is already defined as $other$ofType$explanation", decl.pos)
+ ctx.error(em"$decl is already defined as $other$ofType$explanation", decl.pos)
}
if (decl is Synthetic) doubleDefError(other, decl)
else doubleDefError(decl, other)
}
if ((decl is HasDefaultParams) && (other is HasDefaultParams)) {
- ctx.error(d"two or more overloaded variants of $decl have default arguments")
+ ctx.error(em"two or more overloaded variants of $decl have default arguments")
decl resetFlag HasDefaultParams
}
}
@@ -491,7 +531,7 @@ trait Checking {
ctx.error(i"$caller may not call constructor of $called", call.pos)
else if (called.is(Trait) && !caller.mixins.contains(called))
ctx.error(i"""$called is already implemented by super${caller.superClass},
- |its constructor cannot be called again""".stripMargin, call.pos)
+ |its constructor cannot be called again""", call.pos)
}
/** Check that `tpt` does not define a higher-kinded type */
@@ -499,20 +539,16 @@ trait Checking {
if (tpt.tpe.isHK && !ctx.compilationUnit.isJava) {
// be more lenient with missing type params in Java,
// needed to make pos/java-interop/t1196 work.
- errorTree(tpt, d"missing type parameter for ${tpt.tpe}")
+ errorTree(tpt, ex"missing type parameter for ${tpt.tpe}")
}
else tpt
- def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) =
- if (tparams.nonEmpty)
- sym.info match {
- case info: TypeAlias => // ok
- case TypeBounds(lo, _) =>
- for (tparam <- tparams)
- if (tparam.typeRef.occursIn(lo))
- ctx.error(i"type parameter ${tparam.name} may not occur in lower bound $lo", pos)
- case _ =>
- }
+ /** Check that `tpt` does not refer to a singleton type */
+ def checkNotSingleton(tpt: Tree, where: String)(implicit ctx: Context): Tree =
+ if (tpt.tpe.isInstanceOf[SingletonType]) {
+ errorTree(tpt, ex"Singleton type ${tpt.tpe} is not allowed $where")
+ }
+ else tpt
}
trait NoChecking extends Checking {
@@ -523,8 +559,9 @@ trait NoChecking extends Checking {
override def checkClassType(tp: Type, pos: Position, traitReq: Boolean, stablePrefixReq: Boolean)(implicit ctx: Context): Type = tp
override def checkImplicitParamsNotSingletons(vparamss: List[List[ValDef]])(implicit ctx: Context): Unit = ()
override def checkFeasible(tp: Type, pos: Position, where: => String = "")(implicit ctx: Context): Type = tp
+ override def checkInlineConformant(tree: Tree, what: => String)(implicit ctx: Context) = ()
override def checkNoDoubleDefs(cls: Symbol)(implicit ctx: Context): Unit = ()
override def checkParentCall(call: Tree, caller: ClassSymbol)(implicit ctx: Context) = ()
override def checkSimpleKinded(tpt: Tree)(implicit ctx: Context): Tree = tpt
- override def checkLowerNotHK(sym: Symbol, tparams: List[Symbol], pos: Position)(implicit ctx: Context) = ()
+ override def checkNotSingleton(tpt: Tree, where: String)(implicit ctx: Context): Tree = tpt
}
diff --git a/src/dotty/tools/dotc/typer/Docstrings.scala b/src/dotty/tools/dotc/typer/Docstrings.scala
new file mode 100644
index 000000000..370844e65
--- /dev/null
+++ b/src/dotty/tools/dotc/typer/Docstrings.scala
@@ -0,0 +1,56 @@
+package dotty.tools
+package dotc
+package typer
+
+import core._
+import Contexts._, Symbols._, Decorators._, Comments._
+import util.Positions._
+import ast.tpd
+
+trait Docstrings { self: Typer =>
+
+ /** The Docstrings typer will handle the expansion of `@define` and
+ * `@inheritdoc` if there is a `DocContext` present as a property in the
+ * supplied `ctx`.
+ *
+ * It will also type any `@usecase` available in function definitions.
+ */
+ def cookComments(syms: List[Symbol], owner: Symbol)(implicit ctx: Context): Unit =
+ ctx.docCtx.foreach { docbase =>
+ val relevantSyms = syms.filter(docbase.docstring(_).isDefined)
+ relevantSyms.foreach { sym =>
+ expandParentDocs(sym)
+ val usecases = docbase.docstring(sym).map(_.usecases).getOrElse(Nil)
+
+ usecases.foreach { usecase =>
+ enterSymbol(createSymbol(usecase.untpdCode))
+
+ typedStats(usecase.untpdCode :: Nil, owner) match {
+ case List(df: tpd.DefDef) => usecase.tpdCode = df
+ case _ => ctx.error("`@usecase` was not a valid definition", usecase.codePos)
+ }
+ }
+ }
+ }
+
+ private def expandParentDocs(sym: Symbol)(implicit ctx: Context): Unit =
+ ctx.docCtx.foreach { docCtx =>
+ docCtx.docstring(sym).foreach { cmt =>
+ def expandDoc(owner: Symbol): Unit = if (!cmt.isExpanded) {
+ val tplExp = docCtx.templateExpander
+ tplExp.defineVariables(sym)
+
+ val newCmt = cmt
+ .expand(tplExp.expandedDocComment(sym, owner, _))
+ .withUsecases
+
+ docCtx.addDocstring(sym, Some(newCmt))
+ }
+
+ if (sym ne NoSymbol) {
+ expandParentDocs(sym.owner)
+ expandDoc(sym.owner)
+ }
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/typer/Dynamic.scala b/src/dotty/tools/dotc/typer/Dynamic.scala
new file mode 100644
index 000000000..b5ace87d3
--- /dev/null
+++ b/src/dotty/tools/dotc/typer/Dynamic.scala
@@ -0,0 +1,104 @@
+package dotty.tools
+package dotc
+package typer
+
+import dotty.tools.dotc.ast.Trees._
+import dotty.tools.dotc.ast.tpd
+import dotty.tools.dotc.ast.untpd
+import dotty.tools.dotc.core.Constants.Constant
+import dotty.tools.dotc.core.Contexts.Context
+import dotty.tools.dotc.core.Names.Name
+import dotty.tools.dotc.core.StdNames._
+import dotty.tools.dotc.core.Types._
+import dotty.tools.dotc.core.Decorators._
+
+object Dynamic {
+ def isDynamicMethod(name: Name): Boolean =
+ name == nme.applyDynamic || name == nme.selectDynamic || name == nme.updateDynamic || name == nme.applyDynamicNamed
+}
+
+/** Translates selection that does not typecheck according to the scala.Dynamic rules:
+ * foo.bar(baz) = quux ~~> foo.selectDynamic(bar).update(baz, quux)
+ * foo.bar = baz ~~> foo.updateDynamic("bar")(baz)
+ * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
+ * foo.bar ~~> foo.selectDynamic(bar)
+ *
+ * The first matching rule is applied.
+ */
+trait Dynamic { self: Typer with Applications =>
+ import Dynamic._
+ import tpd._
+
+ /** Translate selection that does not typecheck according to the normal rules into an applyDynamic/applyDynamicNamed.
+ * foo.bar(baz0, baz1, ...) ~~> foo.applyDynamic(bar)(baz0, baz1, ...)
+ * foo.bar[T0, ...](baz0, baz1, ...) ~~> foo.applyDynamic[T0, ...](bar)(baz0, baz1, ...)
+ * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ * foo.bar[T0, ...](x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed[T0, ...]("bar")(("x", bazX), ("y", bazY), ("", baz), ...)
+ */
+ def typedDynamicApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context): Tree = {
+ def typedDynamicApply(qual: untpd.Tree, name: Name, targs: List[untpd.Tree]): Tree = {
+ def isNamedArg(arg: untpd.Tree): Boolean = arg match { case NamedArg(_, _) => true; case _ => false }
+ val args = tree.args
+ val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic
+ if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) {
+ ctx.error("applyDynamicNamed does not support passing a vararg parameter", tree.pos)
+ tree.withType(ErrorType)
+ } else {
+ def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg))
+ def namedArgs = args.map {
+ case NamedArg(argName, arg) => namedArgTuple(argName.toString, arg)
+ case arg => namedArgTuple("", arg)
+ }
+ val args1 = if (dynName == nme.applyDynamic) args else namedArgs
+ typedApply(untpd.Apply(coreDynamic(qual, dynName, name, targs), args1), pt)
+ }
+ }
+
+ tree.fun match {
+ case Select(qual, name) if !isDynamicMethod(name) =>
+ typedDynamicApply(qual, name, Nil)
+ case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
+ typedDynamicApply(qual, name, targs)
+ case TypeApply(fun, targs) =>
+ typedDynamicApply(fun, nme.apply, targs)
+ case fun =>
+ typedDynamicApply(fun, nme.apply, Nil)
+ }
+ }
+
+ /** Translate selection that does not typecheck according to the normal rules into a selectDynamic.
+ * foo.bar ~~> foo.selectDynamic(bar)
+ * foo.bar[T0, ...] ~~> foo.selectDynamic[T0, ...](bar)
+ *
+ * Note: inner part of translation foo.bar(baz) = quux ~~> foo.selectDynamic(bar).update(baz, quux) is achieved
+ * through an existing transformation in typedAssign [foo.bar(baz) = quux ~~> foo.bar.update(baz, quux)].
+ */
+ def typedDynamicSelect(tree: untpd.Select, targs: List[Tree], pt: Type)(implicit ctx: Context): Tree =
+ typedApply(coreDynamic(tree.qualifier, nme.selectDynamic, tree.name, targs), pt)
+
+ /** Translate selection that does not typecheck according to the normal rules into an updateDynamic.
+ * foo.bar = baz ~~> foo.updateDynamic(bar)(baz)
+ */
+ def typedDynamicAssign(tree: untpd.Assign, pt: Type)(implicit ctx: Context): Tree = {
+ def typedDynamicAssign(qual: untpd.Tree, name: Name, targs: List[untpd.Tree]): Tree =
+ typedApply(untpd.Apply(coreDynamic(qual, nme.updateDynamic, name, targs), tree.rhs), pt)
+ tree.lhs match {
+ case Select(qual, name) if !isDynamicMethod(name) =>
+ typedDynamicAssign(qual, name, Nil)
+ case TypeApply(Select(qual, name), targs) if !isDynamicMethod(name) =>
+ typedDynamicAssign(qual, name, targs)
+ case _ =>
+ ctx.error("reassignment to val", tree.pos)
+ tree.withType(ErrorType)
+ }
+ }
+
+ private def coreDynamic(qual: untpd.Tree, dynName: Name, name: Name, targs: List[untpd.Tree])(implicit ctx: Context): untpd.Apply = {
+ val select = untpd.Select(qual, dynName)
+ val selectWithTypes =
+ if (targs.isEmpty) select
+ else untpd.TypeApply(select, targs)
+ untpd.Apply(selectWithTypes, Literal(Constant(name.toString)))
+ }
+}
diff --git a/src/dotty/tools/dotc/typer/ErrorReporting.scala b/src/dotty/tools/dotc/typer/ErrorReporting.scala
index d6a87acf6..1d22dc646 100644
--- a/src/dotty/tools/dotc/typer/ErrorReporting.scala
+++ b/src/dotty/tools/dotc/typer/ErrorReporting.scala
@@ -8,18 +8,20 @@ import Trees._
import Types._, ProtoTypes._, Contexts._, Decorators._, Denotations._, Symbols._
import Applications._, Implicits._, Flags._
import util.Positions._
-import reporting.Diagnostic
-import printing.Showable
-import printing.Disambiguation.disambiguated
+import printing.{Showable, RefinedPrinter}
+import scala.collection.mutable
+import java.util.regex.Matcher.quoteReplacement
+import reporting.diagnostic.Message
+import reporting.diagnostic.messages._
object ErrorReporting {
import tpd._
- def errorTree(tree: untpd.Tree, msg: => String)(implicit ctx: Context): tpd.Tree =
+ def errorTree(tree: untpd.Tree, msg: => Message)(implicit ctx: Context): tpd.Tree =
tree withType errorType(msg, tree.pos)
- def errorType(msg: => String, pos: Position)(implicit ctx: Context): ErrorType = {
+ def errorType(msg: => Message, pos: Position)(implicit ctx: Context): ErrorType = {
ctx.error(msg, pos)
ErrorType
}
@@ -37,7 +39,7 @@ object ErrorReporting {
val treeSym = ctx.symOfContextTree(tree)
if (treeSym.exists && treeSym.name == cycleSym.name && treeSym.owner == cycleSym.owner) {
val result = if (cycleSym is Method) " result" else ""
- d"overloaded or recursive $cycleSym needs$result type"
+ em"overloaded or recursive $cycleSym needs$result type"
}
else errorMsg(msg, cx.outer)
case _ =>
@@ -47,6 +49,9 @@ object ErrorReporting {
errorMsg(ex.show, ctx)
}
+ def wrongNumberOfArgs(fntpe: Type, kind: String, expected: Int, pos: Position)(implicit ctx: Context) =
+ errorType(em"wrong number of ${kind}arguments for $fntpe, expected: $expected", pos)
+
class Errors(implicit ctx: Context) {
/** An explanatory note to be added to error messages
@@ -58,15 +63,15 @@ object ErrorReporting {
def expectedTypeStr(tp: Type): String = tp match {
case tp: PolyProto =>
- d"type arguments [${tp.targs}%, %] and ${expectedTypeStr(tp.resultType)}"
+ em"type arguments [${tp.targs}%, %] and ${expectedTypeStr(tp.resultType)}"
case tp: FunProto =>
val result = tp.resultType match {
case _: WildcardType | _: IgnoredProto => ""
- case tp => d" and expected result type $tp"
+ case tp => em" and expected result type $tp"
}
- d"arguments (${tp.typedArgs.tpes}%, %)$result"
+ em"arguments (${tp.typedArgs.tpes}%, %)$result"
case _ =>
- d"expected type $tp"
+ em"expected type $tp"
}
def anonymousTypeMemberStr(tpe: Type) = {
@@ -75,12 +80,12 @@ object ErrorReporting {
case _: PolyType | _: MethodType => "method"
case _ => "value of type"
}
- d"$kind $tpe"
+ em"$kind $tpe"
}
def overloadedAltsStr(alts: List[SingleDenotation]) =
- d"overloaded alternatives of ${denotStr(alts.head)} with types\n" +
- d" ${alts map (_.info)}%\n %"
+ em"overloaded alternatives of ${denotStr(alts.head)} with types\n" +
+ em" ${alts map (_.info)}%\n %"
def denotStr(denot: Denotation): String =
if (denot.isOverloaded) overloadedAltsStr(denot.alternatives)
@@ -96,9 +101,8 @@ object ErrorReporting {
def patternConstrStr(tree: Tree): String = ???
- def typeMismatch(tree: Tree, pt: Type, implicitFailure: SearchFailure = NoImplicitMatches): Tree = {
- errorTree(tree, typeMismatchStr(normalize(tree.tpe, pt), pt) + implicitFailure.postscript)
- }
+ def typeMismatch(tree: Tree, pt: Type, implicitFailure: SearchFailure = NoImplicitMatches): Tree =
+ errorTree(tree, typeMismatchMsg(normalize(tree.tpe, pt), pt, implicitFailure.postscript))
/** A subtype log explaining why `found` does not conform to `expected` */
def whyNoMatchStr(found: Type, expected: Type) =
@@ -107,40 +111,43 @@ object ErrorReporting {
else
""
- def typeMismatchStr(found: Type, expected: Type) = disambiguated { implicit ctx =>
- def infoStr = found match { // DEBUG
- case tp: TypeRef => s"with info ${tp.info} / ${tp.prefix.toString} / ${tp.prefix.dealias.toString}"
- case _ => ""
+ def typeMismatchMsg(found: Type, expected: Type, postScript: String = "") = {
+ // replace constrained polyparams and their typevars by their bounds where possible
+ object reported extends TypeMap {
+ def setVariance(v: Int) = variance = v
+ val constraint = ctx.typerState.constraint
+ def apply(tp: Type): Type = tp match {
+ case tp: PolyParam =>
+ constraint.entry(tp) match {
+ case bounds: TypeBounds =>
+ if (variance < 0) apply(constraint.fullUpperBound(tp))
+ else if (variance > 0) apply(constraint.fullLowerBound(tp))
+ else tp
+ case NoType => tp
+ case instType => apply(instType)
+ }
+ case tp: TypeVar => apply(tp.stripTypeVar)
+ case _ => mapOver(tp)
}
- d"""type mismatch:
- | found : $found
- | required: $expected""".stripMargin + whyNoMatchStr(found, expected)
+ }
+ val found1 = reported(found)
+ reported.setVariance(-1)
+ val expected1 = reported(expected)
+ TypeMismatch(found1, expected1, whyNoMatchStr(found, expected), postScript)
}
- }
- def err(implicit ctx: Context): Errors = new Errors
-
- /** The d string interpolator works like the i string interpolator, but marks nonsensical errors
- * using `<nonsensical>...</nonsensical>` tags.
- * Note: Instead of these tags, it would be nicer to return a data structure containing the message string
- * and a boolean indicating whether the message is sensical, but then we cannot use string operations
- * like concatenation, stripMargin etc on the values returned by d"...", and in the current error
- * message composition methods, this is crucial.
- */
- implicit class DiagnosticString(val sc: StringContext) extends AnyVal {
- def d(args: Any*)(implicit ctx: Context): String = {
- def isSensical(arg: Any): Boolean = arg match {
- case l: Seq[_] => l.forall(isSensical(_))
- case tpe: Type if tpe.isErroneous => false
- case NoType => false
- case sym: Symbol if sym.isCompleted =>
- sym.info != ErrorType && sym.info != TypeAlias(ErrorType) && sym.info != NoType
- case _ => true
+ /** Format `raw` implicitNotFound argument, replacing all
+ * occurrences of `${X}` where `X` is in `paramNames` with the
+ * corresponding shown type in `args`.
+ */
+ def implicitNotFoundString(raw: String, paramNames: List[String], args: List[Type]): String = {
+ def translate(name: String): Option[String] = {
+ val idx = paramNames.indexOf(name)
+ if (idx >= 0) Some(quoteReplacement(ex"${args(idx)}")) else None
}
-
- val s = new StringInterpolators(sc).i(args : _*)
- if (args.forall(isSensical(_))) s
- else Diagnostic.nonSensicalStartTag + s + Diagnostic.nonSensicalEndTag
+ """\$\{\w*\}""".r.replaceSomeIn(raw, m => translate(m.matched.drop(2).init))
}
}
+
+ def err(implicit ctx: Context): Errors = new Errors
}
diff --git a/src/dotty/tools/dotc/typer/EtaExpansion.scala b/src/dotty/tools/dotc/typer/EtaExpansion.scala
index f4a1f97cd..397b6d95b 100644
--- a/src/dotty/tools/dotc/typer/EtaExpansion.scala
+++ b/src/dotty/tools/dotc/typer/EtaExpansion.scala
@@ -142,9 +142,13 @@ object EtaExpansion {
var ids: List[Tree] = mt.paramNames map (name => Ident(name).withPos(tree.pos))
if (mt.paramTypes.nonEmpty && mt.paramTypes.last.isRepeatedParam)
ids = ids.init :+ repeated(ids.last)
- val body = Apply(lifted, ids)
+ var body: Tree = Apply(lifted, ids)
+ mt.resultType match {
+ case rt: MethodType if !rt.isImplicit => body = PostfixOp(body, nme.WILDCARD)
+ case _ =>
+ }
val fn = untpd.Function(params, body)
- if (defs.nonEmpty) untpd.Block(defs.toList map untpd.TypedSplice, fn) else fn
+ if (defs.nonEmpty) untpd.Block(defs.toList map (untpd.TypedSplice(_)), fn) else fn
}
}
diff --git a/src/dotty/tools/dotc/typer/FrontEnd.scala b/src/dotty/tools/dotc/typer/FrontEnd.scala
index eee8744a5..4ce24b633 100644
--- a/src/dotty/tools/dotc/typer/FrontEnd.scala
+++ b/src/dotty/tools/dotc/typer/FrontEnd.scala
@@ -7,7 +7,8 @@ import Contexts._
import Symbols._
import dotty.tools.dotc.parsing.JavaParsers.JavaParser
import parsing.Parsers.Parser
-import config.Printers._
+import config.Config
+import config.Printers.{typr, default}
import util.Stats._
import scala.util.control.NonFatal
import ast.Trees._
@@ -23,7 +24,7 @@ class FrontEnd extends Phase {
try body
catch {
case NonFatal(ex) =>
- ctx.println(s"exception occurred while $doing ${ctx.compilationUnit}")
+ ctx.echo(s"exception occurred while $doing ${ctx.compilationUnit}")
throw ex
}
@@ -34,6 +35,8 @@ class FrontEnd extends Phase {
else new Parser(unit.source).parse()
val printer = if (ctx.settings.Xprint.value.contains("parser")) default else typr
printer.println("parsed:\n" + unit.untpdTree.show)
+ if (Config.checkPositions)
+ unit.untpdTree.checkPos(nonOverlapping = !unit.isJava && !ctx.reporter.hasErrors)
}
def enterSyms(implicit ctx: Context) = monitor("indexing") {
@@ -57,7 +60,7 @@ class FrontEnd extends Phase {
case _ => NoSymbol
}
- private def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
+ protected def discardAfterTyper(unit: CompilationUnit)(implicit ctx: Context) =
unit.isJava || firstTopLevelDef(unit.tpdTree :: Nil).isPrimitiveValueClass
override def runOn(units: List[CompilationUnit])(implicit ctx: Context): List[CompilationUnit] = {
diff --git a/src/dotty/tools/dotc/typer/Implicits.scala b/src/dotty/tools/dotc/typer/Implicits.scala
index 446b39799..f3dceea71 100644
--- a/src/dotty/tools/dotc/typer/Implicits.scala
+++ b/src/dotty/tools/dotc/typer/Implicits.scala
@@ -10,6 +10,7 @@ import printing.Showable
import Contexts._
import Types._
import Flags._
+import TypeErasure.{erasure, hasStableErasure}
import Mode.ImplicitsEnabled
import Denotations._
import NameOps._
@@ -23,9 +24,11 @@ import Constants._
import Applications._
import ProtoTypes._
import ErrorReporting._
+import Inferencing.fullyDefinedType
+import Trees._
import Hashable._
import config.Config
-import config.Printers._
+import config.Printers.{implicits, implicitsDetailed}
import collection.mutable
/** Implicit resolution */
@@ -104,7 +107,7 @@ object Implicits {
*/
class OfTypeImplicits(tp: Type, val companionRefs: TermRefSet)(initctx: Context) extends ImplicitRefs(initctx) {
assert(initctx.typer != null)
- val refs: List[TermRef] = {
+ lazy val refs: List[TermRef] = {
val buf = new mutable.ListBuffer[TermRef]
for (companion <- companionRefs) buf ++= companion.implicitMembers
buf.toList
@@ -191,7 +194,7 @@ object Implicits {
/** A successful search
* @param ref The implicit reference that succeeded
- * @param tree The typed tree that can needs to be inserted
+ * @param tree The typed tree that needs to be inserted
* @param ctx The context after the implicit search
*/
case class SearchSuccess(tree: tpd.Tree, ref: TermRef, tstate: TyperState) extends SearchResult {
@@ -214,8 +217,8 @@ object Implicits {
protected def pt: Type
protected def argument: tpd.Tree
protected def qualify(implicit ctx: Context) =
- if (argument.isEmpty) d"match type $pt"
- else d"convert from ${argument.tpe} to $pt"
+ if (argument.isEmpty) em"match type $pt"
+ else em"convert from ${argument.tpe} to $pt"
/** An explanation of the cause of the failure as a string */
def explanation(implicit ctx: Context): String
@@ -224,7 +227,7 @@ object Implicits {
/** An ambiguous implicits failure */
class AmbiguousImplicits(alt1: TermRef, alt2: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
def explanation(implicit ctx: Context): String =
- d"both ${err.refStr(alt1)} and ${err.refStr(alt2)} $qualify"
+ em"both ${err.refStr(alt1)} and ${err.refStr(alt2)} $qualify"
override def postscript(implicit ctx: Context) =
"\nNote that implicit conversions cannot be applied because they are ambiguous;" +
"\n " + explanation
@@ -232,17 +235,17 @@ object Implicits {
class NonMatchingImplicit(ref: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
def explanation(implicit ctx: Context): String =
- d"${err.refStr(ref)} does not $qualify"
+ em"${err.refStr(ref)} does not $qualify"
}
class ShadowedImplicit(ref: TermRef, shadowing: Type, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
def explanation(implicit ctx: Context): String =
- d"${err.refStr(ref)} does $qualify but is shadowed by ${err.refStr(shadowing)}"
+ em"${err.refStr(ref)} does $qualify but is shadowed by ${err.refStr(shadowing)}"
}
class DivergingImplicit(ref: TermRef, val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
def explanation(implicit ctx: Context): String =
- d"${err.refStr(ref)} produces a diverging implicit search when trying to $qualify"
+ em"${err.refStr(ref)} produces a diverging implicit search when trying to $qualify"
}
class FailedImplicit(failures: List[ExplainedSearchFailure], val pt: Type, val argument: tpd.Tree) extends ExplainedSearchFailure {
@@ -250,7 +253,9 @@ object Implicits {
if (failures.isEmpty) s" No implicit candidates were found that $qualify"
else " " + (failures map (_.explanation) mkString "\n ")
override def postscript(implicit ctx: Context): String =
- "\nImplicit search failure summary:\n" + explanation
+ i"""
+ |Implicit search failure summary:
+ |$explanation"""
}
}
@@ -285,11 +290,20 @@ trait ImplicitRunInfo { self: RunInfo =>
case tp: TypeRef if tp.symbol.isAbstractOrAliasType =>
val pre = tp.prefix
def joinClass(tp: Type, cls: ClassSymbol) =
- AndType(tp, cls.typeRef.asSeenFrom(pre, cls.owner))
+ AndType.make(tp, cls.typeRef.asSeenFrom(pre, cls.owner))
val lead = if (tp.prefix eq NoPrefix) defn.AnyType else apply(tp.prefix)
(lead /: tp.classSymbols)(joinClass)
case tp: TypeVar =>
apply(tp.underlying)
+ case tp: HKApply =>
+ def applyArg(arg: Type) = arg match {
+ case TypeBounds(lo, hi) => AndType.make(lo, hi)
+ case _: WildcardType => defn.AnyType
+ case _ => arg
+ }
+ (apply(tp.tycon) /: tp.args)((tc, arg) => AndType.make(tc, applyArg(arg)))
+ case tp: PolyType =>
+ apply(tp.resType)
case _ =>
mapOver(tp)
}
@@ -320,8 +334,8 @@ trait ImplicitRunInfo { self: RunInfo =>
}
def addParentScope(parent: TypeRef): Unit = {
iscopeRefs(parent) foreach addRef
- for (param <- parent.typeParams)
- comps ++= iscopeRefs(pre.member(param.name).info)
+ for (param <- parent.typeParamSymbols)
+ comps ++= iscopeRefs(tp.member(param.name).info)
}
val companion = cls.companionModule
if (companion.exists) addRef(companion.valRef)
@@ -338,8 +352,6 @@ trait ImplicitRunInfo { self: RunInfo =>
}
}
- def ofTypeImplicits(comps: TermRefSet) = new OfTypeImplicits(tp, comps)(ctx)
-
/** The implicit scope of type `tp`
* @param isLifted Type `tp` is the result of a `liftToClasses` application
*/
@@ -349,9 +361,12 @@ trait ImplicitRunInfo { self: RunInfo =>
ctx.typerState.ephemeral = false
try {
val liftedTp = if (isLifted) tp else liftToClasses(tp)
- val result =
- if (liftedTp ne tp) iscope(liftedTp, isLifted = true)
- else ofTypeImplicits(collectCompanions(tp))
+ val refs =
+ if (liftedTp ne tp)
+ iscope(liftedTp, isLifted = true).companionRefs
+ else
+ collectCompanions(tp)
+ val result = new OfTypeImplicits(tp, refs)(ctx)
if (ctx.typerState.ephemeral) record("ephemeral cache miss: implicitScope")
else if (cacheResult) implicitScopeCache(tp) = result
result
@@ -363,7 +378,18 @@ trait ImplicitRunInfo { self: RunInfo =>
computeIScope(cacheResult = false)
else implicitScopeCache get tp match {
case Some(is) => is
- case None => computeIScope(cacheResult = true)
+ case None =>
+ // Implicit scopes are tricky to cache because of loops. For example
+ // in `tests/pos/implicit-scope-loop.scala`, the scope of B contains
+ // the scope of A which contains the scope of B. We break the loop
+ // by returning EmptyTermRefSet in `collectCompanions` for types
+ // that we have already seen, but this means that we cannot cache
+ // the computed scope of A, it is incomplete.
+ // Keeping track of exactly where these loops happen would require a
+ // lot of book-keeping, instead we choose to be conservative and only
+ // cache scopes before any type has been seen. This is unfortunate
+ // because loops are very common for types in scala.collection.
+ computeIScope(cacheResult = seen.isEmpty)
}
}
@@ -388,7 +414,7 @@ trait Implicits { self: Typer =>
&& !to.isError
&& !ctx.isAfterTyper
&& (ctx.mode is Mode.ImplicitsEnabled)
- && from.isInstanceOf[ValueType]
+ && from.isValueType
&& ( from.isValueSubType(to)
|| inferView(dummyTreeOfType(from), to)
(ctx.fresh.addMode(Mode.ImplicitExploration).setExploreTyperState)
@@ -417,6 +443,81 @@ trait Implicits { self: Typer =>
}
}
+ /** Find an implicit argument for parameter `formal`.
+ * @param error An error handler that gets an error message parameter
+ * which is itself parameterized by another string,
+ * indicating where the implicit parameter is needed
+ */
+ def inferImplicitArg(formal: Type, error: (String => String) => Unit, pos: Position)(implicit ctx: Context): Tree =
+ inferImplicit(formal, EmptyTree, pos) match {
+ case SearchSuccess(arg, _, _) =>
+ arg
+ case ambi: AmbiguousImplicits =>
+ error(where => s"ambiguous implicits: ${ambi.explanation} of $where")
+ EmptyTree
+ case failure: SearchFailure =>
+ val arg = synthesizedClassTag(formal, pos)
+ if (!arg.isEmpty) arg
+ else {
+ var msgFn = (where: String) =>
+ em"no implicit argument of type $formal found for $where" + failure.postscript
+ for {
+ notFound <- formal.typeSymbol.getAnnotation(defn.ImplicitNotFoundAnnot)
+ Trees.Literal(Constant(raw: String)) <- notFound.argument(0)
+ } {
+ msgFn = where =>
+ err.implicitNotFoundString(
+ raw,
+ formal.typeSymbol.typeParams.map(_.name.unexpandedName.toString),
+ formal.argInfos)
+ }
+ error(msgFn)
+ EmptyTree
+ }
+ }
+
+ /** If `formal` is of the form ClassTag[T], where `T` is a class type,
+ * synthesize a class tag for `T`.
+ */
+ def synthesizedClassTag(formal: Type, pos: Position)(implicit ctx: Context): Tree = {
+ if (formal.isRef(defn.ClassTagClass))
+ formal.argTypes match {
+ case arg :: Nil =>
+ val tp = fullyDefinedType(arg, "ClassTag argument", pos)
+ if (hasStableErasure(tp))
+ return ref(defn.ClassTagModule)
+ .select(nme.apply)
+ .appliedToType(tp)
+ .appliedTo(clsOf(erasure(tp)))
+ .withPos(pos)
+ case _ =>
+ }
+ EmptyTree
+ }
+
+ private def assumedCanEqual(ltp: Type, rtp: Type)(implicit ctx: Context) = {
+ val lift = new TypeMap {
+ def apply(t: Type) = t match {
+ case t: TypeRef =>
+ t.info match {
+ case TypeBounds(lo, hi) if lo ne hi => hi
+ case _ => t
+ }
+ case _ =>
+ if (variance > 0) mapOver(t) else t
+ }
+ }
+ ltp.isError || rtp.isError || ltp <:< lift(rtp) || rtp <:< lift(ltp)
+ }
+
+ /** Check that equality tests between types `ltp` and `rtp` make sense */
+ def checkCanEqual(ltp: Type, rtp: Type, pos: Position)(implicit ctx: Context): Unit =
+ if (!ctx.isAfterTyper && !assumedCanEqual(ltp, rtp)) {
+ val res = inferImplicitArg(
+ defn.EqType.appliedTo(ltp, rtp), msgFun => ctx.error(msgFun(""), pos), pos)
+ implicits.println(i"Eq witness found: $res: ${res.tpe}")
+ }
+
/** Find an implicit parameter or conversion.
* @param pt The expected type of the parameter or conversion.
* @param argument If an implicit conversion is searched, the argument to which
@@ -441,7 +542,19 @@ trait Implicits { self: Typer =>
result
case result: AmbiguousImplicits =>
val deepPt = pt.deepenProto
- if (deepPt ne pt) inferImplicit(deepPt, argument, pos) else result
+ if (deepPt ne pt) inferImplicit(deepPt, argument, pos)
+ else if (ctx.scala2Mode && !ctx.mode.is(Mode.OldOverloadingResolution)) {
+ inferImplicit(pt, argument, pos)(ctx.addMode(Mode.OldOverloadingResolution)) match {
+ case altResult: SearchSuccess =>
+ ctx.migrationWarning(
+ s"According to new implicit resolution rules, this will be ambiguous:\n ${result.explanation}",
+ pos)
+ altResult
+ case _ =>
+ result
+ }
+ }
+ else result
case _ =>
assert(prevConstr eq ctx.typerState.constraint)
result
@@ -459,7 +572,7 @@ trait Implicits { self: Typer =>
// Not clear whether we need to drop the `.widen` here. All tests pass with it in place, though.
assert(argument.isEmpty || argument.tpe.isValueType || argument.tpe.isInstanceOf[ExprType],
- d"found: $argument: ${argument.tpe}, expected: $pt")
+ em"found: $argument: ${argument.tpe}, expected: $pt")
/** The expected type for the searched implicit */
lazy val fullProto = implicitProto(pt, identity)
@@ -502,6 +615,18 @@ trait Implicits { self: Typer =>
case _ => false
}
}
+ // Does there exist an implicit value of type `Eq[tp, tp]`?
+ def hasEq(tp: Type): Boolean =
+ new ImplicitSearch(defn.EqType.appliedTo(tp, tp), EmptyTree, pos).bestImplicit match {
+ case result: SearchSuccess => result.ref.symbol != defn.Predef_eqAny
+ case result: AmbiguousImplicits => true
+ case _ => false
+ }
+ def validEqAnyArgs(tp1: Type, tp2: Type) = {
+ List(tp1, tp2).foreach(fullyDefinedType(_, "eqAny argument", pos))
+ assumedCanEqual(tp1, tp2) || !hasEq(tp1) && !hasEq(tp2) ||
+ { implicits.println(i"invalid eqAny[$tp1, $tp2]"); false }
+ }
if (ctx.reporter.hasErrors)
nonMatchingImplicit(ref)
else if (contextual && !ctx.mode.is(Mode.ImplicitShadowing) &&
@@ -509,8 +634,13 @@ trait Implicits { self: Typer =>
implicits.println(i"SHADOWING $ref in ${ref.termSymbol.owner} is shadowed by $shadowing in ${shadowing.symbol.owner}")
shadowedImplicit(ref, methPart(shadowing).tpe)
}
- else
- SearchSuccess(generated1, ref, ctx.typerState)
+ else generated1 match {
+ case TypeApply(fn, targs @ (arg1 :: arg2 :: Nil))
+ if fn.symbol == defn.Predef_eqAny && !validEqAnyArgs(arg1.tpe, arg2.tpe) =>
+ nonMatchingImplicit(ref)
+ case _ =>
+ SearchSuccess(generated1, ref, ctx.typerState)
+ }
}}
/** Given a list of implicit references, produce a list of all implicit search successes,
@@ -644,7 +774,7 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) {
case tp: RefinedType =>
foldOver(n + 1, tp)
case tp: TypeRef if tp.info.isAlias =>
- apply(n, tp.info.bounds.hi)
+ apply(n, tp.superType)
case _ =>
foldOver(n, tp)
}
@@ -673,14 +803,15 @@ class SearchHistory(val searchDepth: Int, val seen: Map[ClassSymbol, Int]) {
def updateMap(csyms: List[ClassSymbol], seen: Map[ClassSymbol, Int]): SearchHistory = csyms match {
case csym :: csyms1 =>
seen get csym match {
+ // proto complexity is >= the last time it was seen → diverge
case Some(prevSize) if size >= prevSize => this
case _ => updateMap(csyms1, seen.updated(csym, size))
}
- case nil =>
- if (csyms.isEmpty) this
- else new SearchHistory(searchDepth + 1, seen)
+ case _ =>
+ new SearchHistory(searchDepth + 1, seen)
}
- updateMap(proto.classSymbols, seen)
+ if (proto.classSymbols.isEmpty) this
+ else updateMap(proto.classSymbols, seen)
}
}
}
diff --git a/src/dotty/tools/dotc/typer/ImportInfo.scala b/src/dotty/tools/dotc/typer/ImportInfo.scala
index 2ca90311f..3aa289181 100644
--- a/src/dotty/tools/dotc/typer/ImportInfo.scala
+++ b/src/dotty/tools/dotc/typer/ImportInfo.scala
@@ -60,10 +60,11 @@ class ImportInfo(symf: => Symbol, val selectors: List[untpd.Tree], val isRootImp
def recur(sels: List[untpd.Tree]): Unit = sels match {
case sel :: sels1 =>
sel match {
- case Pair(Ident(name: TermName), Ident(nme.WILDCARD)) =>
+ case Thicket(Ident(name: TermName) :: Ident(nme.WILDCARD) :: Nil) =>
myExcluded += name
- case Pair(Ident(from: TermName), Ident(to: TermName)) =>
+ case Thicket(Ident(from: TermName) :: Ident(to: TermName) :: Nil) =>
myMapped = myMapped.updated(to, from)
+ myExcluded += from
myOriginals += from
case Ident(nme.WILDCARD) =>
myWildcardImport = true
@@ -98,7 +99,7 @@ class ImportInfo(symf: => Symbol, val selectors: List[untpd.Tree], val isRootImp
lazy val hiddenRoot: Symbol = {
val sym = site.termSymbol
def hasMaskingSelector = selectors exists {
- case Pair(_, Ident(nme.WILDCARD)) => true
+ case Thicket(_ :: Ident(nme.WILDCARD) :: Nil) => true
case _ => false
}
if ((defn.RootImportTypes exists (_.symbol == sym)) && hasMaskingSelector) sym else NoSymbol
diff --git a/src/dotty/tools/dotc/typer/Inferencing.scala b/src/dotty/tools/dotc/typer/Inferencing.scala
index 99e8cd150..9a1a42e44 100644
--- a/src/dotty/tools/dotc/typer/Inferencing.scala
+++ b/src/dotty/tools/dotc/typer/Inferencing.scala
@@ -15,8 +15,9 @@ import util.{Stats, SimpleMap}
import util.common._
import Decorators._
import Uniques._
-import config.Printers._
+import config.Printers.{typr, constr}
import annotation.tailrec
+import reporting._
import collection.mutable
object Inferencing {
@@ -46,7 +47,7 @@ object Inferencing {
/** Instantiate selected type variables `tvars` in type `tp` */
def instantiateSelected(tp: Type, tvars: List[Type])(implicit ctx: Context): Unit =
- new IsFullyDefinedAccumulator(new ForceDegree.Value(tvars.contains)).process(tp)
+ new IsFullyDefinedAccumulator(new ForceDegree.Value(tvars.contains, minimizeAll = true)).process(tp)
/** The accumulator which forces type variables using the policy encoded in `force`
* and returns whether the type is fully defined. The direction in which
@@ -77,7 +78,8 @@ object Inferencing {
def apply(x: Boolean, tp: Type): Boolean = tp.dealias match {
case _: WildcardType | _: ProtoType =>
false
- case tvar: TypeVar if !tvar.isInstantiated =>
+ case tvar: TypeVar
+ if !tvar.isInstantiated && ctx.typerState.constraint.contains(tvar) =>
force.appliesTo(tvar) && {
val direction = instDirection(tvar.origin)
if (direction != 0) {
@@ -86,6 +88,7 @@ object Inferencing {
}
else {
val minimize =
+ force.minimizeAll ||
variance >= 0 && !(
force == ForceDegree.noBottom &&
defn.isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true)))
@@ -174,8 +177,12 @@ object Inferencing {
/** Recursively widen and also follow type declarations and type aliases. */
def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match {
- case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi)
- case tp: AnnotatedType => tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot)
+ case tp: TypeRef if !tp.symbol.isClass =>
+ widenForMatchSelector(tp.superType)
+ case tp: HKApply =>
+ widenForMatchSelector(tp.superType)
+ case tp: AnnotatedType =>
+ tp.derivedAnnotatedType(widenForMatchSelector(tp.tpe), tp.annot)
case tp => tp
}
@@ -217,13 +224,38 @@ object Inferencing {
val vs = variances(tp, qualifies)
var changed = false
- vs foreachBinding { (tvar, v) =>
- if (v != 0) {
- typr.println(s"interpolate ${if (v == 1) "co" else "contra"}variant ${tvar.show} in ${tp.show}")
- tvar.instantiate(fromBelow = v == 1)
- changed = true
- }
+ val hasUnreportedErrors = ctx.typerState.reporter match {
+ case r: StoreReporter if r.hasErrors => true
+ case _ => false
}
+ // Avoid interpolating variables if typerstate has unreported errors.
+ // Reason: The errors might reflect unsatisfiable constraints. In that
+ // case interpolating without taking into account the constraints risks producing
+ // nonsensical types that then in turn produce incomprehensible errors.
+ // An example is in neg/i1240.scala. Without the condition in the next code line
+ // we get for
+ //
+ // val y: List[List[String]] = List(List(1))
+ //
+ // i1430.scala:5: error: type mismatch:
+ // found : Int(1)
+ // required: Nothing
+ // val y: List[List[String]] = List(List(1))
+ // ^
+ // With the condition, we get the much more sensical:
+ //
+ // i1430.scala:5: error: type mismatch:
+ // found : Int(1)
+ // required: String
+ // val y: List[List[String]] = List(List(1))
+ if (!hasUnreportedErrors)
+ vs foreachBinding { (tvar, v) =>
+ if (v != 0) {
+ typr.println(s"interpolate ${if (v == 1) "co" else "contra"}variant ${tvar.show} in ${tp.show}")
+ tvar.instantiate(fromBelow = v == 1)
+ changed = true
+ }
+ }
if (changed) // instantiations might have uncovered new typevars to interpolate
interpolateUndetVars(tree, ownedBy)
else
@@ -293,9 +325,9 @@ object Inferencing {
/** An enumeration controlling the degree of forcing in "is-dully-defined" checks. */
@sharable object ForceDegree {
- class Value(val appliesTo: TypeVar => Boolean)
- val none = new Value(_ => false)
- val all = new Value(_ => true)
- val noBottom = new Value(_ => true)
+ class Value(val appliesTo: TypeVar => Boolean, val minimizeAll: Boolean)
+ val none = new Value(_ => false, minimizeAll = false)
+ val all = new Value(_ => true, minimizeAll = false)
+ val noBottom = new Value(_ => true, minimizeAll = false)
}
diff --git a/src/dotty/tools/dotc/typer/Inliner.scala b/src/dotty/tools/dotc/typer/Inliner.scala
new file mode 100644
index 000000000..55008c0c5
--- /dev/null
+++ b/src/dotty/tools/dotc/typer/Inliner.scala
@@ -0,0 +1,521 @@
+package dotty.tools
+package dotc
+package typer
+
+import dotty.tools.dotc.ast.Trees.NamedArg
+import dotty.tools.dotc.ast.{Trees, untpd, tpd, TreeTypeMap}
+import Trees._
+import core._
+import Flags._
+import Symbols._
+import Types._
+import Decorators._
+import Constants._
+import StdNames.nme
+import Contexts.Context
+import Names.{Name, TermName}
+import NameOps._
+import SymDenotations.SymDenotation
+import Annotations._
+import transform.ExplicitOuter
+import Inferencing.fullyDefinedType
+import config.Printers.inlining
+import ErrorReporting.errorTree
+import collection.mutable
+import transform.TypeUtils._
+
+object Inliner {
+ import tpd._
+
+ /** Adds accessors for all non-public term members accessed
+ * from `tree`. Non-public type members are currently left as they are.
+ * This means that references to a private type will lead to typing failures
+ * on the code when it is inlined. Less than ideal, but hard to do better (see below).
+ *
+ * @return If there are accessors generated, a thicket consisting of the rewritten `tree`
+ * and all accessors, otherwise the original tree.
+ */
+ private def makeInlineable(tree: Tree)(implicit ctx: Context) = {
+
+ /** A tree map which inserts accessors for all non-public term members accessed
+ * from inlined code. Accessors are collected in the `accessors` buffer.
+ */
+ object addAccessors extends TreeMap {
+ val inlineMethod = ctx.owner
+ val accessors = new mutable.ListBuffer[MemberDef]
+
+ /** A definition needs an accessor if it is private, protected, or qualified private */
+ def needsAccessor(sym: Symbol)(implicit ctx: Context) =
+ sym.is(AccessFlags) || sym.privateWithin.exists
+
+ /** The name of the next accessor to be generated */
+ def accessorName(implicit ctx: Context) =
+ ctx.freshNames.newName(inlineMethod.name.asTermName.inlineAccessorName.toString)
+
+ /** A fresh accessor symbol.
+ *
+ * @param tree The tree representing the original access to the non-public member
+ * @param accessorInfo The type of the accessor
+ */
+ def accessorSymbol(tree: Tree, accessorInfo: Type)(implicit ctx: Context): Symbol =
+ ctx.newSymbol(
+ owner = inlineMethod.owner,
+ name = if (tree.isTerm) accessorName.toTermName else accessorName.toTypeName,
+ flags = if (tree.isTerm) Synthetic | Method else Synthetic,
+ info = accessorInfo,
+ coord = tree.pos).entered
+
+ /** Add an accessor to a non-public method and replace the original access with a
+ * call to the accessor.
+ *
+ * @param tree The original access to the non-public symbol
+ * @param refPart The part that refers to the method or field of the original access
+ * @param targs All type arguments passed in the access, if any
+ * @param argss All value arguments passed in the access, if any
+ * @param accessedType The type of the accessed method or field, as seen from the access site.
+ * @param rhs A function that builds the right-hand side of the accessor,
+ * given a reference to the accessed symbol and any type and
+ * value arguments that need to be integrated.
+ * @return The call to the accessor method that replaces the original access.
+ */
+ def addAccessor(tree: Tree, refPart: Tree, targs: List[Tree], argss: List[List[Tree]],
+ accessedType: Type, rhs: (Tree, List[Type], List[List[Tree]]) => Tree)(implicit ctx: Context): Tree = {
+ val qual = qualifier(refPart)
+ def refIsLocal = qual match {
+ case qual: This => qual.symbol == refPart.symbol.owner
+ case _ => false
+ }
+ val (accessorDef, accessorRef) =
+ if (refPart.symbol.isStatic || refIsLocal) {
+ // Easy case: Reference to a static symbol or a symbol referenced via `this.`
+ val accessorType = accessedType.ensureMethodic
+ val accessor = accessorSymbol(tree, accessorType).asTerm
+ val accessorDef = polyDefDef(accessor, tps => argss =>
+ rhs(refPart, tps, argss))
+ val accessorRef = ref(accessor).appliedToTypeTrees(targs).appliedToArgss(argss)
+ (accessorDef, accessorRef)
+ } else {
+ // Hard case: Reference needs to go via a dynamic prefix
+ inlining.println(i"adding inline accessor for $tree -> (${qual.tpe}, $refPart: ${refPart.getClass}, [$targs%, %], ($argss%, %))")
+
+ // Need to dealias in order to catch all possible references to abstracted over types in
+ // substitutions
+ val dealiasMap = new TypeMap {
+ def apply(t: Type) = mapOver(t.dealias)
+ }
+
+ val qualType = dealiasMap(qual.tpe.widen)
+
+ // Add qualifier type as leading method argument to argument `tp`
+ def addQualType(tp: Type): Type = tp match {
+ case tp: PolyType => tp.derivedPolyType(tp.paramNames, tp.paramBounds, addQualType(tp.resultType))
+ case tp: ExprType => addQualType(tp.resultType)
+ case tp => MethodType(qualType :: Nil, tp)
+ }
+
+ // The types that are local to the inlined method, and that therefore have
+ // to be abstracted out in the accessor, which is external to the inlined method
+ val localRefs = qualType.namedPartsWith(_.symbol.isContainedIn(inlineMethod)).toList
+
+ // Abstract accessed type over local refs
+ def abstractQualType(mtpe: Type): Type =
+ if (localRefs.isEmpty) mtpe
+ else PolyType.fromSymbols(localRefs.map(_.symbol), mtpe).asInstanceOf[PolyType].flatten
+
+ val accessorType = abstractQualType(addQualType(dealiasMap(accessedType)))
+ val accessor = accessorSymbol(tree, accessorType).asTerm
+
+ val accessorDef = polyDefDef(accessor, tps => argss =>
+ rhs(argss.head.head.select(refPart.symbol), tps.drop(localRefs.length), argss.tail))
+
+ val accessorRef = ref(accessor)
+ .appliedToTypeTrees(localRefs.map(TypeTree(_)) ++ targs)
+ .appliedToArgss((qual :: Nil) :: argss)
+ (accessorDef, accessorRef)
+ }
+ accessors += accessorDef
+ inlining.println(i"added inline accessor: $accessorDef")
+ accessorRef
+ }
+
+ override def transform(tree: Tree)(implicit ctx: Context): Tree = super.transform {
+ tree match {
+ case _: Apply | _: TypeApply | _: RefTree if needsAccessor(tree.symbol) =>
+ if (tree.isTerm) {
+ val (methPart, targs, argss) = decomposeCall(tree)
+ addAccessor(tree, methPart, targs, argss,
+ accessedType = methPart.tpe.widen,
+ rhs = (qual, tps, argss) => qual.appliedToTypes(tps).appliedToArgss(argss))
+ } else {
+ // TODO: Handle references to non-public types.
+ // This is quite tricky, as such types can appear anywhere, including as parts
+ // of types of other things. For the moment we do nothing and complain
+ // at the implicit expansion site if there's a reference to an inaccessible type.
+ // Draft code (incomplete):
+ //
+ // val accessor = accessorSymbol(tree, TypeAlias(tree.tpe)).asType
+ // myAccessors += TypeDef(accessor)
+ // ref(accessor)
+ //
+ tree
+ }
+ case Assign(lhs: RefTree, rhs) if needsAccessor(lhs.symbol) =>
+ addAccessor(tree, lhs, Nil, (rhs :: Nil) :: Nil,
+ accessedType = MethodType(rhs.tpe.widen :: Nil, defn.UnitType),
+ rhs = (lhs, tps, argss) => lhs.becomes(argss.head.head))
+ case _ => tree
+ }
+ }
+ }
+
+ val tree1 = addAccessors.transform(tree)
+ flatTree(tree1 :: addAccessors.accessors.toList)
+ }
+
+ /** Register inline info for given inline method `sym`.
+ *
+ * @param sym The symbol denotation of the inline method for which info is registered
+ * @param treeExpr A function that computes the tree to be inlined, given a context
+ * This tree may still refer to non-public members.
+ * @param ctx The context to use for evaluating `treeExpr`. It needs
+ * to have the inlined method as owner.
+ */
+ def registerInlineInfo(
+ sym: SymDenotation, treeExpr: Context => Tree)(implicit ctx: Context): Unit = {
+ sym.unforcedAnnotation(defn.BodyAnnot) match {
+ case Some(ann: ConcreteBodyAnnotation) =>
+ case Some(ann: LazyBodyAnnotation) if ann.isEvaluated =>
+ case _ =>
+ if (!ctx.isAfterTyper) {
+ val inlineCtx = ctx
+ sym.updateAnnotation(LazyBodyAnnotation { _ =>
+ implicit val ctx: Context = inlineCtx
+ val tree1 = treeExpr(ctx)
+ makeInlineable(tree1)
+ })
+ }
+ }
+ }
+
+ /** `sym` has an inline method with a known body to inline (note: definitions coming
+ * from Scala2x class files might be `@inline`, but still lack that body.
+ */
+ def hasBodyToInline(sym: SymDenotation)(implicit ctx: Context): Boolean =
+ sym.isInlineMethod && sym.hasAnnotation(defn.BodyAnnot)
+
+ private def bodyAndAccessors(sym: SymDenotation)(implicit ctx: Context): (Tree, List[MemberDef]) =
+ sym.unforcedAnnotation(defn.BodyAnnot).get.tree match {
+ case Thicket(body :: accessors) => (body, accessors.asInstanceOf[List[MemberDef]])
+ case body => (body, Nil)
+ }
+
+ /** The body to inline for method `sym`.
+ * @pre hasBodyToInline(sym)
+ */
+ def bodyToInline(sym: SymDenotation)(implicit ctx: Context): Tree =
+ bodyAndAccessors(sym)._1
+
+ /** The accessors to non-public members needed by the inlinable body of `sym`.
+ * These accessors are dropped as a side effect of calling this method.
+ * @pre hasBodyToInline(sym)
+ */
+ def removeInlineAccessors(sym: SymDenotation)(implicit ctx: Context): List[MemberDef] = {
+ val (body, accessors) = bodyAndAccessors(sym)
+ if (accessors.nonEmpty) sym.updateAnnotation(ConcreteBodyAnnotation(body))
+ accessors
+ }
+
+ /** Try to inline a call to a `@inline` method. Fail with error if the maximal
+ * inline depth is exceeded.
+ *
+ * @param tree The call to inline
+ * @param pt The expected type of the call.
+ * @return An `Inlined` node that refers to the original call and the inlined bindings
+ * and body that replace it.
+ */
+ def inlineCall(tree: Tree, pt: Type)(implicit ctx: Context): Tree =
+ if (enclosingInlineds.length < ctx.settings.xmaxInlines.value)
+ new Inliner(tree, bodyToInline(tree.symbol)).inlined(pt)
+ else errorTree(tree,
+ i"""Maximal number of successive inlines (${ctx.settings.xmaxInlines.value}) exceeded,
+ | Maybe this is caused by a recursive inline method?
+ | You can use -Xmax:inlines to change the limit.""")
+
+ /** Replace `Inlined` node by a block that contains its bindings and expansion */
+ def dropInlined(inlined: tpd.Inlined)(implicit ctx: Context): Tree = {
+ val reposition = new TreeMap {
+ override def transform(tree: Tree)(implicit ctx: Context): Tree =
+ tree.withPos(inlined.call.pos)
+ }
+ tpd.seq(inlined.bindings, reposition.transform(inlined.expansion))
+ }
+
+ /** The qualifier part of a Select or Ident.
+ * For an Ident, this is the `This` of the current class. (TODO: use elsewhere as well?)
+ */
+ private def qualifier(tree: Tree)(implicit ctx: Context) = tree match {
+ case Select(qual, _) => qual
+ case _ => This(ctx.owner.enclosingClass.asClass)
+ }
+}
+
+/** Produces an inlined version of `call` via its `inlined` method.
+ *
+ * @param call The original call to a `@inline` method
+ * @param rhs The body of the inline method that replaces the call.
+ */
+class Inliner(call: tpd.Tree, rhs: tpd.Tree)(implicit ctx: Context) {
+ import tpd._
+ import Inliner._
+
+ private val (methPart, targs, argss) = decomposeCall(call)
+ private val meth = methPart.symbol
+ private val prefix = qualifier(methPart)
+
+ // Make sure all type arguments to the call are fully determined
+ for (targ <- targs) fullyDefinedType(targ.tpe, "inlined type argument", targ.pos)
+
+ /** A map from parameter names of the inline method to references of the actual arguments.
+ * For a type argument this is the full argument type.
+ * For a value argument, it is a reference to either the argument value
+ * (if the argument is a pure expression of singleton type), or to `val` or `def` acting
+ * as a proxy (if the argument is something else).
+ */
+ private val paramBinding = new mutable.HashMap[Name, Type]
+
+ /** A map from references to (type and value) parameters of the inline method
+ * to their corresponding argument or proxy references, as given by `paramBinding`.
+ */
+ private val paramProxy = new mutable.HashMap[Type, Type]
+
+ /** A map from (direct and outer) this references in `rhs` to references of their proxies */
+ private val thisProxy = new mutable.HashMap[Type, TermRef]
+
+ /** A buffer for bindings that define proxies for actual arguments */
+ val bindingsBuf = new mutable.ListBuffer[ValOrDefDef]
+
+ computeParamBindings(meth.info, targs, argss)
+
+ private def newSym(name: Name, flags: FlagSet, info: Type): Symbol =
+ ctx.newSymbol(ctx.owner, name, flags, info, coord = call.pos)
+
+ /** Populate `paramBinding` and `bindingsBuf` by matching parameters with
+ * corresponding arguments. `bindingsBuf` will be further extended later by
+ * proxies to this-references.
+ */
+ private def computeParamBindings(tp: Type, targs: List[Tree], argss: List[List[Tree]]): Unit = tp match {
+ case tp: PolyType =>
+ (tp.paramNames, targs).zipped.foreach { (name, arg) =>
+ paramBinding(name) = arg.tpe.stripTypeVar
+ }
+ computeParamBindings(tp.resultType, Nil, argss)
+ case tp: MethodType =>
+ (tp.paramNames, tp.paramTypes, argss.head).zipped.foreach { (name, paramtp, arg) =>
+ def isByName = paramtp.dealias.isInstanceOf[ExprType]
+ paramBinding(name) = arg.tpe.stripAnnots.stripTypeVar match {
+ case argtpe: SingletonType if isByName || isIdempotentExpr(arg) => argtpe
+ case argtpe =>
+ val inlineFlag = if (paramtp.hasAnnotation(defn.InlineParamAnnot)) Inline else EmptyFlags
+ val (bindingFlags, bindingType) =
+ if (isByName) (inlineFlag | Method, ExprType(argtpe.widen))
+ else (inlineFlag, argtpe.widen)
+ val boundSym = newSym(name, bindingFlags, bindingType).asTerm
+ val binding =
+ if (isByName) DefDef(boundSym, arg.changeOwner(ctx.owner, boundSym))
+ else ValDef(boundSym, arg)
+ bindingsBuf += binding
+ boundSym.termRef
+ }
+ }
+ computeParamBindings(tp.resultType, targs, argss.tail)
+ case _ =>
+ assert(targs.isEmpty)
+ assert(argss.isEmpty)
+ }
+
+ /** Populate `thisProxy` and `paramProxy` as follows:
+ *
+ * 1a. If given type refers to a static this, thisProxy binds it to corresponding global reference,
+ * 1b. If given type refers to an instance this, create a proxy symbol and bind the thistype to
+ * refer to the proxy. The proxy is not yet entered in `bindingsBuf` that will come later.
+ * 2. If given type refers to a parameter, make `paramProxy` refer to the entry stored
+ * in `paramNames` under the parameter's name. This roundabout way to bind parameter
+ * references to proxies is done because we do not know a priori what the parameter
+ * references of a method are (we only know the method's type, but that contains PolyParams
+ * and MethodParams, not TypeRefs or TermRefs).
+ */
+ private def registerType(tpe: Type): Unit = tpe match {
+ case tpe: ThisType
+ if !ctx.owner.isContainedIn(tpe.cls) && !tpe.cls.is(Package) &&
+ !thisProxy.contains(tpe) =>
+ if (tpe.cls.isStaticOwner)
+ thisProxy(tpe) = tpe.cls.sourceModule.termRef
+ else {
+ val proxyName = s"${tpe.cls.name}_this".toTermName
+ val proxyType = tpe.asSeenFrom(prefix.tpe, meth.owner)
+ thisProxy(tpe) = newSym(proxyName, EmptyFlags, proxyType).termRef
+ registerType(meth.owner.thisType) // make sure we have a base from which to outer-select
+ }
+ case tpe: NamedType
+ if tpe.symbol.is(Param) && tpe.symbol.owner == meth &&
+ !paramProxy.contains(tpe) =>
+ paramProxy(tpe) = paramBinding(tpe.name)
+ case _ =>
+ }
+
+ /** Register type of leaf node */
+ private def registerLeaf(tree: Tree): Unit = tree match {
+ case _: This | _: Ident | _: TypeTree =>
+ tree.tpe.foreachPart(registerType, stopAtStatic = true)
+ case _ =>
+ }
+
+ /** The Inlined node representing the inlined call */
+ def inlined(pt: Type) = {
+ // make sure prefix is executed if it is impure
+ if (!isIdempotentExpr(prefix)) registerType(meth.owner.thisType)
+
+ // Register types of all leaves of inlined body so that the `paramProxy` and `thisProxy` maps are defined.
+ rhs.foreachSubTree(registerLeaf)
+
+ // The class that the this-proxy `selfSym` represents
+ def classOf(selfSym: Symbol) = selfSym.info.widen.classSymbol
+
+ // The name of the outer selector that computes the rhs of `selfSym`
+ def outerSelector(selfSym: Symbol): TermName = classOf(selfSym).name.toTermName ++ nme.OUTER_SELECT
+
+ // The total nesting depth of the class represented by `selfSym`.
+ def outerLevel(selfSym: Symbol): Int = classOf(selfSym).ownersIterator.length
+
+ // All needed this-proxies, sorted by nesting depth of the classes they represent (innermost first)
+ val accessedSelfSyms = thisProxy.values.toList.map(_.symbol).sortBy(-outerLevel(_))
+
+ // Compute val-definitions for all this-proxies and append them to `bindingsBuf`
+ var lastSelf: Symbol = NoSymbol
+ for (selfSym <- accessedSelfSyms) {
+ val rhs =
+ if (!lastSelf.exists)
+ prefix
+ else
+ untpd.Select(ref(lastSelf), outerSelector(selfSym)).withType(selfSym.info)
+ bindingsBuf += ValDef(selfSym.asTerm, rhs)
+ lastSelf = selfSym
+ }
+
+ // The type map to apply to the inlined tree. This maps references to this-types
+ // and parameters to type references of their arguments or proxies.
+ val typeMap = new TypeMap {
+ def apply(t: Type) = t match {
+ case t: ThisType => thisProxy.getOrElse(t, t)
+ case t: TypeRef => paramProxy.getOrElse(t, mapOver(t))
+ case t: SingletonType => paramProxy.getOrElse(t, mapOver(t))
+ case t => mapOver(t)
+ }
+ }
+
+ // The tree map to apply to the inlined tree. This maps references to this-types
+ // and parameters to references of their arguments or their proxies.
+ def treeMap(tree: Tree) = {
+ tree match {
+ case _: This =>
+ thisProxy.get(tree.tpe) match {
+ case Some(t) => ref(t).withPos(tree.pos)
+ case None => tree
+ }
+ case _: Ident =>
+ paramProxy.get(tree.tpe) match {
+ case Some(t: SingletonType) if tree.isTerm => singleton(t).withPos(tree.pos)
+ case Some(t) if tree.isType => TypeTree(t).withPos(tree.pos)
+ case None => tree
+ }
+ case _ => tree
+ }}
+
+ // The complete translation maps references to this and parameters to
+ // corresponding arguments or proxies on the type and term level. It also changes
+ // the owner from the inlined method to the current owner.
+ val inliner = new TreeTypeMap(typeMap, treeMap, meth :: Nil, ctx.owner :: Nil)
+
+ val expansion = inliner(rhs.withPos(call.pos))
+ ctx.traceIndented(i"inlining $call\n, BINDINGS =\n${bindingsBuf.toList}%\n%\nEXPANSION =\n$expansion", inlining, show = true) {
+
+ // The final expansion runs a typing pass over the inlined tree. See InlineTyper for details.
+ val expansion1 = InlineTyper.typed(expansion, pt)(inlineContext(call))
+
+ /** Does given definition bind a closure that will be inlined? */
+ def bindsDeadClosure(defn: ValOrDefDef) = Ident(defn.symbol.termRef) match {
+ case InlineableClosure(_) => !InlineTyper.retainedClosures.contains(defn.symbol)
+ case _ => false
+ }
+
+ /** All bindings in `bindingsBuf` except bindings of inlineable closures */
+ val bindings = bindingsBuf.toList.filterNot(bindsDeadClosure).map(_.withPos(call.pos))
+
+ tpd.Inlined(call, bindings, expansion1)
+ }
+ }
+
+ /** An extractor for references to closure arguments that refer to `@inline` methods */
+ private object InlineableClosure {
+ lazy val paramProxies = paramProxy.values.toSet
+ def unapply(tree: Ident)(implicit ctx: Context): Option[Tree] =
+ if (paramProxies.contains(tree.tpe)) {
+ bindingsBuf.find(_.name == tree.name) match {
+ case Some(ddef: ValDef) if ddef.symbol.is(Inline) =>
+ ddef.rhs match {
+ case closure(_, meth, _) => Some(meth)
+ case _ => None
+ }
+ case _ => None
+ }
+ } else None
+ }
+
+ /** A typer for inlined code. Its purpose is:
+ * 1. Implement constant folding over inlined code
+ * 2. Selectively expand ifs with constant conditions
+ * 3. Inline arguments that are inlineable closures
+ * 4. Make sure inlined code is type-correct.
+ * 5. Make sure that the tree's typing is idempotent (so that future -Ycheck passes succeed)
+ */
+ private object InlineTyper extends ReTyper {
+
+ var retainedClosures = Set[Symbol]()
+
+ override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context) = {
+ val tree1 = super.typedIdent(tree, pt)
+ tree1 match {
+ case InlineableClosure(_) => retainedClosures += tree.symbol
+ case _ =>
+ }
+ tree1
+ }
+
+ override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = {
+ val res = super.typedSelect(tree, pt)
+ ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.pos)
+ res
+ }
+
+ override def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) = {
+ val cond1 = typed(tree.cond, defn.BooleanType)
+ cond1.tpe.widenTermRefExpr match {
+ case ConstantType(Constant(condVal: Boolean)) =>
+ val selected = typed(if (condVal) tree.thenp else tree.elsep, pt)
+ if (isIdempotentExpr(cond1)) selected
+ else Block(cond1 :: Nil, selected)
+ case _ =>
+ val if1 = untpd.cpy.If(tree)(cond = untpd.TypedSplice(cond1))
+ super.typedIf(if1, pt)
+ }
+ }
+
+ override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context) = tree.asInstanceOf[tpd.Tree] match {
+ case Apply(Select(InlineableClosure(fn), nme.apply), args) =>
+ inlining.println(i"reducing $tree with closure $fn")
+ typed(fn.appliedToArgs(args), pt)
+ case _ =>
+ super.typedApply(tree, pt)
+ }
+ }
+}
diff --git a/src/dotty/tools/dotc/typer/Namer.scala b/src/dotty/tools/dotc/typer/Namer.scala
index de27333d5..00e92cbfb 100644
--- a/src/dotty/tools/dotc/typer/Namer.scala
+++ b/src/dotty/tools/dotc/typer/Namer.scala
@@ -4,17 +4,18 @@ package typer
import core._
import ast._
-import Trees._, Constants._, StdNames._, Scopes._, Denotations._
+import Trees._, Constants._, StdNames._, Scopes._, Denotations._, Comments._
import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._, Decorators._
import ast.desugar, ast.desugar._
import ProtoTypes._
import util.Positions._
-import util.{Attachment, SourcePosition, DotClass}
+import util.{Property, SourcePosition, DotClass}
import collection.mutable
import annotation.tailrec
import ErrorReporting._
import tpd.ListOfTreeDecorator
-import config.Printers._
+import config.Config
+import config.Printers.{typr, completions, noPrinter}
import Annotations._
import Inferencing._
import transform.ValueClasses._
@@ -159,9 +160,9 @@ class Namer { typer: Typer =>
import untpd._
- val TypedAhead = new Attachment.Key[tpd.Tree]
- val ExpandedTree = new Attachment.Key[Tree]
- val SymOfTree = new Attachment.Key[Symbol]
+ val TypedAhead = new Property.Key[tpd.Tree]
+ val ExpandedTree = new Property.Key[Tree]
+ val SymOfTree = new Property.Key[Symbol]
/** A partial map from unexpanded member and pattern defs and to their expansions.
* Populated during enterSyms, emptied during typer.
@@ -272,6 +273,7 @@ class Namer { typer: Typer =>
}
val inSuperCall = if (ctx.mode is Mode.InSuperCall) InSuperCall else EmptyFlags
+
tree match {
case tree: TypeDef if tree.isClassDef =>
val name = checkNoConflict(tree.name.encode).asTypeName
@@ -279,7 +281,7 @@ class Namer { typer: Typer =>
val cls = recordSym(ctx.newClassSymbol(
ctx.owner, name, flags | inSuperCall,
cls => adjustIfModule(new ClassCompleter(cls, tree)(ctx), tree),
- privateWithinClass(tree.mods), tree.pos, ctx.source.file), tree)
+ privateWithinClass(tree.mods), tree.namePos, ctx.source.file), tree)
cls.completer.asInstanceOf[ClassCompleter].init()
cls
case tree: MemberDef =>
@@ -306,10 +308,15 @@ class Namer { typer: Typer =>
// have no implementation.
val cctx = if (tree.name == nme.CONSTRUCTOR && !(tree.mods is JavaDefined)) ctx.outer else ctx
+ val completer = tree match {
+ case tree: TypeDef => new TypeDefCompleter(tree)(cctx)
+ case _ => new Completer(tree)(cctx)
+ }
+
recordSym(ctx.newSymbol(
ctx.owner, name, flags | deferred | method | higherKinded | inSuperCall1,
- adjustIfModule(new Completer(tree)(cctx), tree),
- privateWithinClass(tree.mods), tree.pos), tree)
+ adjustIfModule(completer, tree),
+ privateWithinClass(tree.mods), tree.namePos), tree)
case tree: Import =>
recordSym(ctx.newSymbol(
ctx.owner, nme.IMPORT, Synthetic, new Completer(tree), NoSymbol, tree.pos), tree)
@@ -396,22 +403,62 @@ class Namer { typer: Typer =>
/** Create top-level symbols for all statements in the expansion of this statement and
* enter them into symbol table
*/
- def indexExpanded(stat: Tree)(implicit ctx: Context): Context = expanded(stat) match {
- case pcl: PackageDef =>
- val pkg = createPackageSymbol(pcl.pid)
- index(pcl.stats)(ctx.fresh.setOwner(pkg.moduleClass))
- invalidateCompanions(pkg, Trees.flatten(pcl.stats map expanded))
- ctx
- case imp: Import =>
- importContext(createSymbol(imp), imp.selectors)
- case mdef: DefTree =>
- enterSymbol(createSymbol(mdef))
- ctx
- case stats: Thicket =>
- for (tree <- stats.toList) enterSymbol(createSymbol(tree))
- ctx
+ def indexExpanded(origStat: Tree)(implicit ctx: Context): Context = {
+ def recur(stat: Tree): Context = stat match {
+ case pcl: PackageDef =>
+ val pkg = createPackageSymbol(pcl.pid)
+ index(pcl.stats)(ctx.fresh.setOwner(pkg.moduleClass))
+ invalidateCompanions(pkg, Trees.flatten(pcl.stats map expanded))
+ setDocstring(pkg, stat)
+ ctx
+ case imp: Import =>
+ importContext(createSymbol(imp), imp.selectors)
+ case mdef: DefTree =>
+ val sym = enterSymbol(createSymbol(mdef))
+ setDocstring(sym, origStat)
+ addEnumConstants(mdef, sym)
+ ctx
+ case stats: Thicket =>
+ stats.toList.foreach(recur)
+ ctx
+ case _ =>
+ ctx
+ }
+ recur(expanded(origStat))
+ }
+
+ /** Determines whether this field holds an enum constant.
+ * To qualify, the following conditions must be met:
+ * - The field's class has the ENUM flag set
+ * - The field's class extends java.lang.Enum
+ * - The field has the ENUM flag set
+ * - The field is static
+ * - The field is stable
+ */
+ def isEnumConstant(vd: ValDef)(implicit ctx: Context) = {
+ // val ownerHasEnumFlag =
+ // Necessary to check because scalac puts Java's static members into the companion object
+ // while Scala's enum constants live directly in the class.
+ // We don't check for clazz.superClass == JavaEnumClass, because this causes a illegal
+ // cyclic reference error. See the commit message for details.
+ // if (ctx.compilationUnit.isJava) ctx.owner.companionClass.is(Enum) else ctx.owner.is(Enum)
+ vd.mods.is(allOf(Enum, Stable, JavaStatic, JavaDefined)) // && ownerHasEnumFlag
+ }
+
+ /** Add java enum constants */
+ def addEnumConstants(mdef: DefTree, sym: Symbol)(implicit ctx: Context): Unit = mdef match {
+ case vdef: ValDef if (isEnumConstant(vdef)) =>
+ val enumClass = sym.owner.linkedClass
+ if (!(enumClass is Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed)
+ enumClass.addAnnotation(Annotation.makeChild(sym))
case _ =>
- ctx
+ }
+
+
+ def setDocstring(sym: Symbol, tree: Tree)(implicit ctx: Context) = tree match {
+ case t: MemberDef if t.rawComment.isDefined =>
+ ctx.docCtx.foreach(_.addDocstring(sym, t.rawComment))
+ case _ => ()
}
/** Create top-level symbols for statements and enter them into symbol table */
@@ -478,32 +525,11 @@ class Namer { typer: Typer =>
}
/** The completer of a symbol defined by a member def or import (except ClassSymbols) */
- class Completer(val original: Tree)(implicit ctx: Context) extends TypeParamsCompleter {
+ class Completer(val original: Tree)(implicit ctx: Context) extends LazyType {
protected def localContext(owner: Symbol) = ctx.fresh.setOwner(owner).setTree(original)
- private var myTypeParams: List[TypeSymbol] = null
- private var nestedCtx: Context = null
-
- def completerTypeParams(sym: Symbol): List[TypeSymbol] = {
- if (myTypeParams == null) {
- //println(i"completing type params of $sym in ${sym.owner}")
- myTypeParams = original match {
- case tdef: TypeDef =>
- nestedCtx = localContext(sym).setNewScope
- locally {
- implicit val ctx: Context = nestedCtx
- completeParams(tdef.tparams)
- tdef.tparams.map(symbolOfTree(_).asType)
- }
- case _ =>
- Nil
- }
- }
- myTypeParams
- }
-
- private def typeSig(sym: Symbol): Type = original match {
+ protected def typeSig(sym: Symbol): Type = original match {
case original: ValDef =>
if (sym is Module) moduleValSig(sym)
else valOrDefDefSig(original, sym, Nil, Nil, identity)(localContext(sym).setNewScope)
@@ -511,9 +537,6 @@ class Namer { typer: Typer =>
val typer1 = ctx.typer.newLikeThis
nestedTyper(sym) = typer1
typer1.defDefSig(original, sym)(localContext(sym).setTyper(typer1))
- case original: TypeDef =>
- assert(!original.isClassDef)
- typeDefSig(original, sym, completerTypeParams(sym))(nestedCtx)
case imp: Import =>
try {
val expr1 = typedAheadExpr(imp.expr, AnySelectionProto)
@@ -540,24 +563,69 @@ class Namer { typer: Typer =>
protected def addAnnotations(denot: SymDenotation): Unit = original match {
case original: untpd.MemberDef =>
+ var hasInlineAnnot = false
for (annotTree <- untpd.modsDeco(original).mods.annotations) {
val cls = typedAheadAnnotation(annotTree)
val ann = Annotation.deferred(cls, implicit ctx => typedAnnotation(annotTree))
denot.addAnnotation(ann)
+ if (cls == defn.InlineAnnot) {
+ hasInlineAnnot = true
+ addInlineInfo(denot, original)
+ }
+ }
+ if (!hasInlineAnnot && denot.is(InlineMethod)) {
+ // create a @inline annotation. Currently, the inlining trigger
+ // is really the annotation, not the flag. This is done so that
+ // we can still compile inline methods from Scala2x. Once we stop
+ // being compatible with Scala2 we should revise the logic to
+ // be based on the flag. Then creating a separate annotation becomes unnecessary.
+ denot.addAnnotation(Annotation(defn.InlineAnnot))
+ addInlineInfo(denot, original)
}
case _ =>
}
+ private def addInlineInfo(denot: SymDenotation, original: untpd.Tree) = original match {
+ case original: untpd.DefDef =>
+ Inliner.registerInlineInfo(
+ denot,
+ implicit ctx => typedAheadExpr(original).asInstanceOf[tpd.DefDef].rhs
+ )(localContext(denot.symbol))
+ case _ =>
+ }
+
/** Intentionally left without `implicit ctx` parameter. We need
* to pick up the context at the point where the completer was created.
*/
def completeInCreationContext(denot: SymDenotation): Unit = {
- denot.info = typeSig(denot.symbol)
addAnnotations(denot)
+ denot.info = typeSig(denot.symbol)
Checking.checkWellFormed(denot.symbol)
}
}
+ class TypeDefCompleter(original: TypeDef)(ictx: Context) extends Completer(original)(ictx) with TypeParamsCompleter {
+ private var myTypeParams: List[TypeSymbol] = null
+ private var nestedCtx: Context = null
+ assert(!original.isClassDef)
+
+ def completerTypeParams(sym: Symbol)(implicit ctx: Context): List[TypeSymbol] = {
+ if (myTypeParams == null) {
+ //println(i"completing type params of $sym in ${sym.owner}")
+ nestedCtx = localContext(sym).setNewScope
+ myTypeParams = {
+ implicit val ctx: Context = nestedCtx
+ completeParams(original.tparams)
+ original.tparams.map(symbolOfTree(_).asType)
+ }
+ }
+ myTypeParams
+ }
+
+ override protected def typeSig(sym: Symbol): Type =
+ typeDefSig(original, sym, completerTypeParams(sym)(ictx))(nestedCtx)
+ }
+
class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) {
withDecls(newScope)
@@ -581,7 +649,7 @@ class Namer { typer: Typer =>
*/
def parentType(parent: untpd.Tree)(implicit ctx: Context): Type =
if (parent.isType) {
- typedAheadType(parent).tpe
+ typedAheadType(parent, AnyTypeConstructorProto).tpe
} else {
val (core, targs) = stripApply(parent) match {
case TypeApply(core, targs) => (core, targs)
@@ -632,7 +700,7 @@ class Namer { typer: Typer =>
val pname = paramAccessor.name
def illegal(how: String): Unit = {
- ctx.error(d"Illegal override of public type parameter $pname in $parent$how", paramAccessor.pos)
+ ctx.error(em"Illegal override of public type parameter $pname in $parent$how", paramAccessor.pos)
ok = false
}
@@ -645,7 +713,7 @@ class Namer { typer: Typer =>
case TypeRef(pre, name1) if name1 == pname && (pre =:= cls.thisType) =>
// OK, parameter is passed on directly
case _ =>
- illegal(d".\nParameter is both redeclared and instantiated with $alias.")
+ illegal(em".\nParameter is both redeclared and instantiated with $alias.")
}
case _ => // OK, argument is not fully defined
}
@@ -660,6 +728,8 @@ class Namer { typer: Typer =>
ok
}
+ addAnnotations(denot)
+
val selfInfo =
if (self.isEmpty) NoType
else if (cls.is(Module)) {
@@ -672,7 +742,8 @@ class Namer { typer: Typer =>
else createSymbol(self)
// pre-set info, so that parent types can refer to type params
- denot.info = ClassInfo(cls.owner.thisType, cls, Nil, decls, selfInfo)
+ val tempInfo = new TempClassInfo(cls.owner.thisType, cls, decls, selfInfo)
+ denot.info = tempInfo
// Ensure constructor is completed so that any parameter accessors
// which have type trees deriving from its parameters can be
@@ -682,14 +753,15 @@ class Namer { typer: Typer =>
index(constr)
symbolOfTree(constr).ensureCompleted()
+ index(rest)(inClassContext(selfInfo))
+
val tparamAccessors = decls.filter(_ is TypeParamAccessor).toList
val parentTypes = ensureFirstIsClass(parents.map(checkedParentType(_, tparamAccessors)))
val parentRefs = ctx.normalizeToClassRefs(parentTypes, cls, decls)
typr.println(s"completing $denot, parents = $parents, parentTypes = $parentTypes, parentRefs = $parentRefs")
- index(rest)(inClassContext(selfInfo))
- denot.info = ClassInfo(cls.owner.thisType, cls, parentRefs, decls, selfInfo)
- addAnnotations(denot)
+ tempInfo.finalize(denot, parentRefs)
+
Checking.checkWellFormed(cls)
if (isDerivedValueClass(cls)) cls.setFlag(Final)
cls.setApplicableFlags(
@@ -761,20 +833,27 @@ class Namer { typer: Typer =>
lazy val schema = paramFn(WildcardType)
val site = sym.owner.thisType
((NoType: Type) /: sym.owner.info.baseClasses.tail) { (tp, cls) =>
- val iRawInfo =
- cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema).info
- val iInstInfo = iRawInfo match {
- case iRawInfo: PolyType =>
- if (iRawInfo.paramNames.length == typeParams.length)
- iRawInfo.instantiate(typeParams map (_.typeRef))
+ def instantiatedResType(info: Type, tparams: List[Symbol], paramss: List[List[Symbol]]): Type = info match {
+ case info: PolyType =>
+ if (info.paramNames.length == typeParams.length)
+ instantiatedResType(info.instantiate(tparams.map(_.typeRef)), Nil, paramss)
else NoType
+ case info: MethodType =>
+ paramss match {
+ case params :: paramss1 if info.paramNames.length == params.length =>
+ instantiatedResType(info.instantiate(params.map(_.termRef)), tparams, paramss1)
+ case _ =>
+ NoType
+ }
case _ =>
- if (typeParams.isEmpty) iRawInfo
+ if (tparams.isEmpty && paramss.isEmpty) info.widenExpr
else NoType
}
- val iResType = iInstInfo.finalResultType.asSeenFrom(site, cls)
+ val iRawInfo =
+ cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema).info
+ val iResType = instantiatedResType(iRawInfo, typeParams, paramss).asSeenFrom(site, cls)
if (iResType.exists)
- typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inst: $iInstInfo, inherited: $iResType")
+ typr.println(i"using inherited type for ${mdef.name}; raw: $iRawInfo, inherited: $iResType")
tp & iResType
}
}
@@ -813,14 +892,22 @@ class Namer { typer: Typer =>
// println(s"final inherited for $sym: ${inherited.toString}") !!!
// println(s"owner = ${sym.owner}, decls = ${sym.owner.info.decls.show}")
- def isInline = sym.is(Final, butNot = Method)
+ def isInline = sym.is(FinalOrInline, butNot = Method | Mutable)
+
+ // Widen rhs type and approximate `|' but keep ConstantTypes if
+ // definition is inline (i.e. final in Scala2).
def widenRhs(tp: Type): Type = tp.widenTermRefExpr match {
case tp: ConstantType if isInline => tp
- case _ => tp.widen.approximateUnion
+ case _ => ctx.harmonizeUnion(tp.widen)
}
+
+ // Replace aliases to Unit by Unit itself. If we leave the alias in
+ // it would be erased to BoxedUnit.
+ def dealiasIfUnit(tp: Type) = if (tp.isRef(defn.UnitClass)) defn.UnitType else tp
+
val rhsCtx = ctx.addMode(Mode.InferringReturnType)
def rhsType = typedAheadExpr(mdef.rhs, inherited orElse rhsProto)(rhsCtx).tpe
- def cookedRhsType = ctx.deskolemize(widenRhs(rhsType))
+ def cookedRhsType = ctx.deskolemize(dealiasIfUnit(widenRhs(rhsType)))
lazy val lhsType = fullyDefinedType(cookedRhsType, "right-hand side", mdef.pos)
//if (sym.name.toString == "y") println(i"rhs = $rhsType, cooked = $cookedRhsType")
if (inherited.exists)
@@ -830,7 +917,7 @@ class Namer { typer: Typer =>
else {
if (sym is Implicit) {
val resStr = if (mdef.isInstanceOf[DefDef]) "result " else ""
- ctx.error(d"${resStr}type of implicit definition needs to be given explicitly", mdef.pos)
+ ctx.error(s"${resStr}type of implicit definition needs to be given explicitly", mdef.pos)
sym.resetFlag(Implicit)
}
lhsType orElse WildcardType
@@ -859,7 +946,7 @@ class Namer { typer: Typer =>
WildcardType
}
paramFn(typedAheadType(mdef.tpt, tptProto).tpe)
- }
+ }
/** The type signature of a DefDef with given symbol */
def defDefSig(ddef: DefDef, sym: Symbol)(implicit ctx: Context) = {
@@ -911,13 +998,8 @@ class Namer { typer: Typer =>
}
def typeDefSig(tdef: TypeDef, sym: Symbol, tparamSyms: List[TypeSymbol])(implicit ctx: Context): Type = {
- val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree]
- //val toParameterize = tparamSyms.nonEmpty && !isDerived
- //val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived
def abstracted(tp: Type): Type =
- if (tparamSyms.nonEmpty && !isDerived) tp.LambdaAbstract(tparamSyms)
- //else if (toParameterize) tp.parameterizeWith(tparamSyms)
- else tp
+ if (tparamSyms.nonEmpty) tp.LambdaAbstract(tparamSyms) else tp
val dummyInfo = abstracted(TypeBounds.empty)
sym.info = dummyInfo
@@ -931,7 +1013,10 @@ class Namer { typer: Typer =>
//
// The scheme critically relies on an implementation detail of isRef, which
// inspects a TypeRef's info, instead of simply dealiasing alias types.
- val rhsType = abstracted(typedAheadType(tdef.rhs).tpe)
+
+ val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree]
+ val rhsBodyType = typedAheadType(tdef.rhs).tpe
+ val rhsType = if (isDerived) rhsBodyType else abstracted(rhsBodyType)
val unsafeInfo = rhsType match {
case bounds: TypeBounds => bounds
case alias => TypeAlias(alias, if (sym is Local) sym.variance else 0)
@@ -953,28 +1038,6 @@ class Namer { typer: Typer =>
}
ensureUpToDate(sym.typeRef, dummyInfo)
ensureUpToDate(sym.typeRef.appliedTo(tparamSyms.map(_.typeRef)), TypeBounds.empty)
-
- etaExpandArgs.apply(sym.info)
- }
-
- /** Eta expand all class types C appearing as arguments to a higher-kinded
- * type parameter to type lambdas, e.g. [HK0] => C[HK0]. This is necessary
- * because in `typedAppliedTypeTree` we might have missed some eta expansions
- * of arguments in F-bounds, because the recursive type was initialized with
- * TypeBounds.empty.
- */
- def etaExpandArgs(implicit ctx: Context) = new TypeMap {
- def apply(tp: Type): Type = tp match {
- case tp: RefinedType =>
- val args = tp.argInfos.mapconserve(this)
- if (args.nonEmpty) {
- val tycon = tp.withoutArgs(args)
- val tycon1 = this(tycon)
- val tparams = tycon.typeParams
- val args1 = if (args.length == tparams.length) etaExpandIfHK(tparams, args) else args
- if ((tycon1 eq tycon) && (args1 eq args)) tp else tycon1.appliedTo(args1)
- } else mapOver(tp)
- case _ => mapOver(tp)
- }
+ sym.info
}
}
diff --git a/src/dotty/tools/dotc/typer/ProtoTypes.scala b/src/dotty/tools/dotc/typer/ProtoTypes.scala
index 740258821..08f566d49 100644
--- a/src/dotty/tools/dotc/typer/ProtoTypes.scala
+++ b/src/dotty/tools/dotc/typer/ProtoTypes.scala
@@ -15,7 +15,7 @@ import util.common._
import Decorators._
import Uniques._
import ErrorReporting.errorType
-import config.Printers._
+import config.Printers.typr
import collection.mutable
object ProtoTypes {
@@ -43,6 +43,11 @@ object ProtoTypes {
isCompatible(normalize(tp, pt)(nestedCtx), pt)(nestedCtx)
}
+ private def disregardProto(pt: Type)(implicit ctx: Context): Boolean = pt.dealias match {
+ case _: OrType => true
+ case pt => pt.isRef(defn.UnitClass)
+ }
+
/** Check that the result type of the current method
* fits the given expected result type.
*/
@@ -54,7 +59,7 @@ object ProtoTypes {
case _ =>
true
}
- case _: ValueTypeOrProto if !(pt isRef defn.UnitClass) =>
+ case _: ValueTypeOrProto if !disregardProto(pt) =>
mt match {
case mt: MethodType =>
mt.isDependent || isCompatible(normalize(mt, pt), pt)
@@ -172,24 +177,54 @@ object ProtoTypes {
/** A map in which typed arguments can be stored to be later integrated in `typedArgs`. */
private var myTypedArg: SimpleMap[untpd.Tree, Tree] = SimpleMap.Empty
+ /** A map recording the typer states in which arguments stored in myTypedArg were typed */
+ private var evalState: SimpleMap[untpd.Tree, TyperState] = SimpleMap.Empty
+
def isMatchedBy(tp: Type)(implicit ctx: Context) =
typer.isApplicable(tp, Nil, typedArgs, resultType)
- def derivedFunProto(args: List[untpd.Tree], resultType: Type, typer: Typer) =
+ def derivedFunProto(args: List[untpd.Tree] = this.args, resultType: Type, typer: Typer = this.typer) =
if ((args eq this.args) && (resultType eq this.resultType) && (typer eq this.typer)) this
else new FunProto(args, resultType, typer)
- def argsAreTyped: Boolean = myTypedArgs.size == args.length
+ override def notApplied = WildcardType
+
+ /** Forget the types of any arguments that have been typed producing a constraint in a
+ * typer state that is not yet committed into the one of the current context `ctx`.
+ * This is necessary to avoid "orphan" PolyParams that are referred to from
+ * type variables in the typed arguments, but that are not registered in the
+ * current constraint. A test case is pos/t1756.scala.
+ * @return True if all arguments have types (in particular, no types were forgotten).
+ */
+ def allArgTypesAreCurrent()(implicit ctx: Context): Boolean = {
+ evalState foreachBinding { (arg, tstate) =>
+ if (tstate.uncommittedAncestor.constraint ne ctx.typerState.constraint) {
+ typr.println(i"need to invalidate $arg / ${myTypedArg(arg)}, ${tstate.constraint}, current = ${ctx.typerState.constraint}")
+ myTypedArg = myTypedArg.remove(arg)
+ evalState = evalState.remove(arg)
+ }
+ }
+ myTypedArg.size == args.length
+ }
+
+ private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree)(implicit ctx: Context): Tree = {
+ var targ = myTypedArg(arg)
+ if (targ == null) {
+ targ = typerFn(arg)
+ if (!ctx.reporter.hasPending) {
+ myTypedArg = myTypedArg.updated(arg, targ)
+ evalState = evalState.updated(arg, ctx.typerState)
+ }
+ }
+ targ
+ }
/** The typed arguments. This takes any arguments already typed using
* `typedArg` into account.
*/
def typedArgs: List[Tree] = {
- if (!argsAreTyped)
- myTypedArgs = args mapconserve { arg =>
- val targ = myTypedArg(arg)
- if (targ != null) targ else typer.typed(arg)
- }
+ if (myTypedArgs.size != args.length)
+ myTypedArgs = args.mapconserve(cacheTypedArg(_, typer.typed(_)))
myTypedArgs
}
@@ -197,11 +232,7 @@ object ProtoTypes {
* used to avoid repeated typings of trees when backtracking.
*/
def typedArg(arg: untpd.Tree, formal: Type)(implicit ctx: Context): Tree = {
- var targ = myTypedArg(arg)
- if (targ == null) {
- targ = typer.typedUnadapted(arg, formal)
- if (!ctx.reporter.hasPending) myTypedArg = myTypedArg.updated(arg, targ)
- }
+ val targ = cacheTypedArg(arg, typer.typedUnadapted(_, formal))
typer.adapt(targ, formal, arg)
}
@@ -237,7 +268,6 @@ object ProtoTypes {
*/
class FunProtoTyped(args: List[tpd.Tree], resultType: Type, typer: Typer)(implicit ctx: Context) extends FunProto(args, resultType, typer)(ctx) {
override def typedArgs = args
- override def argsAreTyped = true
}
/** A prototype for implicitly inferred views:
@@ -274,7 +304,7 @@ object ProtoTypes {
}
class UnapplyFunProto(argType: Type, typer: Typer)(implicit ctx: Context) extends FunProto(
- untpd.TypedSplice(dummyTreeOfType(argType)) :: Nil, WildcardType, typer)
+ untpd.TypedSplice(dummyTreeOfType(argType))(ctx) :: Nil, WildcardType, typer)
/** A prototype for expressions [] that are type-parameterized:
*
@@ -286,7 +316,7 @@ object ProtoTypes {
override def isMatchedBy(tp: Type)(implicit ctx: Context) = {
def isInstantiatable(tp: Type) = tp.widen match {
- case PolyType(paramNames) => paramNames.length == targs.length
+ case tp: PolyType => tp.paramNames.length == targs.length
case _ => false
}
isInstantiatable(tp) || tp.member(nme.apply).hasAltWith(d => isInstantiatable(d.info))
@@ -296,6 +326,8 @@ object ProtoTypes {
if ((targs eq this.targs) && (resType eq this.resType)) this
else PolyProto(targs, resType)
+ override def notApplied = WildcardType
+
def map(tm: TypeMap)(implicit ctx: Context): PolyProto =
derivedPolyProto(targs mapConserve tm, tm(resultType))
@@ -311,6 +343,9 @@ object ProtoTypes {
*/
@sharable object AnyFunctionProto extends UncachedGroundType with MatchAlways
+ /** A prototype for type constructors that are followed by a type application */
+ @sharable object AnyTypeConstructorProto extends UncachedGroundType with MatchAlways
+
/** Add all parameters in given polytype `pt` to the constraint's domain.
* If the constraint contains already some of these parameters in its domain,
* make a copy of the polytype and add the copy's type parameters instead.
@@ -328,7 +363,7 @@ object ProtoTypes {
yield new TypeVar(PolyParam(pt, n), state, owningTree, ctx.owner)
val added =
- if (state.constraint contains pt) pt.duplicate(pt.paramNames, pt.paramBounds, pt.resultType)
+ if (state.constraint contains pt) pt.newLikeThis(pt.paramNames, pt.paramBounds, pt.resultType)
else pt
val tvars = if (owningTree.isEmpty) Nil else newTypeVars(added)
ctx.typeComparer.addToConstraint(added, tvars)
@@ -365,9 +400,10 @@ object ProtoTypes {
if (mt.isDependent) tp
else {
val rt = normalize(mt.resultType, pt)
- if (pt.isInstanceOf[ApplyingProto])
- mt.derivedMethodType(mt.paramNames, mt.paramTypes, rt)
- else {
+ pt match {
+ case pt: IgnoredProto => mt
+ case pt: ApplyingProto => mt.derivedMethodType(mt.paramNames, mt.paramTypes, rt)
+ case _ =>
val ft = defn.FunctionOf(mt.paramTypes, rt)
if (mt.paramTypes.nonEmpty || ft <:< pt) ft else rt
}
@@ -402,6 +438,11 @@ object ProtoTypes {
WildcardType(TypeBounds.upper(wildApprox(mt.paramTypes(pnum))))
case tp: TypeVar =>
wildApprox(tp.underlying)
+ case tp @ HKApply(tycon, args) =>
+ wildApprox(tycon) match {
+ case _: WildcardType => WildcardType // this ensures we get a * type
+ case tycon1 => tp.derivedAppliedType(tycon1, args.mapConserve(wildApprox(_)))
+ }
case tp: AndType =>
val tp1a = wildApprox(tp.tp1)
val tp2a = wildApprox(tp.tp2)
@@ -430,6 +471,8 @@ object ProtoTypes {
(if (theMap != null) theMap else new WildApproxMap).mapOver(tp)
}
+ @sharable object AssignProto extends UncachedGroundType with MatchAlways
+
private[ProtoTypes] class WildApproxMap(implicit ctx: Context) extends TypeMap {
def apply(tp: Type) = wildApprox(tp, this)
}
diff --git a/src/dotty/tools/dotc/typer/ReTyper.scala b/src/dotty/tools/dotc/typer/ReTyper.scala
index 225451886..2413c0c22 100644
--- a/src/dotty/tools/dotc/typer/ReTyper.scala
+++ b/src/dotty/tools/dotc/typer/ReTyper.scala
@@ -10,7 +10,8 @@ import typer.ProtoTypes._
import ast.{tpd, untpd}
import ast.Trees._
import scala.util.control.NonFatal
-import config.Printers
+import util.Positions.Position
+import config.Printers.typr
/** A version of Typer that keeps all symbols defined and referenced in a
* previously typed tree.
@@ -23,6 +24,7 @@ import config.Printers
class ReTyper extends Typer {
import tpd._
+ /** Checks that the given tree has been typed */
protected def promote(tree: untpd.Tree)(implicit ctx: Context): tree.ThisTree[Type] = {
assert(tree.hasType, i"$tree ${tree.getClass} ${tree.uniqueId}")
tree.withType(tree.typeOpt)
@@ -37,12 +39,6 @@ class ReTyper extends Typer {
untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt)
}
- override def typedSelectFromTypeTree(tree: untpd.SelectFromTypeTree, pt: Type)(implicit ctx: Context): Tree = {
- assert(tree.hasType)
- val qual1 = typed(tree.qualifier, AnySelectionProto)
- untpd.cpy.SelectFromTypeTree(tree)(qual1, tree.name).withType(tree.typeOpt)
- }
-
override def typedLiteral(tree: untpd.Literal)(implicit ctc: Context): Literal =
promote(tree)
@@ -61,6 +57,13 @@ class ReTyper extends Typer {
untpd.cpy.Bind(tree)(tree.name, body1).withType(tree.typeOpt)
}
+ override def typedUnApply(tree: untpd.UnApply, selType: Type)(implicit ctx: Context): UnApply = {
+ val fun1 = typedExpr(tree.fun, AnyFunctionProto)
+ val implicits1 = tree.implicits.map(typedExpr(_))
+ val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.tpe))
+ untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.tpe)
+ }
+
override def localDummy(cls: ClassSymbol, impl: untpd.Template)(implicit ctx: Context) = impl.symbol
override def retrieveSym(tree: untpd.Tree)(implicit ctx: Context): Symbol = tree.symbol
@@ -92,9 +95,14 @@ class ReTyper extends Typer {
try super.typedUnadapted(tree, pt)
catch {
case NonFatal(ex) =>
- println(i"exception while typing $tree of class ${tree.getClass} # ${tree.uniqueId}")
+ if (ctx.isAfterTyper)
+ println(i"exception while typing $tree of class ${tree.getClass} # ${tree.uniqueId}")
throw ex
}
override def checkVariance(tree: Tree)(implicit ctx: Context) = ()
+ override def inferView(from: Tree, to: Type)(implicit ctx: Context): Implicits.SearchResult =
+ Implicits.NoImplicitMatches
+ override def checkCanEqual(ltp: Type, rtp: Type, pos: Position)(implicit ctx: Context): Unit = ()
+ override def inlineExpansion(mdef: DefDef)(implicit ctx: Context): List[Tree] = mdef :: Nil
}
diff --git a/src/dotty/tools/dotc/typer/RefChecks.scala b/src/dotty/tools/dotc/typer/RefChecks.scala
index afbb43faf..4d82a2d12 100644
--- a/src/dotty/tools/dotc/typer/RefChecks.scala
+++ b/src/dotty/tools/dotc/typer/RefChecks.scala
@@ -81,14 +81,14 @@ object RefChecks {
def checkSelfConforms(other: TypeRef, category: String, relation: String) = {
val otherSelf = other.givenSelfType.asSeenFrom(cls.thisType, other.classSymbol)
if (otherSelf.exists && !(cinfo.selfType <:< otherSelf))
- ctx.error(d"$category: self type ${cinfo.selfType} of $cls does not conform to self type $otherSelf of $relation ${other.classSymbol}", cls.pos)
+ ctx.error(ex"$category: self type ${cinfo.selfType} of $cls does not conform to self type $otherSelf of $relation ${other.classSymbol}", cls.pos)
}
for (parent <- cinfo.classParents) {
val pclazz = parent.classSymbol
if (pclazz.is(Final))
- ctx.error(d"cannot extend final $pclazz", cls.pos)
+ ctx.error(em"cannot extend final $pclazz", cls.pos)
if (pclazz.is(Sealed) && pclazz.associatedFile != cls.associatedFile)
- ctx.error(d"cannot extend sealed $pclazz in different compilation unit", cls.pos)
+ ctx.error(em"cannot extend sealed $pclazz in different compilation unit", cls.pos)
checkSelfConforms(parent, "illegal inheritance", "parent")
}
for (reqd <- cinfo.givenSelfType.classSymbols)
@@ -200,7 +200,7 @@ object RefChecks {
infoStringWithLocation(other),
infoStringWithLocation(member))
else if (ctx.settings.debug.value)
- err.typeMismatchStr(memberTp, otherTp)
+ err.typeMismatchMsg(memberTp, otherTp)
else ""
"overriding %s;\n %s %s%s".format(
@@ -246,6 +246,8 @@ object RefChecks {
isDefaultGetter(member.name) || // default getters are not checked for compatibility
memberTp.overrides(otherTp)
+ def domain(sym: Symbol): Set[Name] = sym.info.namedTypeParams.map(_.name)
+
//Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG
// return if we already checked this combination elsewhere
@@ -342,6 +344,9 @@ object RefChecks {
overrideError("cannot be used here - only term macros can override term macros")
} else if (!compatibleTypes) {
overrideError("has incompatible type" + err.whyNoMatchStr(memberTp, otherTp))
+ } else if (member.isType && domain(member) != domain(other)) {
+ overrideError("has different named type parameters: "+
+ i"[${domain(member).toList}%, %] instead of [${domain(other).toList}%, %]")
} else {
checkOverrideDeprecated()
}
@@ -520,7 +525,7 @@ object RefChecks {
subclassMsg(concreteSym, abstractSym)
else ""
- undefined(s"\n(Note that $pa does not match $pc$addendum)")
+ undefined(s"\n(Note that ${pa.show} does not match ${pc.show}$addendum)")
case xs =>
undefined(s"\n(The class implements a member with a different type: ${concrete.showDcl})")
}
@@ -832,7 +837,7 @@ class RefChecks extends MiniPhase { thisTransformer =>
if (tree.symbol is Macro) EmptyTree else tree
}
- override def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo) = {
+ override def transformTemplate(tree: Template)(implicit ctx: Context, info: TransformerInfo) = try {
val cls = ctx.owner
checkOverloadedRestrictions(cls)
checkParents(cls)
@@ -840,6 +845,10 @@ class RefChecks extends MiniPhase { thisTransformer =>
checkAllOverrides(cls)
checkDerivedValueClass(cls, tree.body)
tree
+ } catch {
+ case ex: MergeError =>
+ ctx.error(ex.getMessage, tree.pos)
+ tree
}
override def transformTypeTree(tree: TypeTree)(implicit ctx: Context, info: TransformerInfo) = {
diff --git a/src/dotty/tools/dotc/typer/TypeAssigner.scala b/src/dotty/tools/dotc/typer/TypeAssigner.scala
index 84951fd2b..861847b11 100644
--- a/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -6,11 +6,14 @@ import core._
import ast._
import Scopes._, Contexts._, Constants._, Types._, Symbols._, Names._, Flags._, Decorators._
import ErrorReporting._, Annotations._, Denotations._, SymDenotations._, StdNames._, TypeErasure._
+import TypeApplications.AppliedType
import util.Positions._
-import config.Printers._
+import config.Printers.typr
import ast.Trees._
import NameOps._
import collection.mutable
+import reporting.diagnostic.Message
+import reporting.diagnostic.messages._
trait TypeAssigner {
import tpd._
@@ -58,7 +61,10 @@ trait TypeAssigner {
case _ => false
}
def apply(tp: Type): Type = tp match {
- case tp: TermRef if toAvoid(tp) && variance > 0 =>
+ case tp: TermRef
+ if toAvoid(tp) && (variance > 0 || tp.info.widenExpr <:< tp) =>
+ // Can happen if `x: y.type`, then `x.type =:= y.type`, hence we can widen `x.type`
+ // to y.type in all contexts, not just covariant ones.
apply(tp.info.widenExpr)
case tp: TypeRef if toAvoid(tp) =>
tp.info match {
@@ -77,10 +83,11 @@ trait TypeAssigner {
parentType.findMember(decl.name, info.cls.thisType, Private)
.suchThat(decl.matches(_))
val inheritedInfo = inherited.info
- if (inheritedInfo.exists && decl.info <:< inheritedInfo && !(inheritedInfo <:< decl.info))
- typr.echo(
- i"add ref $parent $decl --> ",
- RefinedType(parent, decl.name, decl.info))
+ if (inheritedInfo.exists && decl.info <:< inheritedInfo && !(inheritedInfo <:< decl.info)) {
+ val r = RefinedType(parent, decl.name, decl.info)
+ typr.println(i"add ref $parent $decl --> " + r)
+ r
+ }
else
parent
}
@@ -93,27 +100,23 @@ trait TypeAssigner {
case _ =>
mapOver(tp)
}
- case tp @ RefinedType(parent, name) if variance > 0 =>
- // The naive approach here would be to first approximate the parent,
- // but if the base type of the approximated parent is different from
- // the current base type, then the current refinement won't be valid
- // if it's a type parameter refinement.
- // Therefore we first approximate the base type, then use `baseArgInfos`
- // to get correct refinements for the approximated base type, then
- // recursively approximate the resulting type.
- val base = tp.unrefine
- if (toAvoid(base)) {
- val base1 = apply(base)
- apply(base1.appliedTo(tp.baseArgInfos(base1.typeSymbol)))
+ case tp @ HKApply(tycon, args) if toAvoid(tycon) =>
+ apply(tp.superType)
+ case tp @ AppliedType(tycon, args) if toAvoid(tycon) =>
+ val base = apply(tycon)
+ var args = tp.baseArgInfos(base.typeSymbol)
+ if (base.typeParams.length != args.length)
+ args = base.typeParams.map(_.paramBounds)
+ apply(base.appliedTo(args))
+ case tp @ RefinedType(parent, name, rinfo) if variance > 0 =>
+ val parent1 = apply(tp.parent)
+ val refinedInfo1 = apply(rinfo)
+ if (toAvoid(refinedInfo1)) {
+ typr.println(s"dropping refinement from $tp")
+ if (name.isTypeName) tp.derivedRefinedType(parent1, name, TypeBounds.empty)
+ else parent1
} else {
- val parent1 = apply(tp.parent)
- val refinedInfo1 = apply(tp.refinedInfo)
- if (toAvoid(refinedInfo1)) {
- typr.println(s"dropping refinement from $tp")
- parent1
- } else {
- tp.derivedRefinedType(parent1, name, refinedInfo1)
- }
+ tp.derivedRefinedType(parent1, name, refinedInfo1)
}
case tp: TypeVar if ctx.typerState.constraint.contains(tp) =>
val lo = ctx.typerState.constraint.fullLowerBound(tp.origin)
@@ -126,6 +129,9 @@ trait TypeAssigner {
widenMap(tp)
}
+ def avoidingType(expr: Tree, bindings: List[Tree])(implicit ctx: Context): Type =
+ avoid(expr.tpe, localSyms(bindings).filter(_.isTerm))
+
def seqToRepeated(tree: Tree)(implicit ctx: Context): Tree =
Typed(tree, TypeTree(tree.tpe.widen.translateParameterized(defn.SeqClass, defn.RepeatedParamClass)))
@@ -152,7 +158,7 @@ trait TypeAssigner {
* which are accessible.
*
* Also performs the following normalizations on the type `tpe`.
- * (1) parameter accessors are alwys dereferenced.
+ * (1) parameter accessors are always dereferenced.
* (2) if the owner of the denotation is a package object, it is assured
* that the package object shows up as the prefix.
*/
@@ -168,7 +174,9 @@ trait TypeAssigner {
val d2 = pre.nonPrivateMember(name)
if (reallyExists(d2) && firstTry)
test(tpe.shadowed.withDenot(d2), false)
- else {
+ else if (pre.derivesFrom(defn.DynamicClass)) {
+ TryDynamicCallType
+ } else {
val alts = tpe.denot.alternatives.map(_.symbol).filter(_.exists)
val what = alts match {
case Nil =>
@@ -176,13 +184,13 @@ trait TypeAssigner {
case sym :: Nil =>
if (sym.owner == pre.typeSymbol) sym.show else sym.showLocated
case _ =>
- d"none of the overloaded alternatives named $name"
+ em"none of the overloaded alternatives named $name"
}
val where = if (ctx.owner.exists) s" from ${ctx.owner.enclosingClass}" else ""
val whyNot = new StringBuffer
alts foreach (_.isAccessibleFrom(pre, superAccess, whyNot))
if (!tpe.isError)
- ctx.error(d"$what cannot be accessed as a member of $pre$where.$whyNot", pos)
+ ctx.error(ex"$what cannot be accessed as a member of $pre$where.$whyNot", pos)
ErrorType
}
}
@@ -204,11 +212,18 @@ trait TypeAssigner {
def selectionType(site: Type, name: Name, pos: Position)(implicit ctx: Context): Type = {
val mbr = site.member(name)
if (reallyExists(mbr)) site.select(name, mbr)
- else {
+ else if (site.derivesFrom(defn.DynamicClass) && !Dynamic.isDynamicMethod(name)) {
+ TryDynamicCallType
+ } else {
if (!site.isErroneous) {
+ def kind = if (name.isTypeName) "type" else "value"
+ def addendum =
+ if (site.derivesFrom(defn.DynamicClass)) "\npossible cause: maybe a wrong Dynamic method signature?"
+ else ""
ctx.error(
- if (name == nme.CONSTRUCTOR) d"$site does not have a constructor"
- else d"$name is not a member of $site", pos)
+ if (name == nme.CONSTRUCTOR) ex"$site does not have a constructor"
+ else NotAMember(site, name, kind),
+ pos)
}
ErrorType
}
@@ -253,9 +268,6 @@ trait TypeAssigner {
tree.withType(tp)
}
- def assignType(tree: untpd.SelectFromTypeTree, qual: Tree)(implicit ctx: Context) =
- tree.withType(accessibleSelectionType(tree, qual))
-
def assignType(tree: untpd.New, tpt: Tree)(implicit ctx: Context) =
tree.withType(tpt.tpe)
@@ -283,7 +295,7 @@ trait TypeAssigner {
case p :: Nil =>
p
case Nil =>
- errorType(d"$mix does not name a parent class of $cls", tree.pos)
+ errorType(em"$mix does not name a parent class of $cls", tree.pos)
case p :: q :: _ =>
errorType("ambiguous parent class qualifier", tree.pos)
}
@@ -302,7 +314,7 @@ trait TypeAssigner {
val ownType = fn.tpe.widen match {
case fntpe @ MethodType(_, ptypes) =>
if (sameLength(ptypes, args) || ctx.phase.prev.relaxedTyping) fntpe.instantiate(args.tpes)
- else errorType(i"wrong number of parameters for ${fn.tpe}; expected: ${ptypes.length}", tree.pos)
+ else wrongNumberOfArgs(fn.tpe, "", ptypes.length, tree.pos)
case t =>
errorType(i"${err.exprStr(fn)} does not take parameters", tree.pos)
}
@@ -314,21 +326,30 @@ trait TypeAssigner {
case pt: PolyType =>
val paramNames = pt.paramNames
if (hasNamedArg(args)) {
- val argMap = new mutable.HashMap[Name, Type]
+ // Type arguments which are specified by name (immutable after this first loop)
+ val namedArgMap = new mutable.HashMap[Name, Type]
for (NamedArg(name, arg) <- args)
- if (argMap.contains(name))
+ if (namedArgMap.contains(name))
ctx.error("duplicate name", arg.pos)
else if (!paramNames.contains(name))
ctx.error(s"undefined parameter name, required: ${paramNames.mkString(" or ")}", arg.pos)
else
- argMap(name) = arg.tpe
+ namedArgMap(name) = arg.tpe
+
+ // Holds indexes of non-named typed arguments in paramNames
val gapBuf = new mutable.ListBuffer[Int]
- def nextPoly = {
- val idx = gapBuf.length
+ def nextPoly(idx: Int) = {
+ val newIndex = gapBuf.length
gapBuf += idx
- PolyParam(pt, idx)
+ // Re-index unassigned type arguments that remain after transformation
+ PolyParam(pt, newIndex)
}
- val normArgs = paramNames.map(pname => argMap.getOrElse(pname, nextPoly))
+
+ // Type parameters after naming assignment, conserving paramNames order
+ val normArgs: List[Type] = paramNames.zipWithIndex.map { case (pname, idx) =>
+ namedArgMap.getOrElse(pname, nextPoly(idx))
+ }
+
val transform = new TypeMap {
def apply(t: Type) = t match {
case PolyParam(`pt`, idx) => normArgs(idx)
@@ -340,25 +361,23 @@ trait TypeAssigner {
else {
val gaps = gapBuf.toList
pt.derivedPolyType(
- gaps.map(paramNames.filterNot(argMap.contains)),
+ gaps.map(paramNames),
gaps.map(idx => transform(pt.paramBounds(idx)).bounds),
resultType1)
}
}
else {
val argTypes = args.tpes
- if (sameLength(argTypes, paramNames)|| ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
- else errorType(d"wrong number of type parameters for ${fn.tpe}; expected: ${pt.paramNames.length}", tree.pos)
+ if (sameLength(argTypes, paramNames) || ctx.phase.prev.relaxedTyping) pt.instantiate(argTypes)
+ else wrongNumberOfArgs(fn.tpe, "type ", pt.paramNames.length, tree.pos)
}
case _ =>
errorType(i"${err.exprStr(fn)} does not take type parameters", tree.pos)
}
+
tree.withType(ownType)
}
- def assignType(tree: untpd.Pair, left: Tree, right: Tree)(implicit ctx: Context) =
- tree.withType(defn.PairType.appliedTo(left.tpe :: right.tpe :: Nil))
-
def assignType(tree: untpd.Typed, tpt: Tree)(implicit ctx: Context) =
tree.withType(tpt.tpe)
@@ -369,15 +388,18 @@ trait TypeAssigner {
tree.withType(defn.UnitType)
def assignType(tree: untpd.Block, stats: List[Tree], expr: Tree)(implicit ctx: Context) =
- tree.withType(avoid(expr.tpe, localSyms(stats) filter (_.isTerm)))
+ tree.withType(avoidingType(expr, stats))
+
+ def assignType(tree: untpd.Inlined, bindings: List[Tree], expansion: Tree)(implicit ctx: Context) =
+ tree.withType(avoidingType(expansion, bindings))
def assignType(tree: untpd.If, thenp: Tree, elsep: Tree)(implicit ctx: Context) =
tree.withType(thenp.tpe | elsep.tpe)
def assignType(tree: untpd.Closure, meth: Tree, target: Tree)(implicit ctx: Context) =
tree.withType(
- if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length)
- else target.tpe)
+ if (target.isEmpty) meth.tpe.widen.toFunctionType(tree.env.length)
+ else target.tpe)
def assignType(tree: untpd.CaseDef, body: Tree)(implicit ctx: Context) =
tree.withType(body.tpe)
@@ -394,7 +416,7 @@ trait TypeAssigner {
def assignType(tree: untpd.SeqLiteral, elems: List[Tree], elemtpt: Tree)(implicit ctx: Context) = {
val ownType = tree match {
- case tree: JavaSeqLiteral => defn.ArrayOf(elemtpt.tpe)
+ case tree: untpd.JavaSeqLiteral => defn.ArrayOf(elemtpt.tpe)
case _ => if (ctx.erasedTypes) defn.SeqType else defn.SeqType.appliedTo(elemtpt.tpe)
}
tree.withType(ownType)
@@ -413,29 +435,29 @@ trait TypeAssigner {
def assignType(tree: untpd.AppliedTypeTree, tycon: Tree, args: List[Tree])(implicit ctx: Context) = {
val tparams = tycon.tpe.typeParams
+ lazy val ntparams = tycon.tpe.namedTypeParams
def refineNamed(tycon: Type, arg: Tree) = arg match {
case ast.Trees.NamedArg(name, argtpt) =>
// Dotty deviation: importing ast.Trees._ and matching on NamedArg gives a cyclic ref error
- val tparam = tparams.find(_.name == name) match {
+ val tparam = tparams.find(_.paramName == name) match {
case Some(tparam) => tparam
- case none =>
- val sym = tycon.member(name).symbol
- if (sym.isAbstractType) sym
- else if (sym.is(ParamAccessor)) sym.info.dealias.typeSymbol
- else NoSymbol
+ case none => ntparams.find(_.name == name).getOrElse(NoSymbol)
}
- if (tparam.exists) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam))
- else errorType(s"$tycon does not have a parameter or abstract type member named $name", arg.pos)
+ if (tparam.isTypeParam) RefinedType(tycon, name, argtpt.tpe.toBounds(tparam))
+ else errorType(i"$tycon does not have a parameter or abstract type member named $name", arg.pos)
case _ =>
errorType(s"named and positional type arguments may not be mixed", arg.pos)
}
val ownType =
if (hasNamedArg(args)) (tycon.tpe /: args)(refineNamed)
else if (sameLength(tparams, args)) tycon.tpe.appliedTo(args.tpes)
- else errorType(d"wrong number of type arguments for ${tycon.tpe}, should be ${tparams.length}", tree.pos)
+ else wrongNumberOfArgs(tycon.tpe, "type ", tparams.length, tree.pos)
tree.withType(ownType)
}
+ def assignType(tree: untpd.PolyTypeTree, tparamDefs: List[TypeDef], body: Tree)(implicit ctx: Context) =
+ tree.withType(PolyType.fromSymbols(tparamDefs.map(_.symbol), body.tpe))
+
def assignType(tree: untpd.ByNameTypeTree, result: Tree)(implicit ctx: Context) =
tree.withType(ExprType(result.tpe))
@@ -480,8 +502,8 @@ trait TypeAssigner {
def assignType(tree: untpd.Import, sym: Symbol)(implicit ctx: Context) =
tree.withType(sym.nonMemberTermRef)
- def assignType(tree: untpd.Annotated, annot: Tree, arg: Tree)(implicit ctx: Context) =
- tree.withType(AnnotatedType(arg.tpe, Annotation(annot)))
+ def assignType(tree: untpd.Annotated, arg: Tree, annot: Tree)(implicit ctx: Context) =
+ tree.withType(AnnotatedType(arg.tpe.widen, Annotation(annot)))
def assignType(tree: untpd.PackageDef, pid: Tree)(implicit ctx: Context) =
tree.withType(pid.symbol.valRef)
diff --git a/src/dotty/tools/dotc/typer/Typer.scala b/src/dotty/tools/dotc/typer/Typer.scala
index af041e785..6fb0dd7c7 100644
--- a/src/dotty/tools/dotc/typer/Typer.scala
+++ b/src/dotty/tools/dotc/typer/Typer.scala
@@ -11,13 +11,13 @@ import Scopes._
import Denotations._
import ProtoTypes._
import Contexts._
+import Comments._
import Symbols._
import Types._
import SymDenotations._
import Annotations._
import Names._
import NameOps._
-import Applications._
import Flags._
import Decorators._
import ErrorReporting._
@@ -32,11 +32,12 @@ import collection.mutable
import annotation.tailrec
import Implicits._
import util.Stats.{track, record}
-import config.Printers._
+import config.Printers.{typr, gadts}
import rewrite.Rewrites.patch
import NavigateAST._
import transform.SymUtils._
import language.implicitConversions
+import printing.SyntaxHighlighting._
object Typer {
@@ -58,11 +59,14 @@ object Typer {
assert(tree.pos.exists, s"position not set for $tree # ${tree.uniqueId}")
}
-class Typer extends Namer with TypeAssigner with Applications with Implicits with Checking {
+class Typer extends Namer with TypeAssigner with Applications with Implicits with Dynamic with Checking with Docstrings {
import Typer._
import tpd.{cpy => _, _}
import untpd.cpy
+ import Dynamic.isDynamicMethod
+ import reporting.diagnostic.Message
+ import reporting.diagnostic.messages._
/** A temporary data item valid for a single typed ident:
* The set of all root import symbols that have been
@@ -72,6 +76,13 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
*/
private var importedFromRoot: Set[Symbol] = Set()
+ /** Temporary data item for single call to typed ident:
+ * This symbol would be found under Scala2 mode, but is not
+ * in dotty (because dotty conforms to spec section 2
+ * wrt to package member resolution but scalac doe not).
+ */
+ private var foundUnderScala2: Type = NoType
+
def newLikeThis: Typer = new Typer
/** Attribute an identifier consisting of a simple name or wildcard
@@ -88,7 +99,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** Method is necessary because error messages need to bind to
* to typedIdent's context which is lost in nested calls to findRef
*/
- def error(msg: => String, pos: Position) = ctx.error(msg, pos)
+ def error(msg: => Message, pos: Position) = ctx.error(msg, pos)
/** Is this import a root import that has been shadowed by an explicit
* import in the same program?
@@ -134,13 +145,21 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
* or defined in <symbol>
*/
def bindingString(prec: Int, whereFound: Context, qualifier: String = "") =
- if (prec == wildImport || prec == namedImport) d"imported$qualifier by ${whereFound.importInfo}"
- else d"defined$qualifier in ${whereFound.owner}"
+ if (prec == wildImport || prec == namedImport) {
+ ex"""imported$qualifier by ${hl"${whereFound.importInfo.toString}"}"""
+ } else
+ ex"""defined$qualifier in ${hl"${whereFound.owner.toString}"}"""
/** Check that any previously found result from an inner context
* does properly shadow the new one from an outer context.
+ * @param found The newly found result
+ * @param newPrec Its precedence
+ * @param scala2pkg Special mode where we check members of the same package, but defined
+ * in different compilation units under Scala2. If set, and the
+ * previous and new contexts do not have the same scope, we select
+ * the previous (inner) definition. This models what scalac does.
*/
- def checkNewOrShadowed(found: Type, newPrec: Int): Type =
+ def checkNewOrShadowed(found: Type, newPrec: Int, scala2pkg: Boolean = false)(implicit ctx: Context): Type =
if (!previous.exists || ctx.typeComparer.isSameRef(previous, found)) found
else if ((prevCtx.scope eq ctx.scope) &&
(newPrec == definition ||
@@ -150,11 +169,11 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
found
}
else {
- if (!previous.isError && !found.isError) {
+ if (!scala2pkg && !previous.isError && !found.isError) {
error(
- d"""reference to $name is ambiguous;
- |it is both ${bindingString(newPrec, ctx, "")}
- |and ${bindingString(prevPrec, prevCtx, " subsequently")}""".stripMargin,
+ ex"""|reference to `$name` is ambiguous
+ |it is both ${bindingString(newPrec, ctx, "")}
+ |and ${bindingString(prevPrec, prevCtx, " subsequently")}""",
tree.pos)
}
previous
@@ -163,25 +182,29 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** The type representing a named import with enclosing name when imported
* from given `site` and `selectors`.
*/
- def namedImportRef(site: Type, selectors: List[untpd.Tree]): Type = {
+ def namedImportRef(site: Type, selectors: List[untpd.Tree])(implicit ctx: Context): Type = {
def checkUnambiguous(found: Type) = {
val other = namedImportRef(site, selectors.tail)
if (other.exists && found.exists && (found != other))
- error(d"reference to $name is ambiguous; it is imported twice in ${ctx.tree}",
+ error(em"reference to `$name` is ambiguous; it is imported twice in ${ctx.tree}",
tree.pos)
found
}
val Name = name.toTermName.decode
selectors match {
- case Pair(Ident(from), Ident(Name)) :: rest =>
- val selName = if (name.isTypeName) from.toTypeName else from
- // Pass refctx so that any errors are reported in the context of the
- // reference instead of the context of the import.
- checkUnambiguous(selectionType(site, selName, tree.pos)(refctx))
- case Ident(Name) :: rest =>
- checkUnambiguous(selectionType(site, name, tree.pos)(refctx))
- case _ :: rest =>
- namedImportRef(site, rest)
+ case selector :: rest =>
+ selector match {
+ case Thicket(fromId :: Ident(Name) :: _) =>
+ val Ident(from) = fromId
+ val selName = if (name.isTypeName) from.toTypeName else from
+ // Pass refctx so that any errors are reported in the context of the
+ // reference instead of the context of the import.
+ checkUnambiguous(selectionType(site, selName, tree.pos)(refctx))
+ case Ident(Name) =>
+ checkUnambiguous(selectionType(site, name, tree.pos)(refctx))
+ case _ =>
+ namedImportRef(site, rest)
+ }
case nil =>
NoType
}
@@ -190,7 +213,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** The type representing a wildcard import with enclosing name when imported
* from given import info
*/
- def wildImportRef(imp: ImportInfo): Type = {
+ def wildImportRef(imp: ImportInfo)(implicit ctx: Context): Type = {
if (imp.isWildcardImport) {
val pre = imp.site
if (!isDisabled(imp, pre) && !(imp.excluded contains name.toTermName) && name != nme.CONSTRUCTOR) {
@@ -204,54 +227,71 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
/** Is (some alternative of) the given predenotation `denot`
* defined in current compilation unit?
*/
- def isDefinedInCurrentUnit(denot: Denotation): Boolean = denot match {
+ def isDefinedInCurrentUnit(denot: Denotation)(implicit ctx: Context): Boolean = denot match {
case MultiDenotation(d1, d2) => isDefinedInCurrentUnit(d1) || isDefinedInCurrentUnit(d2)
case denot: SingleDenotation => denot.symbol.sourceFile == ctx.source.file
}
/** Is `denot` the denotation of a self symbol? */
- def isSelfDenot(denot: Denotation) = denot match {
+ def isSelfDenot(denot: Denotation)(implicit ctx: Context) = denot match {
case denot: SymDenotation => denot is SelfName
case _ => false
}
- // begin findRef
- if (ctx.scope == null) previous
- else {
- val outer = ctx.outer
- if ((ctx.scope ne outer.scope) || (ctx.owner ne outer.owner)) {
- val defDenot = ctx.denotNamed(name)
- if (qualifies(defDenot)) {
- val curOwner = ctx.owner
- val found =
- if (isSelfDenot(defDenot)) curOwner.enclosingClass.thisType
- else curOwner.thisType.select(name, defDenot)
- if (!(curOwner is Package) || isDefinedInCurrentUnit(defDenot))
- return checkNewOrShadowed(found, definition) // no need to go further out, we found highest prec entry
- else if (defDenot.symbol is Package)
- return checkNewOrShadowed(previous orElse found, packageClause)
- else if (prevPrec < packageClause)
- return findRef(found, packageClause, ctx)(outer)
+ /** Would import of kind `prec` be not shadowed by a nested higher-precedence definition? */
+ def isPossibleImport(prec: Int)(implicit ctx: Context) =
+ prevPrec < prec || prevPrec == prec && (prevCtx.scope eq ctx.scope)
+
+ @tailrec def loop(implicit ctx: Context): Type = {
+ if (ctx.scope == null) previous
+ else {
+ val outer = ctx.outer
+ var result: Type = NoType
+
+ // find definition
+ if ((ctx.scope ne outer.scope) || (ctx.owner ne outer.owner)) {
+ val defDenot = ctx.denotNamed(name)
+ if (qualifies(defDenot)) {
+ val curOwner = ctx.owner
+ val found =
+ if (isSelfDenot(defDenot)) curOwner.enclosingClass.thisType
+ else curOwner.thisType.select(name, defDenot)
+ if (!(curOwner is Package) || isDefinedInCurrentUnit(defDenot))
+ result = checkNewOrShadowed(found, definition) // no need to go further out, we found highest prec entry
+ else {
+ if (ctx.scala2Mode && !foundUnderScala2.exists)
+ foundUnderScala2 = checkNewOrShadowed(found, definition, scala2pkg = true)
+ if (defDenot.symbol is Package)
+ result = checkNewOrShadowed(previous orElse found, packageClause)
+ else if (prevPrec < packageClause)
+ result = findRef(found, packageClause, ctx)(outer)
+ }
+ }
}
- }
- val curImport = ctx.importInfo
- if (ctx.owner.is(Package) && curImport != null && curImport.isRootImport && previous.exists)
- return previous // no more conflicts possible in this case
- // would import of kind `prec` be not shadowed by a nested higher-precedence definition?
- def isPossibleImport(prec: Int) =
- prevPrec < prec || prevPrec == prec && (prevCtx.scope eq ctx.scope)
- if (isPossibleImport(namedImport) && (curImport ne outer.importInfo) && !curImport.sym.isCompleting) {
- val namedImp = namedImportRef(curImport.site, curImport.selectors)
- if (namedImp.exists)
- return findRef(checkNewOrShadowed(namedImp, namedImport), namedImport, ctx)(outer)
- if (isPossibleImport(wildImport)) {
- val wildImp = wildImportRef(curImport)
- if (wildImp.exists)
- return findRef(checkNewOrShadowed(wildImp, wildImport), wildImport, ctx)(outer)
+
+ if (result.exists) result
+ else { // find import
+ val curImport = ctx.importInfo
+ if (ctx.owner.is(Package) && curImport != null && curImport.isRootImport && previous.exists)
+ previous // no more conflicts possible in this case
+ else if (isPossibleImport(namedImport) && (curImport ne outer.importInfo) && !curImport.sym.isCompleting) {
+ val namedImp = namedImportRef(curImport.site, curImport.selectors)
+ if (namedImp.exists)
+ findRef(checkNewOrShadowed(namedImp, namedImport), namedImport, ctx)(outer)
+ else if (isPossibleImport(wildImport)) {
+ val wildImp = wildImportRef(curImport)
+ if (wildImp.exists)
+ findRef(checkNewOrShadowed(wildImp, wildImport), wildImport, ctx)(outer)
+ else loop(outer)
+ }
+ else loop(outer)
+ }
+ else loop(outer)
}
}
- findRef(previous, prevPrec, prevCtx)(outer)
}
+
+ loop
}
// begin typedIdent
@@ -264,18 +304,34 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
return typed(desugar.patternVar(tree), pt)
}
- val saved = importedFromRoot
- importedFromRoot = Set.empty
- val rawType =
- try findRef(NoType, BindingPrec.nothingBound, NoContext)
- finally importedFromRoot = saved
+ val rawType = {
+ val saved1 = importedFromRoot
+ val saved2 = foundUnderScala2
+ importedFromRoot = Set.empty
+ foundUnderScala2 = NoType
+ try {
+ var found = findRef(NoType, BindingPrec.nothingBound, NoContext)
+ if (foundUnderScala2.exists && !(foundUnderScala2 =:= found)) {
+ ctx.migrationWarning(
+ ex"""Name resolution will change.
+ | currently selected : $foundUnderScala2
+ | in the future, without -language:Scala2: $found""", tree.pos)
+ found = foundUnderScala2
+ }
+ found
+ }
+ finally {
+ importedFromRoot = saved1
+ foundUnderScala2 = saved2
+ }
+ }
val ownType =
if (rawType.exists)
ensureAccessible(rawType, superAccess = false, tree.pos)
else {
- error(d"not found: $kind$name", tree.pos)
+ error(new MissingIdent(tree, kind, name.show), tree.pos)
ErrorType
}
@@ -304,52 +360,51 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
tree match {
case tree @ Select(qual, _) if !qual.tpe.isStable =>
val alt = typedSelect(tree, pt, Typed(qual, TypeTree(SkolemType(qual.tpe.widen))))
- typr.println(d"healed type: ${tree.tpe} --> $alt")
+ typr.println(i"healed type: ${tree.tpe} --> $alt")
alt.asInstanceOf[T]
case _ =>
- ctx.error(d"unsafe instantiation of type ${tree.tpe}", tree.pos)
+ ctx.error(ex"unsafe instantiation of type ${tree.tpe}", tree.pos)
tree
}
else tree
def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = track("typedSelect") {
- def asSelect(implicit ctx: Context): Tree = {
+ def typeSelectOnTerm(implicit ctx: Context): Tree = {
val qual1 = typedExpr(tree.qualifier, selectionProto(tree.name, pt, this))
if (tree.name.isTypeName) checkStable(qual1.tpe, qual1.pos)
- typedSelect(tree, pt, qual1)
- }
-
- def asJavaSelectFromTypeTree(implicit ctx: Context): Tree = {
- // Translate names in Select/Ident nodes to type names.
- def convertToTypeName(tree: untpd.Tree): Option[untpd.Tree] = tree match {
- case Select(qual, name) => Some(untpd.Select(qual, name.toTypeName))
- case Ident(name) => Some(untpd.Ident(name.toTypeName))
- case _ => None
- }
+ val select = typedSelect(tree, pt, qual1)
+ if (select.tpe ne TryDynamicCallType) select
+ else if (pt.isInstanceOf[PolyProto] || pt.isInstanceOf[FunProto] || pt == AssignProto) select
+ else typedDynamicSelect(tree, Nil, pt)
+ }
- // Try to convert Select(qual, name) to a SelectFromTypeTree.
- def convertToSelectFromType(qual: untpd.Tree, origName: Name): Option[untpd.SelectFromTypeTree] =
- convertToTypeName(qual) match {
- case Some(qual1) => Some(untpd.SelectFromTypeTree(qual1 withPos qual.pos, origName.toTypeName))
- case _ => None
- }
+ def typeSelectOnType(qual: untpd.Tree)(implicit ctx: Context) =
+ typedSelect(untpd.cpy.Select(tree)(qual, tree.name.toTypeName), pt)
- convertToSelectFromType(tree.qualifier, tree.name) match {
- case Some(sftt) => typedSelectFromTypeTree(sftt, pt)
- case _ => ctx.error(d"Could not convert $tree to a SelectFromTypeTree"); EmptyTree
- }
+ def tryJavaSelectOnType(implicit ctx: Context): Tree = tree.qualifier match {
+ case Select(qual, name) => typeSelectOnType(untpd.Select(qual, name.toTypeName))
+ case Ident(name) => typeSelectOnType(untpd.Ident(name.toTypeName))
+ case _ => errorTree(tree, "cannot convert to type selection") // will never be printed due to fallback
}
- if (ctx.compilationUnit.isJava && tree.name.isTypeName) {
+ def selectWithFallback(fallBack: Context => Tree) =
+ tryAlternatively(typeSelectOnTerm(_))(fallBack)
+
+ if (tree.qualifier.isType) {
+ val qual1 = typedType(tree.qualifier, selectionProto(tree.name, pt, this))
+ assignType(cpy.Select(tree)(qual1, tree.name), qual1)
+ }
+ else if (ctx.compilationUnit.isJava && tree.name.isTypeName)
// SI-3120 Java uses the same syntax, A.B, to express selection from the
// value A and from the type A. We have to try both.
- tryEither(tryCtx => asSelect(tryCtx))((_, _) => asJavaSelectFromTypeTree(ctx))
- } else asSelect(ctx)
- }
-
- def typedSelectFromTypeTree(tree: untpd.SelectFromTypeTree, pt: Type)(implicit ctx: Context): Tree = track("typedSelectFromTypeTree") {
- val qual1 = typedType(tree.qualifier, selectionProto(tree.name, pt, this))
- assignType(cpy.SelectFromTypeTree(tree)(qual1, tree.name), qual1)
+ selectWithFallback(tryJavaSelectOnType(_)) // !!! possibly exponential bcs of qualifier retyping
+ else if (tree.name == nme.withFilter && tree.getAttachment(desugar.MaybeFilter).isDefined)
+ selectWithFallback {
+ implicit ctx =>
+ typedSelect(untpd.cpy.Select(tree)(tree.qualifier, nme.filter), pt) // !!! possibly exponential bcs of qualifier retyping
+ }
+ else
+ typeSelectOnTerm(ctx)
}
def typedThis(tree: untpd.This)(implicit ctx: Context): Tree = track("typedThis") {
@@ -383,6 +438,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
typed(cpy.Block(tree)(clsDef :: Nil, New(Ident(x), Nil)), pt)
case _ =>
var tpt1 = typedType(tree.tpt)
+ tpt1 = tpt1.withType(ensureAccessible(tpt1.tpe, superAccess = false, tpt1.pos))
tpt1.tpe.dealias match {
case TypeApplications.EtaExpansion(tycon) => tpt1 = tpt1.withType(tycon)
case _ =>
@@ -393,16 +449,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
- def typedPair(tree: untpd.Pair, pt: Type)(implicit ctx: Context) = track("typedPair") {
- val (leftProto, rightProto) = pt.argTypesLo match {
- case l :: r :: Nil if pt isRef defn.PairClass => (l, r)
- case _ => (WildcardType, WildcardType)
- }
- val left1 = typed(tree.left, leftProto)
- val right1 = typed(tree.right, rightProto)
- assignType(cpy.Pair(tree)(left1, right1), left1, right1)
- }
-
def typedTyped(tree: untpd.Typed, pt: Type)(implicit ctx: Context): Tree = track("typedTyped") {
/* Handles three cases:
* @param ifPat how to handle a pattern (_: T)
@@ -420,14 +466,19 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case _ => ifExpr
}
def ascription(tpt: Tree, isWildcard: Boolean) = {
+ val underlyingTreeTpe =
+ if (isRepeatedParamType(tpt)) TypeTree(defn.SeqType.appliedTo(pt :: Nil))
+ else tpt
+
val expr1 =
- if (isWildcard) tree.expr.withType(tpt.tpe)
+ if (isRepeatedParamType(tpt)) tree.expr.withType(defn.SeqType.appliedTo(pt :: Nil))
+ else if (isWildcard) tree.expr.withType(tpt.tpe)
else typed(tree.expr, tpt.tpe.widenSkolem)
- assignType(cpy.Typed(tree)(expr1, tpt), tpt)
+ assignType(cpy.Typed(tree)(expr1, tpt), underlyingTreeTpe)
}
if (untpd.isWildcardStarArg(tree))
cases(
- ifPat = ascription(TypeTree(defn.SeqType.appliedTo(pt :: Nil)), isWildcard = true),
+ ifPat = ascription(TypeTree(defn.RepeatedParamType.appliedTo(pt)), isWildcard = true),
ifExpr = seqToRepeated(typedExpr(tree.expr, defn.SeqType)),
wildName = nme.WILDCARD_STAR)
else {
@@ -444,6 +495,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case _ =>
}
case _ =>
+ if (!ctx.isAfterTyper) tpt1.tpe.<:<(pt)(ctx.addMode(Mode.GADTflexible))
}
ascription(tpt1, isWildcard = true)
}
@@ -467,16 +519,16 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val rawUpdate: untpd.Tree = untpd.Select(untpd.TypedSplice(fn), nme.update)
val wrappedUpdate =
if (targs.isEmpty) rawUpdate
- else untpd.TypeApply(rawUpdate, targs map untpd.TypedSplice)
- val appliedUpdate = cpy.Apply(fn)(wrappedUpdate, (args map untpd.TypedSplice) :+ tree.rhs)
+ else untpd.TypeApply(rawUpdate, targs map (untpd.TypedSplice(_)))
+ val appliedUpdate = cpy.Apply(fn)(wrappedUpdate, (args map (untpd.TypedSplice(_))) :+ tree.rhs)
typed(appliedUpdate, pt)
case lhs =>
- val lhsCore = typedUnadapted(lhs)
+ val lhsCore = typedUnadapted(lhs, AssignProto)
def lhs1 = typed(untpd.TypedSplice(lhsCore))
def canAssign(sym: Symbol) = // allow assignments from the primary constructor to class fields
sym.is(Mutable, butNot = Accessor) ||
ctx.owner.isPrimaryConstructor && !sym.is(Method) && sym.owner == ctx.owner.owner ||
- ctx.owner.name.isTraitSetterName
+ ctx.owner.name.isTraitSetterName || ctx.owner.isStaticConstructor
lhsCore.tpe match {
case ref: TermRef if canAssign(ref.symbol) =>
assignType(cpy.Assign(tree)(lhs1, typed(tree.rhs, ref.info)))
@@ -499,6 +551,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case _ =>
reassignmentToVal
}
+ case TryDynamicCallType =>
+ typedDynamicAssign(tree, pt)
case tpe =>
reassignmentToVal
}
@@ -506,10 +560,18 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
+ def typedBlockStats(stats: List[untpd.Tree])(implicit ctx: Context): (Context, List[tpd.Tree]) =
+ (index(stats), typedStats(stats, ctx.owner))
+
def typedBlock(tree: untpd.Block, pt: Type)(implicit ctx: Context) = track("typedBlock") {
- val exprCtx = index(tree.stats)
- val stats1 = typedStats(tree.stats, ctx.owner)
- val expr1 = typedExpr(tree.expr, pt)(exprCtx)
+ val (exprCtx, stats1) = typedBlockStats(tree.stats)
+ val ept =
+ if (tree.isInstanceOf[untpd.InfixOpBlock])
+ // Right-binding infix operations are expanded to InfixBlocks, which may be followed by arguments.
+ // Example: `(a /: bs)(op)` expands to `{ val x = a; bs./:(x) } (op)` where `{...}` is an InfixBlock.
+ pt
+ else pt.notApplied
+ val expr1 = typedExpr(tree.expr, ept)(exprCtx)
ensureNoLocalRefs(
assignType(cpy.Block(tree)(stats1, expr1), stats1, expr1), pt, localSyms(stats1))
}
@@ -543,20 +605,23 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
ensureNoLocalRefs(tree1, pt, localSyms, forcedDefined = true)
} else
errorTree(tree,
- d"local definition of ${leaks.head.name} escapes as part of expression's type ${tree.tpe}"/*; full type: ${result.tpe.toString}"*/)
+ em"local definition of ${leaks.head.name} escapes as part of expression's type ${tree.tpe}"/*; full type: ${result.tpe.toString}"*/)
}
- def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context) = track("typedIf") {
+ def typedIf(tree: untpd.If, pt: Type)(implicit ctx: Context): Tree = track("typedIf") {
val cond1 = typed(tree.cond, defn.BooleanType)
- val thenp1 = typed(tree.thenp, pt)
- val elsep1 = typed(tree.elsep orElse untpd.unitLiteral withPos tree.pos, pt)
+ val thenp1 = typed(tree.thenp, pt.notApplied)
+ val elsep1 = typed(tree.elsep orElse (untpd.unitLiteral withPos tree.pos), pt.notApplied)
val thenp2 :: elsep2 :: Nil = harmonize(thenp1 :: elsep1 :: Nil)
assignType(cpy.If(tree)(cond1, thenp2, elsep2), thenp2, elsep2)
}
private def decomposeProtoFunction(pt: Type, defaultArity: Int)(implicit ctx: Context): (List[Type], Type) = pt match {
case _ if defn.isFunctionType(pt) =>
- (pt.dealias.argInfos.init, pt.dealias.argInfos.last)
+ // if expected parameter type(s) are wildcards, approximate from below.
+ // if expected result type is a wildcard, approximate from above.
+ // this can type the greatest set of admissible closures.
+ (pt.dealias.argTypesLo.init, pt.dealias.argTypesHi.last)
case SAMType(meth) =>
val mt @ MethodType(_, paramTypes) = meth.info
(paramTypes, mt.resultType)
@@ -571,6 +636,16 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
untpd.TypeTree(defn.FunctionClass(args.length).typeRef), args :+ body), pt)
else {
val params = args.asInstanceOf[List[untpd.ValDef]]
+
+ pt match {
+ case pt: TypeVar if untpd.isFunctionWithUnknownParamType(tree) =>
+ // try to instantiate `pt` if this is possible. If it does not
+ // work the error will be reported later in `inferredParam`,
+ // when we try to infer the parameter type.
+ isFullyDefined(pt, ForceDegree.noBottom)
+ case _ =>
+ }
+
val (protoFormals, protoResult) = decomposeProtoFunction(pt, params.length)
def refersTo(arg: untpd.Tree, param: untpd.ValDef): Boolean = arg match {
@@ -578,7 +653,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case _ => false
}
- /** The funcion body to be returned in the closure. Can become a TypedSplice
+ /** The function body to be returned in the closure. Can become a TypedSplice
* of a typed expression if this is necessary to infer a parameter type.
*/
var fnBody = tree.body
@@ -668,7 +743,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case WildcardType(_) => untpd.TypeTree()
case _ => untpd.TypeTree(protoResult)
}
- desugar.makeClosure(inferredParams, fnBody, resultTpt)
+ val inlineable = pt.hasAnnotation(defn.InlineParamAnnot)
+ desugar.makeClosure(inferredParams, fnBody, resultTpt, inlineable)
}
typed(desugared, pt)
}
@@ -684,14 +760,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
pt match {
case SAMType(meth) if !defn.isFunctionType(pt) && mt <:< meth.info =>
if (!isFullyDefined(pt, ForceDegree.all))
- ctx.error(d"result type of closure is an underspecified SAM type $pt", tree.pos)
+ ctx.error(ex"result type of closure is an underspecified SAM type $pt", tree.pos)
TypeTree(pt)
case _ =>
if (!mt.isDependent) EmptyTree
- else throw new Error(i"internal error: cannot turn dependent method type $mt into closure, position = ${tree.pos}, raw type = ${mt.toString}") // !!! DEBUG. Eventually, convert to an error?
+ else throw new java.lang.Error(i"internal error: cannot turn dependent method type $mt into closure, position = ${tree.pos}, raw type = ${mt.toString}") // !!! DEBUG. Eventually, convert to an error?
}
case tp =>
- throw new Error(i"internal error: closing over non-method $tp, pos = ${tree.pos}")
+ throw new java.lang.Error(i"internal error: closing over non-method $tp, pos = ${tree.pos}")
}
else typed(tree.tpt)
//println(i"typing closure $tree : ${meth1.tpe.widen}")
@@ -702,13 +778,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
tree.selector match {
case EmptyTree =>
val (protoFormals, _) = decomposeProtoFunction(pt, 1)
- typed(desugar.makeCaseLambda(tree.cases, protoFormals.length) withPos tree.pos, pt)
+ val unchecked = pt <:< defn.PartialFunctionType
+ typed(desugar.makeCaseLambda(tree.cases, protoFormals.length, unchecked) withPos tree.pos, pt)
case _ =>
val sel1 = typedExpr(tree.selector)
val selType = widenForMatchSelector(
fullyDefinedType(sel1.tpe, "pattern selector", tree.pos))
- val cases1 = typedCases(tree.cases, selType, pt)
+ val cases1 = typedCases(tree.cases, selType, pt.notApplied)
val cases2 = harmonize(cases1).asInstanceOf[List[CaseDef]]
assignType(cpy.Match(tree)(sel1, cases2), cases2)
}
@@ -745,17 +822,37 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def typedCase(tree: untpd.CaseDef, pt: Type, selType: Type, gadtSyms: Set[Symbol])(implicit ctx: Context): CaseDef = track("typedCase") {
val originalCtx = ctx
- def caseRest(pat: Tree)(implicit ctx: Context) = {
- pat foreachSubTree {
- case b: Bind =>
- if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(b.symbol)
- else ctx.error(d"duplicate pattern variable: ${b.name}", b.pos)
- case _ =>
+ /** - replace all references to symbols associated with wildcards by their GADT bounds
+ * - enter all symbols introduced by a Bind in current scope
+ */
+ val indexPattern = new TreeMap {
+ val elimWildcardSym = new TypeMap {
+ def apply(t: Type) = t match {
+ case ref @ TypeRef(_, tpnme.WILDCARD) if ctx.gadt.bounds.contains(ref.symbol) =>
+ ctx.gadt.bounds(ref.symbol)
+ case TypeAlias(ref @ TypeRef(_, tpnme.WILDCARD)) if ctx.gadt.bounds.contains(ref.symbol) =>
+ ctx.gadt.bounds(ref.symbol)
+ case _ =>
+ mapOver(t)
+ }
}
+ override def transform(trt: Tree)(implicit ctx: Context) =
+ super.transform(trt.withType(elimWildcardSym(trt.tpe))) match {
+ case b: Bind =>
+ if (ctx.scope.lookup(b.name) == NoSymbol) ctx.enter(b.symbol)
+ else ctx.error(new DuplicateBind(b, tree), b.pos)
+ b.symbol.info = elimWildcardSym(b.symbol.info)
+ b
+ case t => t
+ }
+ }
+
+ def caseRest(pat: Tree)(implicit ctx: Context) = {
+ val pat1 = indexPattern.transform(pat)
val guard1 = typedExpr(tree.guard, defn.BooleanType)
val body1 = ensureNoLocalRefs(typedExpr(tree.body, pt), pt, ctx.scope.toList)
.ensureConforms(pt)(originalCtx) // insert a cast if body does not conform to expected type if we disregard gadt bounds
- assignType(cpy.CaseDef(tree)(pat, guard1, body1), body1)
+ assignType(cpy.CaseDef(tree)(pat1, guard1, body1), body1)
}
val gadtCtx =
@@ -790,12 +887,15 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
(EmptyTree, WildcardType)
}
else if (owner != cx.outer.owner && owner.isRealMethod) {
- if (owner.isCompleted) {
+ if (owner.isInlineMethod)
+ (EmptyTree, errorType(em"no explicit return allowed from inline $owner", tree.pos))
+ else if (!owner.isCompleted)
+ (EmptyTree, errorType(em"$owner has return statement; needs result type", tree.pos))
+ else {
val from = Ident(TermRef(NoPrefix, owner.asTerm))
val proto = returnProto(owner, cx.scope)
(from, proto)
}
- else (EmptyTree, errorType(d"$owner has return statement; needs result type", tree.pos))
}
else enclMethInfo(cx.outer)
}
@@ -815,8 +915,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def typedTry(tree: untpd.Try, pt: Type)(implicit ctx: Context): Try = track("typedTry") {
- val expr1 = typed(tree.expr, pt)
- val cases1 = typedCases(tree.cases, defn.ThrowableType, pt)
+ val expr1 = typed(tree.expr, pt.notApplied)
+ val cases1 = typedCases(tree.cases, defn.ThrowableType, pt.notApplied)
val finalizer1 = typed(tree.finalizer, defn.UnitType)
val expr2 :: cases2x = harmonize(expr1 :: cases1)
val cases2 = cases2x.asInstanceOf[List[CaseDef]]
@@ -829,18 +929,29 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(implicit ctx: Context): SeqLiteral = track("typedSeqLiteral") {
- val proto1 = pt.elemType orElse WildcardType
+ val proto1 = pt.elemType match {
+ case NoType => WildcardType
+ case bounds: TypeBounds => WildcardType(bounds)
+ case elemtp => elemtp
+ }
val elems1 = tree.elems mapconserve (typed(_, proto1))
val proto2 = // the computed type of the `elemtpt` field
if (!tree.elemtpt.isEmpty) WildcardType
else if (isFullyDefined(proto1, ForceDegree.none)) proto1
- else if (tree.elems.isEmpty && tree.isInstanceOf[Trees.JavaSeqLiteral[_]])
+ else if (tree.elems.isEmpty && tree.isInstanceOf[Trees.JavaSeqLiteral[_]])
defn.ObjectType // generic empty Java varargs are of type Object[]
else ctx.typeComparer.lub(elems1.tpes)
val elemtpt1 = typed(tree.elemtpt, proto2)
assignType(cpy.SeqLiteral(tree)(elems1, elemtpt1), elems1, elemtpt1)
}
+ def typedInlined(tree: untpd.Inlined, pt: Type)(implicit ctx: Context): Inlined = {
+ val (exprCtx, bindings1) = typedBlockStats(tree.bindings)
+ val expansion1 = typed(tree.expansion, pt)(inlineContext(tree.call)(exprCtx))
+ assignType(cpy.Inlined(tree)(tree.call, bindings1.asInstanceOf[List[MemberDef]], expansion1),
+ bindings1, expansion1)
+ }
+
def typedTypeTree(tree: untpd.TypeTree, pt: Type)(implicit ctx: Context): TypeTree = track("typedTypeTree") {
if (tree.original.isEmpty)
tree match {
@@ -880,8 +991,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def typedOrTypeTree(tree: untpd.OrTypeTree)(implicit ctx: Context): OrTypeTree = track("typedOrTypeTree") {
- val left1 = typed(tree.left)
- val right1 = typed(tree.right)
+ val where = "in a union type"
+ val left1 = checkNotSingleton(typed(tree.left), where)
+ val right1 = checkNotSingleton(typed(tree.right), where)
assignType(cpy.OrTypeTree(tree)(left1, right1), left1, right1)
}
@@ -897,23 +1009,24 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
typr.println(s"adding refinement $refinement")
checkRefinementNonCyclic(refinement, refineCls, seen)
val rsym = refinement.symbol
- if ((rsym.is(Method) || rsym.isType) && rsym.allOverriddenSymbols.isEmpty)
+ if (rsym.is(Method) && rsym.allOverriddenSymbols.isEmpty)
ctx.error(i"refinement $rsym without matching type in parent $parent", refinement.pos)
val rinfo = if (rsym is Accessor) rsym.info.resultType else rsym.info
- RefinedType(parent, rsym.name, rt => rinfo.substThis(refineCls, RefinedThis(rt)))
+ RefinedType(parent, rsym.name, rinfo)
// todo later: check that refinement is within bounds
}
- val res = cpy.RefinedTypeTree(tree)(tpt1, refinements1) withType
- (tpt1.tpe /: refinements1)(addRefinement)
+ val refined = (tpt1.tpe /: refinements1)(addRefinement)
+ val res = cpy.RefinedTypeTree(tree)(tpt1, refinements1).withType(
+ RecType.closeOver(rt => refined.substThis(refineCls, RecThis(rt))))
typr.println(i"typed refinement: ${res.tpe}")
res
}
def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(implicit ctx: Context): Tree = track("typedAppliedTypeTree") {
- val tpt1 = typed(tree.tpt)(ctx retractMode Mode.Pattern)
+ val tpt1 = typed(tree.tpt, AnyTypeConstructorProto)(ctx.retractMode(Mode.Pattern))
val tparams = tpt1.tpe.typeParams
if (tparams.isEmpty) {
- ctx.error(d"${tpt1.tpe} does not take type parameters", tree.pos)
+ ctx.error(ex"${tpt1.tpe} does not take type parameters", tree.pos)
tpt1
}
else {
@@ -922,17 +1035,16 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (hasNamedArg(args)) typedNamedArgs(args)
else {
if (args.length != tparams.length) {
- ctx.error(d"wrong number of type arguments for ${tpt1.tpe}, should be ${tparams.length}", tree.pos)
+ wrongNumberOfArgs(tpt1.tpe, "type ", tparams.length, tree.pos)
args = args.take(tparams.length)
}
- def typedArg(arg: untpd.Tree, tparam: Symbol) = {
+ def typedArg(arg: untpd.Tree, tparam: TypeParamInfo) = {
val (desugaredArg, argPt) =
if (ctx.mode is Mode.Pattern)
- (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.info)
+ (if (isVarPattern(arg)) desugar.patternVar(arg) else arg, tparam.paramBounds)
else
(arg, WildcardType)
- val arg1 = typed(desugaredArg, argPt)
- adaptTypeArg(arg1, tparam.info)
+ typed(desugaredArg, argPt)
}
args.zipWithConserve(tparams)(typedArg(_, _)).asInstanceOf[List[Tree]]
}
@@ -941,25 +1053,61 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
+ def typedPolyTypeTree(tree: untpd.PolyTypeTree)(implicit ctx: Context): Tree = track("typedPolyTypeTree") {
+ val PolyTypeTree(tparams, body) = tree
+ index(tparams)
+ val tparams1 = tparams.mapconserve(typed(_).asInstanceOf[TypeDef])
+ val body1 = typedType(tree.body)
+ assignType(cpy.PolyTypeTree(tree)(tparams1, body1), tparams1, body1)
+ }
+
def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(implicit ctx: Context): ByNameTypeTree = track("typedByNameTypeTree") {
val result1 = typed(tree.result)
assignType(cpy.ByNameTypeTree(tree)(result1), result1)
}
+ /** Define a new symbol associated with a Bind or pattern wildcard and
+ * make it gadt narrowable.
+ */
+ private def newPatternBoundSym(name: Name, info: Type, pos: Position)(implicit ctx: Context) = {
+ val flags = if (name.isTypeName) BindDefinedType else EmptyFlags
+ val sym = ctx.newSymbol(ctx.owner, name, flags | Case, info, coord = pos)
+ if (name.isTypeName) ctx.gadt.setBounds(sym, info.bounds)
+ sym
+ }
+
def typedTypeBoundsTree(tree: untpd.TypeBoundsTree)(implicit ctx: Context): TypeBoundsTree = track("typedTypeBoundsTree") {
val TypeBoundsTree(lo, hi) = desugar.typeBoundsTree(tree)
val lo1 = typed(lo)
val hi1 = typed(hi)
- assignType(cpy.TypeBoundsTree(tree)(lo1, hi1), lo1, hi1)
+ val tree1 = assignType(cpy.TypeBoundsTree(tree)(lo1, hi1), lo1, hi1)
+ if (ctx.mode.is(Mode.Pattern)) {
+ // Associate a pattern-bound type symbol with the wildcard.
+ // The bounds of the type symbol can be constrained when comparing a pattern type
+ // with an expected type in typedTyped. The type symbol is eliminated once
+ // the enclosing pattern has been typechecked; see `indexPattern` in `typedCase`.
+ val wildcardSym = newPatternBoundSym(tpnme.WILDCARD, tree1.tpe, tree.pos)
+ tree1.withType(wildcardSym.typeRef)
+ }
+ else tree1
}
- def typedBind(tree: untpd.Bind, pt: Type)(implicit ctx: Context): Bind = track("typedBind") {
+ def typedBind(tree: untpd.Bind, pt: Type)(implicit ctx: Context): Tree = track("typedBind") {
val pt1 = fullyDefinedType(pt, "pattern variable", tree.pos)
val body1 = typed(tree.body, pt1)
typr.println(i"typed bind $tree pt = $pt1 bodytpe = ${body1.tpe}")
- val flags = if (tree.isType) BindDefinedType else EmptyFlags
- val sym = ctx.newSymbol(ctx.owner, tree.name, flags, body1.tpe, coord = tree.pos)
- assignType(cpy.Bind(tree)(tree.name, body1), sym)
+ body1 match {
+ case UnApply(fn, Nil, arg :: Nil) if tree.body.isInstanceOf[untpd.Typed] =>
+ // A typed pattern `x @ (_: T)` with an implicit `ctag: ClassTag[T]`
+ // was rewritten to `x @ ctag(_)`.
+ // Rewrite further to `ctag(x @ _)`
+ assert(fn.symbol.owner == defn.ClassTagClass)
+ tpd.cpy.UnApply(body1)(fn, Nil,
+ typed(untpd.Bind(tree.name, arg).withPos(tree.pos), arg.tpe) :: Nil)
+ case _ =>
+ val sym = newPatternBoundSym(tree.name, body1.tpe, tree.pos)
+ assignType(cpy.Bind(tree)(tree.name, body1), sym)
+ }
}
def typedAlternative(tree: untpd.Alternative, pt: Type)(implicit ctx: Context): Alternative = track("typedAlternative") {
@@ -969,9 +1117,10 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(implicit ctx: Context): Unit = {
// necessary to force annotation trees to be computed.
- sym.annotations.foreach(_.tree)
+ sym.annotations.foreach(_.ensureCompleted)
+ val annotCtx = ctx.outersIterator.dropWhile(_.owner == sym).next
// necessary in order to mark the typed ahead annotations as definitely typed:
- untpd.modsDeco(mdef).mods.annotations.foreach(typedAnnotation)
+ untpd.modsDeco(mdef).mods.annotations.foreach(typedAnnotation(_)(annotCtx))
}
def typedAnnotation(annot: untpd.Tree)(implicit ctx: Context): Tree = track("typedAnnotation") {
@@ -987,6 +1136,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case rhs => typedExpr(rhs, tpt1.tpe)
}
val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym)
+ if (sym.is(Inline, butNot = DeferredOrParamAccessor))
+ checkInlineConformant(rhs1, "right-hand side of inline value")
patchIfLazy(vdef1)
vdef1
}
@@ -997,31 +1148,45 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (sym.is(Lazy, butNot = Deferred | Module | Synthetic) && !sym.isVolatile &&
ctx.scala2Mode && ctx.settings.rewrite.value.isDefined &&
!ctx.isAfterTyper)
- patch(Position(toUntyped(vdef).envelope.start), "@volatile ")
+ patch(Position(toUntyped(vdef).pos.start), "@volatile ")
}
def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(implicit ctx: Context) = track("typedDefDef") {
val DefDef(name, tparams, vparamss, tpt, _) = ddef
completeAnnotations(ddef, sym)
val tparams1 = tparams mapconserve (typed(_).asInstanceOf[TypeDef])
- // for secondary constructors we need to use that their type parameters
- // are aliases of the class type parameters. See pos/i941.scala
- if (sym.isConstructor && !sym.isPrimaryConstructor)
- (sym.owner.typeParams, tparams1).zipped.foreach {(tparam, tdef) =>
- tdef.symbol.info = TypeAlias(tparam.typeRef)
- }
-
val vparamss1 = vparamss nestedMapconserve (typed(_).asInstanceOf[ValDef])
if (sym is Implicit) checkImplicitParamsNotSingletons(vparamss1)
- val tpt1 = checkSimpleKinded(typedType(tpt))
- val rhs1 = typedExpr(ddef.rhs, tpt1.tpe)
+ var tpt1 = checkSimpleKinded(typedType(tpt))
+
+ var rhsCtx = ctx
+ if (sym.isConstructor && !sym.isPrimaryConstructor && tparams1.nonEmpty) {
+ // for secondary constructors we need a context that "knows"
+ // that their type parameters are aliases of the class type parameters.
+ // See pos/i941.scala
+ rhsCtx = ctx.fresh.setFreshGADTBounds
+ (tparams1, sym.owner.typeParams).zipped.foreach ((tdef, tparam) =>
+ rhsCtx.gadt.setBounds(tdef.symbol, TypeAlias(tparam.typeRef)))
+ }
+ val rhs1 = typedExpr(ddef.rhs, tpt1.tpe)(rhsCtx)
+
+ // Overwrite inline body to make sure it is not evaluated twice
+ if (sym.hasAnnotation(defn.InlineAnnot))
+ Inliner.registerInlineInfo(sym, _ => rhs1)
+
+ if (sym.isAnonymousFunction) {
+ // If we define an anonymous function, make sure the return type does not
+ // refer to parameters. This is necessary because closure types are
+ // function types so no dependencies on parameters are allowed.
+ tpt1 = tpt1.withType(avoid(tpt1.tpe, vparamss1.flatMap(_.map(_.symbol))))
+ }
+
assignType(cpy.DefDef(ddef)(name, tparams1, vparamss1, tpt1, rhs1), sym)
//todo: make sure dependent method types do not depend on implicits or by-name params
}
def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(implicit ctx: Context): Tree = track("typedTypeDef") {
val TypeDef(name, rhs) = tdef
- checkLowerNotHK(sym, tdef.tparams.map(symbolOfTree), tdef.pos)
completeAnnotations(tdef, sym)
assignType(cpy.TypeDef(tdef)(name, typedType(rhs), Nil), sym)
}
@@ -1029,8 +1194,43 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(implicit ctx: Context) = track("typedClassDef") {
val TypeDef(name, impl @ Template(constr, parents, self, _)) = cdef
val superCtx = ctx.superCallContext
+
+ /** If `ref` is an implicitly parameterized trait, pass an implicit argument list.
+ * Otherwise, if `ref` is a parameterized trait, error.
+ * Note: Traits and classes currently always have at least an empty parameter list ()
+ * before the implicit parameters (this is inserted if not given in source).
+ * We skip this parameter list when deciding whether a trait is parameterless or not.
+ * @param ref The tree referring to the (parent) trait
+ * @param psym Its type symbol
+ * @param cinfo The info of its constructor
+ */
+ def maybeCall(ref: Tree, psym: Symbol, cinfo: Type): Tree = cinfo match {
+ case cinfo: PolyType =>
+ maybeCall(ref, psym, cinfo.resultType)
+ case cinfo @ MethodType(Nil, _) if cinfo.resultType.isInstanceOf[ImplicitMethodType] =>
+ val icall = New(ref).select(nme.CONSTRUCTOR).appliedToNone
+ typedExpr(untpd.TypedSplice(icall))(superCtx)
+ case cinfo @ MethodType(Nil, _) if !cinfo.resultType.isInstanceOf[MethodType] =>
+ ref
+ case cinfo: MethodType =>
+ if (!ctx.erasedTypes) { // after constructors arguments are passed in super call.
+ typr.println(i"constr type: $cinfo")
+ ctx.error(em"parameterized $psym lacks argument list", ref.pos)
+ }
+ ref
+ case _ =>
+ ref
+ }
+
def typedParent(tree: untpd.Tree): Tree =
- if (tree.isType) typedType(tree)(superCtx)
+ if (tree.isType) {
+ val result = typedType(tree)(superCtx)
+ val psym = result.tpe.typeSymbol
+ if (psym.is(Trait) && !cls.is(Trait) && !cls.superClass.isSubClass(psym))
+ maybeCall(result, psym, psym.primaryConstructor.info)
+ else
+ result
+ }
else {
val result = typedExpr(tree)(superCtx)
checkParentCall(result, cls)
@@ -1044,12 +1244,22 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val self1 = typed(self)(ctx.outer).asInstanceOf[ValDef] // outer context where class members are not visible
val dummy = localDummy(cls, impl)
val body1 = typedStats(impl.body, dummy)(inClassContext(self1.symbol))
+
+ // Expand comments and type usecases
+ cookComments(body1.map(_.symbol), self1.symbol)(localContext(cdef, cls).setNewScope)
+
checkNoDoubleDefs(cls)
val impl1 = cpy.Template(impl)(constr1, parents1, self1, body1)
.withType(dummy.nonMemberTermRef)
checkVariance(impl1)
- if (!cls.is(AbstractOrTrait) && !ctx.isAfterTyper) checkRealizableBounds(cls.typeRef, cdef.pos)
- assignType(cpy.TypeDef(cdef)(name, impl1, Nil), cls)
+ if (!cls.is(AbstractOrTrait) && !ctx.isAfterTyper) checkRealizableBounds(cls.typeRef, cdef.namePos)
+ val cdef1 = assignType(cpy.TypeDef(cdef)(name, impl1, Nil), cls)
+ if (ctx.phase.isTyper && cdef1.tpe.derivesFrom(defn.DynamicClass) && !ctx.dynamicsEnabled) {
+ val isRequired = parents1.exists(_.tpe.isRef(defn.DynamicClass))
+ ctx.featureWarning(nme.dynamics.toString, "extension of type scala.Dynamic", isScala2Feature = true,
+ cls, isRequired, cdef.pos)
+ }
+ cdef1
// todo later: check that
// 1. If class is non-abstract, it is instantiatable:
@@ -1098,7 +1308,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case _ =>
// add synthetic class type
val first :: _ = ensureFirstIsClass(parents.tpes)
- TypeTree(checkFeasible(first, pos, d"\n in inferred parent $first")).withPos(pos) :: parents
+ TypeTree(checkFeasible(first, pos, em"\n in inferred parent $first")).withPos(pos) :: parents
}
/** If this is a real class, make sure its first parent is a
@@ -1130,7 +1340,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val packageContext =
if (pkg is Package) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree)
else {
- ctx.error(d"$pkg is already defined, cannot be a package", tree.pos)
+ ctx.error(em"$pkg is already defined, cannot be a package", tree.pos)
ctx
}
val stats1 = typedStats(tree.stats, pkg.moduleClass)(packageContext)
@@ -1141,13 +1351,23 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val annot1 = typedExpr(tree.annot, defn.AnnotationType)
val arg1 = typed(tree.arg, pt)
if (ctx.mode is Mode.Type)
- assignType(cpy.Annotated(tree)(annot1, arg1), annot1, arg1)
+ assignType(cpy.Annotated(tree)(arg1, annot1), arg1, annot1)
else {
val tpt = TypeTree(AnnotatedType(arg1.tpe.widen, Annotation(annot1)))
assignType(cpy.Typed(tree)(arg1, tpt), tpt)
}
}
+ def typedTypedSplice(tree: untpd.TypedSplice)(implicit ctx: Context): Tree =
+ tree.tree match {
+ case tree1: TypeTree => tree1 // no change owner necessary here ...
+ case tree1: Ident => tree1 // ... or here, since these trees cannot contain bindings
+ case tree1 =>
+ if (ctx.owner ne tree.owner) tree1.changeOwner(tree.owner, ctx.owner)
+ else tree1
+ }
+
+
def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(implicit ctx: Context): Tree = {
val untpd.PostfixOp(qual, nme.WILDCARD) = tree
val pt1 = if (defn.isFunctionType(pt)) pt else AnyFunctionProto
@@ -1198,7 +1418,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
tree match {
case tree: untpd.Ident => typedIdent(tree, pt)
case tree: untpd.Select => typedSelect(tree, pt)
- case tree: untpd.SelectFromTypeTree => typedSelectFromTypeTree(tree, pt)
case tree: untpd.Bind => typedBind(tree, pt)
case tree: untpd.ValDef =>
if (tree.isEmpty) tpd.EmptyValDef
@@ -1221,7 +1440,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case tree: untpd.This => typedThis(tree)
case tree: untpd.Literal => typedLiteral(tree)
case tree: untpd.New => typedNew(tree, pt)
- case tree: untpd.Pair => typedPair(tree, pt)
case tree: untpd.Typed => typedTyped(tree, pt)
case tree: untpd.NamedArg => typedNamedArg(tree, pt)
case tree: untpd.Assign => typedAssign(tree, pt)
@@ -1236,18 +1454,20 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case tree: untpd.TypeApply => typedTypeApply(tree, pt)
case tree: untpd.Super => typedSuper(tree, pt)
case tree: untpd.SeqLiteral => typedSeqLiteral(tree, pt)
+ case tree: untpd.Inlined => typedInlined(tree, pt)
case tree: untpd.TypeTree => typedTypeTree(tree, pt)
case tree: untpd.SingletonTypeTree => typedSingletonTypeTree(tree)
case tree: untpd.AndTypeTree => typedAndTypeTree(tree)
case tree: untpd.OrTypeTree => typedOrTypeTree(tree)
case tree: untpd.RefinedTypeTree => typedRefinedTypeTree(tree)
case tree: untpd.AppliedTypeTree => typedAppliedTypeTree(tree)
+ case tree: untpd.PolyTypeTree => typedPolyTypeTree(tree)(localContext(tree, NoSymbol).setNewScope)
case tree: untpd.ByNameTypeTree => typedByNameTypeTree(tree)
case tree: untpd.TypeBoundsTree => typedTypeBoundsTree(tree)
case tree: untpd.Alternative => typedAlternative(tree, pt)
case tree: untpd.PackageDef => typedPackageDef(tree)
case tree: untpd.Annotated => typedAnnotated(tree, pt)
- case tree: untpd.TypedSplice => tree.tree
+ case tree: untpd.TypedSplice => typedTypedSplice(tree)
case tree: untpd.UnApply => typedUnApply(tree, pt)
case tree @ untpd.PostfixOp(qual, nme.WILDCARD) => typedAsFunction(tree, pt)
case untpd.EmptyTree => tpd.EmptyTree
@@ -1289,7 +1509,12 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case Some(xtree) =>
traverse(xtree :: rest)
case none =>
- buf += typed(mdef)
+ typed(mdef) match {
+ case mdef1: DefDef if Inliner.hasBodyToInline(mdef1.symbol) =>
+ buf ++= inlineExpansion(mdef1)
+ case mdef1 =>
+ buf += mdef1
+ }
traverse(rest)
}
case Thicket(stats) :: rest =>
@@ -1303,6 +1528,14 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
traverse(stats)
}
+ /** Given an inline method `mdef`, the method rewritten so that its body
+ * uses accessors to access non-public members, followed by the accessor definitions.
+ * Overwritten in Retyper to return `mdef` unchanged.
+ */
+ protected def inlineExpansion(mdef: DefDef)(implicit ctx: Context): List[Tree] =
+ tpd.cpy.DefDef(mdef)(rhs = Inliner.bodyToInline(mdef.symbol)) ::
+ Inliner.removeInlineAccessors(mdef.symbol)
+
def typedExpr(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree =
typed(tree, pt)(ctx retractMode Mode.PatternOrType)
def typedType(tree: untpd.Tree, pt: Type = WildcardType)(implicit ctx: Context): Tree = // todo: retract mode between Type and Pattern?
@@ -1321,6 +1554,17 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
+ /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back
+ * to errors and result of `op1`.
+ */
+ def tryAlternatively[T](op1: Context => T)(op2: Context => T)(implicit ctx: Context): T =
+ tryEither(op1) { (failedVal, failedState) =>
+ tryEither(op2) { (_, _) =>
+ failedState.commit
+ failedVal
+ }
+ }
+
/** Add apply node or implicit conversions. Two strategies are tried, and the first
* that is successful is picked. If neither of the strategies are successful, continues with
* `fallBack`.
@@ -1335,30 +1579,27 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val sel = typedSelect(untpd.Select(untpd.TypedSplice(tree), nme.apply), pt)
if (sel.tpe.isError) sel else adapt(sel, pt)
} { (failedTree, failedState) =>
- val tree1 = tryInsertImplicitOnQualifier(tree, pt)
- if (tree1 eq tree) fallBack(failedTree, failedState)
- else adapt(tree1, pt)
+ tryInsertImplicitOnQualifier(tree, pt).getOrElse(fallBack(failedTree, failedState))
}
/** If this tree is a select node `qual.name`, try to insert an implicit conversion
- * `c` around `qual` so that `c(qual).name` conforms to `pt`. If that fails
- * return `tree` itself.
+ * `c` around `qual` so that `c(qual).name` conforms to `pt`.
*/
- def tryInsertImplicitOnQualifier(tree: Tree, pt: Type)(implicit ctx: Context): Tree = ctx.traceIndented(i"try insert impl on qualifier $tree $pt") {
+ def tryInsertImplicitOnQualifier(tree: Tree, pt: Type)(implicit ctx: Context): Option[Tree] = ctx.traceIndented(i"try insert impl on qualifier $tree $pt") {
tree match {
case Select(qual, name) =>
val qualProto = SelectionProto(name, pt, NoViewsAllowed)
tryEither { implicit ctx =>
val qual1 = adaptInterpolated(qual, qualProto, EmptyTree)
- if ((qual eq qual1) || ctx.reporter.hasErrors) tree
- else typedSelect(cpy.Select(tree)(untpd.TypedSplice(qual1), name), pt)
- } { (_, _) => tree
+ if ((qual eq qual1) || ctx.reporter.hasErrors) None
+ else Some(typed(cpy.Select(tree)(untpd.TypedSplice(qual1), name), pt))
+ } { (_, _) => None
}
- case _ => tree
+ case _ => None
}
}
- def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context) = /*>|>*/ track("adapt") /*<|<*/ {
+ def adapt(tree: Tree, pt: Type, original: untpd.Tree = untpd.EmptyTree)(implicit ctx: Context): Tree = /*>|>*/ track("adapt") /*<|<*/ {
/*>|>*/ ctx.traceIndented(i"adapting $tree of type ${tree.tpe} to $pt", typr, show = true) /*<|<*/ {
if (tree.isDef) interpolateUndetVars(tree, tree.symbol)
else if (!tree.tpe.widen.isInstanceOf[MethodOrPoly]) interpolateUndetVars(tree, NoSymbol)
@@ -1412,8 +1653,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
def methodStr = err.refStr(methPart(tree).tpe)
def missingArgs = errorTree(tree,
- d"""missing arguments for $methodStr
- |follow this method with `_' if you want to treat it as a partially applied function""".stripMargin)
+ em"""missing arguments for $methodStr
+ |follow this method with `_' if you want to treat it as a partially applied function""")
def adaptOverloaded(ref: TermRef) = {
val altDenots = ref.denot.alternatives
@@ -1427,8 +1668,8 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case Nil =>
def noMatches =
errorTree(tree,
- d"""none of the ${err.overloadedAltsStr(altDenots)}
- |match $expectedStr""".stripMargin)
+ em"""none of the ${err.overloadedAltsStr(altDenots)}
+ |match $expectedStr""")
def hasEmptyParams(denot: SingleDenotation) = denot.info.paramTypess == ListOfNil
pt match {
case pt: FunProto =>
@@ -1443,19 +1684,27 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val remainingDenots = alts map (_.denot.asInstanceOf[SingleDenotation])
def all = if (remainingDenots.length == 2) "both" else "all"
errorTree(tree,
- d"""Ambiguous overload. The ${err.overloadedAltsStr(remainingDenots)}
- |$all match $expectedStr""".stripMargin)
+ em"""Ambiguous overload. The ${err.overloadedAltsStr(remainingDenots)}
+ |$all match $expectedStr""")
}
}
- def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match {
- case _: MethodType | _: PolyType =>
- def isUnary = wtp.firstParamTypes match {
+ def isUnary(tp: Type): Boolean = tp match {
+ case tp: MethodicType =>
+ tp.firstParamTypes match {
case ptype :: Nil => !ptype.isRepeatedParam
case _ => false
}
- if (pt.args.lengthCompare(1) > 0 && isUnary && ctx.canAutoTuple)
- adaptToArgs(wtp, pt.tupled)
+ case tp: TermRef =>
+ tp.denot.alternatives.forall(alt => isUnary(alt.info))
+ case _ =>
+ false
+ }
+
+ def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match {
+ case _: MethodType | _: PolyType =>
+ if (pt.args.lengthCompare(1) > 0 && isUnary(wtp) && ctx.canAutoTuple)
+ adaptInterpolated(tree, pt.tupled, original)
else
tree
case _ => tryInsertApplyOrImplicit(tree, pt) {
@@ -1463,7 +1712,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
case Apply(_, _) => " more"
case _ => ""
}
- (_, _) => errorTree(tree, d"$methodStr does not take$more parameters")
+ (_, _) => errorTree(tree, em"$methodStr does not take$more parameters")
}
}
@@ -1500,7 +1749,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
val tvarsToInstantiate = tvarsInParams(tree)
wtp.paramTypes.foreach(instantiateSelected(_, tvarsToInstantiate))
val constr = ctx.typerState.constraint
- def addImplicitArgs = {
+ def addImplicitArgs(implicit ctx: Context) = {
val errors = new mutable.ListBuffer[() => String]
def implicitArgError(msg: => String) = {
errors += (() => msg)
@@ -1508,20 +1757,12 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
def issueErrors() = {
for (err <- errors) ctx.error(err(), tree.pos.endPos)
- tree
+ tree.withType(wtp.resultType)
}
val args = (wtp.paramNames, wtp.paramTypes).zipped map { (pname, formal) =>
- def where = d"parameter $pname of $methodStr"
- inferImplicit(formal, EmptyTree, tree.pos.endPos) match {
- case SearchSuccess(arg, _, _) =>
- arg
- case ambi: AmbiguousImplicits =>
- implicitArgError(s"ambiguous implicits: ${ambi.explanation} of $where")
- case failure: SearchFailure =>
- val arg = synthesizedClassTag(formal)
- if (!arg.isEmpty) arg
- else implicitArgError(d"no implicit argument of type $formal found for $where" + failure.postscript)
- }
+ def implicitArgError(msg: String => String) =
+ errors += (() => msg(em"parameter $pname of $methodStr"))
+ inferImplicitArg(formal, implicitArgError, tree.pos.endPos)
}
if (errors.nonEmpty) {
// If there are several arguments, some arguments might already
@@ -1547,9 +1788,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
else adapt(tpd.Apply(tree, args), pt)
}
- if ((pt eq WildcardType) || original.isEmpty) addImplicitArgs
+ if ((pt eq WildcardType) || original.isEmpty) addImplicitArgs(argCtx(tree))
else
- ctx.typerState.tryWithFallback(addImplicitArgs) {
+ ctx.typerState.tryWithFallback(addImplicitArgs(argCtx(tree))) {
adapt(typed(original, WildcardType), pt, EmptyTree)
}
case wtp: MethodType if !pt.isInstanceOf[SingletonType] =>
@@ -1571,8 +1812,32 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
else
missingArgs
case _ =>
- if (tree.tpe <:< pt) tree
- else if (ctx.mode is Mode.Pattern) tree // no subtype check for pattern
+ ctx.typeComparer.GADTused = false
+ if (ctx.mode is Mode.Pattern) {
+ tree match {
+ case _: RefTree | _: Literal if !isVarPattern(tree) =>
+ checkCanEqual(pt, wtp, tree.pos)(ctx.retractMode(Mode.Pattern))
+ case _ =>
+ }
+ tree
+ }
+ else if (tree.tpe <:< pt) {
+ if (pt.hasAnnotation(defn.InlineParamAnnot))
+ checkInlineConformant(tree, "argument to inline parameter")
+ if (Inliner.hasBodyToInline(tree.symbol) &&
+ !ctx.owner.ownersIterator.exists(_.isInlineMethod) &&
+ !ctx.settings.YnoInline.value &&
+ !ctx.isAfterTyper)
+ adapt(Inliner.inlineCall(tree, pt), pt)
+ else if (ctx.typeComparer.GADTused && pt.isValueType)
+ // Insert an explicit cast, so that -Ycheck in later phases succeeds.
+ // I suspect, but am not 100% sure that this might affect inferred types,
+ // if the expected type is a supertype of the GADT bound. It would be good to come
+ // up with a test case for this.
+ tree.asInstance(pt)
+ else
+ tree
+ }
else if (wtp.isInstanceOf[MethodType]) missingArgs
else {
typr.println(i"adapt to subtype ${tree.tpe} !<:< $pt")
@@ -1580,29 +1845,6 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
adaptToSubType(wtp)
}
}
-
- /** If `formal` is of the form ClassTag[T], where `T` is a class type,
- * synthesize a class tag for `T`.
- */
- def synthesizedClassTag(formal: Type): Tree = {
- if (formal.isRef(defn.ClassTagClass))
- formal.argTypes match {
- case arg :: Nil =>
- val tp = fullyDefinedType(arg, "ClassTag argument", tree.pos)
- tp.underlyingClassRef(refinementOK = false) match {
- case tref: TypeRef =>
- return ref(defn.ClassTagModule)
- .select(nme.apply)
- .appliedToType(tp)
- .appliedTo(clsOf(tref))
- .withPos(tree.pos.endPos)
- case _ =>
- }
- case _ =>
- }
- EmptyTree
- }
-
/** Adapt an expression of constant type to a different constant type `tpe`. */
def adaptConstant(tree: Tree, tpe: ConstantType): Tree = {
def lit = Literal(tpe.value).withPos(tree.pos)
@@ -1621,7 +1863,9 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
if (folded ne tree) return adaptConstant(folded, folded.tpe.asInstanceOf[ConstantType])
// drop type if prototype is Unit
if (pt isRef defn.UnitClass)
- return tpd.Block(tree :: Nil, Literal(Constant(())))
+ // local adaptation makes sure every adapted tree conforms to its pt
+ // so will take the code path that decides on inlining
+ return tpd.Block(adapt(tree, WildcardType) :: Nil, Literal(Constant(())))
// convert function literal to SAM closure
tree match {
case Closure(Nil, id @ Ident(nme.ANON_FUN), _)
@@ -1648,22 +1892,36 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
}
}
+ def adaptType(tp: Type): Tree = {
+ val tree1 =
+ if ((pt eq AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty) tree
+ else tree.withType(tree.tpe.EtaExpand(tp.typeParamSymbols))
+ if ((ctx.mode is Mode.Pattern) || tree1.tpe <:< pt) tree1
+ else err.typeMismatch(tree1, pt)
+ }
+
tree match {
case _: MemberDef | _: PackageDef | _: Import | _: WithoutTypeOrPos[_] => tree
case _ => tree.tpe.widen match {
- case ErrorType =>
+ case _: ErrorType =>
tree
case ref: TermRef =>
- adaptOverloaded(ref)
- case poly: PolyType =>
+ pt match {
+ case pt: FunProto
+ if pt.args.lengthCompare(1) > 0 && isUnary(ref) && ctx.canAutoTuple =>
+ adaptInterpolated(tree, pt.tupled, original)
+ case _ =>
+ adaptOverloaded(ref)
+ }
+ case poly: PolyType if !(ctx.mode is Mode.Type) =>
if (pt.isInstanceOf[PolyProto]) tree
else {
var typeArgs = tree match {
- case Select(New(tpt), nme.CONSTRUCTOR) => tpt.tpe.dealias.argTypesLo
+ case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo
case _ => Nil
}
if (typeArgs.isEmpty) typeArgs = constrained(poly, tree)._2
- convertNewArray(
+ convertNewGenericArray(
adaptInterpolated(tree.appliedToTypes(typeArgs), pt, original))
}
case wtp =>
@@ -1675,9 +1933,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit
(_, _) => tree // error will be reported in typedTypeApply
}
case _ =>
- if (ctx.mode is Mode.Type)
- if ((ctx.mode is Mode.Pattern) || tree.tpe <:< pt) tree
- else err.typeMismatch(tree, pt)
+ if (ctx.mode is Mode.Type) adaptType(tree.tpe)
else adaptNoArgs(wtp)
}
}
diff --git a/src/dotty/tools/dotc/typer/VarianceChecker.scala b/src/dotty/tools/dotc/typer/VarianceChecker.scala
index 274218ee3..d5dd5a024 100644
--- a/src/dotty/tools/dotc/typer/VarianceChecker.scala
+++ b/src/dotty/tools/dotc/typer/VarianceChecker.scala
@@ -132,11 +132,11 @@ class VarianceChecker()(implicit ctx: Context) {
case defn: MemberDef if skip =>
ctx.debuglog(s"Skipping variance check of ${sym.showDcl}")
case tree: TypeDef =>
- checkVariance(sym, tree.envelope)
+ checkVariance(sym, tree.pos)
case tree: ValDef =>
- checkVariance(sym, tree.envelope)
+ checkVariance(sym, tree.pos)
case DefDef(_, tparams, vparamss, _, _) =>
- checkVariance(sym, tree.envelope)
+ checkVariance(sym, tree.pos)
tparams foreach traverse
vparamss foreach (_ foreach traverse)
case Template(_, _, _, body) =>
diff --git a/src/dotty/tools/dotc/typer/Variances.scala b/src/dotty/tools/dotc/typer/Variances.scala
index 55e6b5232..92bd9fd74 100644
--- a/src/dotty/tools/dotc/typer/Variances.scala
+++ b/src/dotty/tools/dotc/typer/Variances.scala
@@ -75,13 +75,26 @@ object Variances {
case tp @ TypeBounds(lo, hi) =>
if (lo eq hi) compose(varianceInType(hi)(tparam), tp.variance)
else flip(varianceInType(lo)(tparam)) & varianceInType(hi)(tparam)
- case tp @ RefinedType(parent, _) =>
- varianceInType(parent)(tparam) & varianceInType(tp.refinedInfo)(tparam)
+ case tp @ RefinedType(parent, _, rinfo) =>
+ varianceInType(parent)(tparam) & varianceInType(rinfo)(tparam)
+ case tp: RecType =>
+ varianceInType(tp.parent)(tparam)
case tp @ MethodType(_, paramTypes) =>
flip(varianceInTypes(paramTypes)(tparam)) & varianceInType(tp.resultType)(tparam)
case ExprType(restpe) =>
varianceInType(restpe)(tparam)
- case tp @ PolyType(_) =>
+ case tp @ HKApply(tycon, args) =>
+ def varianceInArgs(v: Variance, args: List[Type], tparams: List[TypeParamInfo]): Variance =
+ args match {
+ case arg :: args1 =>
+ varianceInArgs(
+ v & compose(varianceInType(arg)(tparam), tparams.head.paramVariance),
+ args1, tparams.tail)
+ case nil =>
+ v
+ }
+ varianceInArgs(varianceInType(tycon)(tparam), args, tycon.typeParams)
+ case tp: PolyType =>
flip(varianceInTypes(tp.paramBounds)(tparam)) & varianceInType(tp.resultType)(tparam)
case AnnotatedType(tp, annot) =>
varianceInType(tp)(tparam) & varianceInAnnot(annot)(tparam)
diff --git a/src/dotty/tools/dotc/util/Attachment.scala b/src/dotty/tools/dotc/util/Attachment.scala
index 8088b4cd0..20facfd97 100644
--- a/src/dotty/tools/dotc/util/Attachment.scala
+++ b/src/dotty/tools/dotc/util/Attachment.scala
@@ -4,9 +4,7 @@ package dotty.tools.dotc.util
* adding, removing and lookup of attachments. Attachments are typed key/value pairs.
*/
object Attachment {
-
- /** The class of keys for attachments yielding values of type V */
- class Key[+V]
+ import Property.Key
/** An implementation trait for attachments.
* Clients should inherit from Container instead.
diff --git a/src/dotty/tools/dotc/util/CommentParsing.scala b/src/dotty/tools/dotc/util/CommentParsing.scala
new file mode 100644
index 000000000..cc790d683
--- /dev/null
+++ b/src/dotty/tools/dotc/util/CommentParsing.scala
@@ -0,0 +1,239 @@
+/*
+ * Port of DocStrings.scala from nsc
+ * @author Martin Odersky
+ * @author Felix Mulder
+ */
+package dotty.tools.dotc.util
+
+/** The comment parsing in `dotc` is used by both the comment cooking and the
+ * dottydoc tool.
+ *
+ * The comment cooking is used to expand comments with `@inheritdoc` and
+ * `@define` annotations. The rest of the comment is untouched and later
+ * handled by dottydoc.
+ */
+object CommentParsing {
+ import scala.reflect.internal.Chars._
+
+ /** Returns index of string `str` following `start` skipping longest
+ * sequence of whitespace characters (but no newlines)
+ */
+ def skipWhitespace(str: String, start: Int): Int =
+ if (start < str.length && isWhitespace(str charAt start)) skipWhitespace(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping
+ * sequence of identifier characters.
+ */
+ def skipIdent(str: String, start: Int): Int =
+ if (start < str.length && isIdentifierPart(str charAt start)) skipIdent(str, start + 1)
+ else start
+
+ /** Returns index of string `str` following `start` skipping a
+ * tag: an `@` sign followed by a sequence of identifier characters.
+ */
+ def skipTag(str: String, start: Int): Int =
+ if (start < str.length && (str charAt start) == '@') skipIdent(str, start + 1)
+ else start
+
+
+ /** Returns index of string `str` after `start` skipping longest
+ * sequence of space and tab characters, possibly also containing
+ * a single `*` character or the `/``**` sequence.
+ * @pre start == str.length || str(start) == `\n`
+ */
+ def skipLineLead(str: String, start: Int): Int =
+ if (start == str.length) start
+ else {
+ val idx = skipWhitespace(str, start + 1)
+ if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1)
+ else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*')
+ skipWhitespace(str, idx + 3)
+ else idx
+ }
+
+ /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`.
+ */
+ def skipToEol(str: String, start: Int): Int =
+ if (start + 2 < str.length && (str charAt start) == '/' && (str charAt (start + 1)) == '*' && (str charAt (start + 2)) == '*') start + 3
+ else if (start < str.length && (str charAt start) != '\n') skipToEol(str, start + 1)
+ else start
+
+ /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment
+ * which satisfies predicate `p`.
+ */
+ def findNext(str: String, start: Int)(p: Int => Boolean): Int = {
+ val idx = skipLineLead(str, skipToEol(str, start))
+ if (idx < str.length && !p(idx)) findNext(str, idx)(p)
+ else idx
+ }
+
+ /** Return all indices following `start` that start a line (i.e. after skipLineLead)
+ * and satisfy predicate `p`.
+ */
+ def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = {
+ val idx = findNext(str, start)(p)
+ if (idx == str.length) List()
+ else idx :: findAll(str, idx)(p)
+ }
+
+ /** Produces a string index, which is a list of `sections`, i.e
+ * pairs of start/end positions of all tagged sections in the string.
+ * Every section starts with an at sign and extends to the next at sign,
+ * or to the end of the comment string, but excluding the final two
+ * characters which terminate the comment.
+ *
+ * Also take usecases into account - they need to expand until the next
+ * usecase or the end of the string, as they might include other sections
+ * of their own
+ */
+ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = {
+ var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx))
+ indices = mergeUsecaseSections(str, indices)
+ indices = mergeInheritdocSections(str, indices)
+
+ indices match {
+ case List() => List()
+ case idxs => idxs zip (idxs.tail ::: List(str.length - 2))
+ }
+ }
+
+ /**
+ * Merge sections following an usecase into the usecase comment, so they
+ * can override the parent symbol's sections
+ */
+ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = {
+ idxs.indexWhere(str.startsWith("@usecase", _)) match {
+ case firstUCIndex if firstUCIndex != -1 =>
+ val commentSections = idxs.take(firstUCIndex)
+ val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _))
+ commentSections ::: usecaseSections
+ case _ =>
+ idxs
+ }
+ }
+
+ /**
+ * Merge the inheritdoc sections, as they never make sense on their own
+ */
+ def mergeInheritdocSections(str: String, idxs: List[Int]): List[Int] =
+ idxs.filterNot(str.startsWith("@inheritdoc", _))
+
+ /** Does interval `iv` start with given `tag`?
+ */
+ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean =
+ startsWithTag(str, section._1, tag)
+
+ def startsWithTag(str: String, start: Int, tag: String): Boolean =
+ str.startsWith(tag, start) && !isIdentifierPart(str charAt (start + tag.length))
+
+ /** The first start tag of a list of tag intervals,
+ * or the end of the whole comment string - 2 if list is empty
+ */
+ def startTag(str: String, sections: List[(Int, Int)]) = sections match {
+ case Nil => str.length - 2
+ case (start, _) :: _ => start
+ }
+
+ /** A map from parameter names to start/end indices describing all parameter
+ * sections in `str` tagged with `tag`, where `sections` is the index of `str`.
+ */
+ def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections if startsWithTag(str, section, tag)) yield {
+ val start = skipWhitespace(str, section._1 + tag.length)
+ str.substring(start, skipIdent(str, start)) -> section
+ }
+ }
+
+ /** Optionally start and end index of the group section in `str`, or `None`
+ * if `str` does not have a @group. */
+ def groupDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@group"))
+
+
+ /** Optionally start and end index of return section in `str`, or `None`
+ * if `str` does not have a @return.
+ */
+ def returnDoc(str: String, sections: List[(Int, Int)]): Option[(Int, Int)] =
+ sections find (startsWithTag(str, _, "@return"))
+
+ /** Extracts variable name from a string, stripping any pair of surrounding braces */
+ def variableName(str: String): String =
+ if (str.length >= 2 && (str charAt 0) == '{' && (str charAt (str.length - 1)) == '}')
+ str.substring(1, str.length - 1)
+ else
+ str
+
+ /** Returns index following variable, or start index if no variable was recognized
+ */
+ def skipVariable(str: String, start: Int): Int = {
+ var idx = start
+ if (idx < str.length && (str charAt idx) == '{') {
+ do idx += 1
+ while (idx < str.length && (str charAt idx) != '}')
+ if (idx < str.length) idx + 1 else start
+ } else {
+ while (idx < str.length && isVarPart(str charAt idx))
+ idx += 1
+ idx
+ }
+ }
+
+ /** A map from the section tag to section parameters */
+ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] =
+ Map() ++ {
+ for (section <- sections) yield
+ extractSectionTag(str, section) -> section
+ }
+
+ /** Extract the section tag, treating the section tag as an identifier */
+ def extractSectionTag(str: String, section: (Int, Int)): String =
+ str.substring(section._1, skipTag(str, section._1))
+
+ /** Extract the section parameter */
+ def extractSectionParam(str: String, section: (Int, Int)): String = {
+ val (beg, _) = section
+ assert(str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+
+ val start = skipWhitespace(str, skipTag(str, beg))
+ val finish = skipIdent(str, start)
+
+ str.substring(start, finish)
+ }
+
+ /** Extract the section text, except for the tag and comment newlines */
+ def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = {
+ val (beg, end) = section
+ if (str.startsWith("@param", beg) ||
+ str.startsWith("@tparam", beg) ||
+ str.startsWith("@throws", beg))
+ (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end)
+ else
+ (skipWhitespace(str, skipTag(str, beg)), end)
+ }
+
+ /** Cleanup section text: collapse comment line leads, then strip trailing newlines */
+ def cleanupSectionText(str: String) = {
+ var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n")
+ while (result.endsWith("\n"))
+ result = result.substring(0, result.length - 1)
+ result
+ }
+
+
+ def removeSections(raw: String, xs: String*): String = {
+ val sections = tagIndex(raw)
+
+ val toBeRemoved = for {
+ section <- xs
+ lines = sections filter { startsWithTag(raw, _, section) }
+ } yield lines
+
+ val end = startTag(raw, toBeRemoved.flatten.sortBy(_._1).toList)
+
+ if (end == raw.length - 2) raw else raw.substring(0, end) + "*/"
+ }
+}
diff --git a/src/dotty/tools/dotc/util/DiffUtil.scala b/src/dotty/tools/dotc/util/DiffUtil.scala
new file mode 100644
index 000000000..b55aee719
--- /dev/null
+++ b/src/dotty/tools/dotc/util/DiffUtil.scala
@@ -0,0 +1,174 @@
+package dotty.tools.dotc.util
+
+import scala.annotation.tailrec
+import scala.collection.mutable
+
+object DiffUtil {
+
+ private final val ANSI_DEFAULT = "\u001B[0m"
+ private final val ANSI_RED = "\u001B[31m"
+ private final val ANSI_GREEN = "\u001B[32m"
+
+ private final val DELETION_COLOR = ANSI_RED
+ private final val ADDITION_COLOR = ANSI_GREEN
+
+ @tailrec private def splitTokens(str: String, acc: List[String] = Nil): List[String] = {
+ if (str == "") {
+ acc.reverse
+ } else {
+ val head = str.charAt(0)
+ val (token, rest) = if (Character.isAlphabetic(head) || Character.isDigit(head)) {
+ str.span(c => Character.isAlphabetic(c) || Character.isDigit(c))
+ } else if (Character.isMirrored(head) || Character.isWhitespace(head)) {
+ str.splitAt(1)
+ } else {
+ str.span { c =>
+ !Character.isAlphabetic(c) && !Character.isDigit(c) &&
+ !Character.isMirrored(c) && !Character.isWhitespace(c)
+ }
+ }
+ splitTokens(rest, token :: acc)
+ }
+ }
+
+
+ /** @return a tuple of the (found, expected, changedPercentage) diffs as strings */
+ def mkColoredTypeDiff(found: String, expected: String): (String, String, Double) = {
+ var totalChange = 0
+ val foundTokens = splitTokens(found, Nil).toArray
+ val expectedTokens = splitTokens(expected, Nil).toArray
+
+ val diffExp = hirschberg(foundTokens, expectedTokens)
+ val diffAct = hirschberg(expectedTokens, foundTokens)
+
+ val exp = diffExp.collect {
+ case Unmodified(str) => str
+ case Inserted(str) =>
+ totalChange += str.length
+ ADDITION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+
+ val fnd = diffAct.collect {
+ case Unmodified(str) => str
+ case Inserted(str) =>
+ totalChange += str.length
+ DELETION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+
+ (fnd, exp, totalChange.toDouble / (expected.length + found.length))
+ }
+
+ def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = {
+
+ val tokens = splitTokens(code, Nil).toArray
+ val lastTokens = splitTokens(lastCode, Nil).toArray
+
+ val diff = hirschberg(lastTokens, tokens)
+
+ diff.collect {
+ case Unmodified(str) => str
+ case Inserted(str) => ADDITION_COLOR + str + ANSI_DEFAULT
+ case Modified(old, str) if printDiffDel => DELETION_COLOR + old + ADDITION_COLOR + str + ANSI_DEFAULT
+ case Modified(_, str) => ADDITION_COLOR + str + ANSI_DEFAULT
+ case Deleted(str) if printDiffDel => DELETION_COLOR + str + ANSI_DEFAULT
+ }.mkString
+ }
+
+ private sealed trait Patch
+ private final case class Unmodified(str: String) extends Patch
+ private final case class Modified(original: String, str: String) extends Patch
+ private final case class Deleted(str: String) extends Patch
+ private final case class Inserted(str: String) extends Patch
+
+ private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = {
+ def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
+ if (x.isEmpty) {
+ builder += Inserted(y.mkString)
+ } else if (y.isEmpty) {
+ builder += Deleted(x.mkString)
+ } else if (x.length == 1 || y.length == 1) {
+ needlemanWunsch(x, y, builder)
+ } else {
+ val xlen = x.length
+ val xmid = xlen / 2
+ val ylen = y.length
+
+ val (x1, x2) = x.splitAt(xmid)
+ val leftScore = nwScore(x1, y)
+ val rightScore = nwScore(x2.reverse, y.reverse)
+ val scoreSum = (leftScore zip rightScore.reverse).map {
+ case (left, right) => left + right
+ }
+ val max = scoreSum.max
+ val ymid = scoreSum.indexOf(max)
+
+ val (y1, y2) = y.splitAt(ymid)
+ build(x1, y1, builder)
+ build(x2, y2, builder)
+ }
+ }
+ val builder = Array.newBuilder[Patch]
+ build(a, b, builder)
+ builder.result()
+ }
+
+ private def nwScore(x: Array[String], y: Array[String]): Array[Int] = {
+ def ins(s: String) = -2
+ def del(s: String) = -2
+ def sub(s1: String, s2: String) = if (s1 == s2) 2 else -1
+
+ val score = Array.fill(x.length + 1, y.length + 1)(0)
+ for (j <- 1 to y.length)
+ score(0)(j) = score(0)(j - 1) + ins(y(j - 1))
+ for (i <- 1 to x.length) {
+ score(i)(0) = score(i - 1)(0) + del(x(i - 1))
+ for (j <- 1 to y.length) {
+ val scoreSub = score(i - 1)(j - 1) + sub(x(i - 1), y(j - 1))
+ val scoreDel = score(i - 1)(j) + del(x(i - 1))
+ val scoreIns = score(i)(j - 1) + ins(y(j - 1))
+ score(i)(j) = scoreSub max scoreDel max scoreIns
+ }
+ }
+ Array.tabulate(y.length + 1)(j => score(x.length)(j))
+ }
+
+ private def needlemanWunsch(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = {
+ def similarity(a: String, b: String) = if (a == b) 2 else -1
+ val d = 1
+ val score = Array.tabulate(x.length + 1, y.length + 1) { (i, j) =>
+ if (i == 0) d * j
+ else if (j == 0) d * i
+ else 0
+ }
+ for (i <- 1 to x.length) {
+ for (j <- 1 to y.length) {
+ val mtch = score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))
+ val delete = score(i - 1)(j) + d
+ val insert = score(i)(j - 1) + d
+ score(i)(j) = mtch max insert max delete
+ }
+ }
+
+ var alignment = List.empty[Patch]
+ var i = x.length
+ var j = y.length
+ while (i > 0 || j > 0) {
+ if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) {
+ val newHead =
+ if (x(i - 1) == y(j - 1)) Unmodified(x(i - 1))
+ else Modified(x(i - 1), y(j - 1))
+ alignment = newHead :: alignment
+ i = i - 1
+ j = j - 1
+ } else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) {
+ alignment = Deleted(x(i - 1)) :: alignment
+ i = i - 1
+ } else {
+ alignment = Inserted(y(j - 1)) :: alignment
+ j = j - 1
+ }
+ }
+ builder ++= alignment
+ }
+
+}
diff --git a/src/dotty/tools/dotc/util/Property.scala b/src/dotty/tools/dotc/util/Property.scala
new file mode 100644
index 000000000..608fc88e6
--- /dev/null
+++ b/src/dotty/tools/dotc/util/Property.scala
@@ -0,0 +1,10 @@
+package dotty.tools.dotc.util
+
+/** Defines a key type with which to tag properties, such as attachments
+ * or context properties
+ */
+object Property {
+
+ /** The class of keys for properties of type V */
+ class Key[+V]
+} \ No newline at end of file
diff --git a/src/dotty/tools/dotc/util/SourceFile.scala b/src/dotty/tools/dotc/util/SourceFile.scala
index 6b547203e..1d4c9c2ab 100644
--- a/src/dotty/tools/dotc/util/SourceFile.scala
+++ b/src/dotty/tools/dotc/util/SourceFile.scala
@@ -10,6 +10,7 @@ import java.io.IOException
import Chars._
import ScriptSourceFile._
import Positions._
+import scala.io.Codec
import java.util.Optional
@@ -36,9 +37,9 @@ object ScriptSourceFile {
case class SourceFile(file: AbstractFile, content: Array[Char]) extends interfaces.SourceFile {
- def this(_file: AbstractFile) = this(_file, _file.toCharArray)
- def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
- def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
+ def this(_file: AbstractFile, codec: Codec) = this(_file, new String(_file.toByteArray, codec.charSet).toCharArray)
+ def this(sourceName: String, cs: Seq[Char]) = this(new VirtualFile(sourceName), cs.toArray)
+ def this(file: AbstractFile, cs: Seq[Char]) = this(file, cs.toArray)
/** Tab increment; can be overridden */
def tabInc = 8
@@ -96,7 +97,7 @@ case class SourceFile(file: AbstractFile, content: Array[Char]) extends interfac
private lazy val lineIndices: Array[Int] = calculateLineIndices(content)
/** Map line to offset of first character in line */
- def lineToOffset(index : Int): Int = lineIndices(index)
+ def lineToOffset(index: Int): Int = lineIndices(index)
/** A cache to speed up offsetToLine searches to similar lines */
private var lastLine = 0
@@ -139,5 +140,6 @@ case class SourceFile(file: AbstractFile, content: Array[Char]) extends interfac
@sharable object NoSource extends SourceFile("<no source>", Nil) {
override def exists = false
+ override def atPos(pos: Position): SourcePosition = NoSourcePosition
}
diff --git a/src/dotty/tools/dotc/util/SourcePosition.scala b/src/dotty/tools/dotc/util/SourcePosition.scala
index 0b2b2aa0b..595ea34ca 100644
--- a/src/dotty/tools/dotc/util/SourcePosition.scala
+++ b/src/dotty/tools/dotc/util/SourcePosition.scala
@@ -5,7 +5,8 @@ package util
import Positions.{Position, NoPosition}
/** A source position is comprised of a position in a source file */
-case class SourcePosition(source: SourceFile, pos: Position) extends interfaces.SourcePosition {
+case class SourcePosition(source: SourceFile, pos: Position, outer: SourcePosition = NoSourcePosition)
+extends interfaces.SourcePosition {
def exists = pos.exists
def lineContent: String = source.lineContent(point)
@@ -13,6 +14,23 @@ case class SourcePosition(source: SourceFile, pos: Position) extends interfaces.
def point: Int = pos.point
/** The line of the position, starting at 0 */
def line: Int = source.offsetToLine(point)
+
+ /** The lines of the position */
+ def lines: List[Int] =
+ List.range(source.offsetToLine(start), source.offsetToLine(end + 1)) match {
+ case Nil => line :: Nil
+ case xs => xs
+ }
+
+ def lineOffsets: List[Int] =
+ lines.map(source.lineToOffset(_))
+
+ def lineContent(lineNumber: Int): String =
+ source.lineContent(source.lineToOffset(lineNumber))
+
+ def beforeAndAfterPoint: (List[Int], List[Int]) =
+ lineOffsets.partition(_ < point)
+
/** The column of the position, starting at 0 */
def column: Int = source.column(point)
@@ -24,6 +42,8 @@ case class SourcePosition(source: SourceFile, pos: Position) extends interfaces.
def endLine: Int = source.offsetToLine(end)
def endColumn: Int = source.column(end)
+ def withOuter(outer: SourcePosition) = new SourcePosition(source, pos, outer)
+
override def toString =
if (source.exists) s"${source.file}:${line + 1}"
else s"(no source file, offset = ${pos.point})"
@@ -32,5 +52,6 @@ case class SourcePosition(source: SourceFile, pos: Position) extends interfaces.
/** A sentinel for a non-existing source position */
@sharable object NoSourcePosition extends SourcePosition(NoSource, NoPosition) {
override def toString = "?"
+ override def withOuter(outer: SourcePosition) = outer
}
diff --git a/src/dotty/tools/dotc/util/Stats.scala b/src/dotty/tools/dotc/util/Stats.scala
index fdd3602c9..b7e0996f5 100644
--- a/src/dotty/tools/dotc/util/Stats.scala
+++ b/src/dotty/tools/dotc/util/Stats.scala
@@ -7,27 +7,34 @@ import collection.mutable
@sharable object Stats {
- final val enabled = true
+ final val enabled = false
/** The period in ms in which stack snapshots are displayed */
final val HeartBeatPeriod = 250
+ var monitored = false
+
@volatile private var stack: List[String] = Nil
val hits = new mutable.HashMap[String, Int] {
override def default(key: String): Int = 0
}
- def record(fn: String, n: Int = 1) = {
+ @inline
+ def record(fn: String, n: Int = 1) =
+ if (enabled) doRecord(fn, n)
+
+ private def doRecord(fn: String, n: Int) =
if (monitored) {
val name = if (fn.startsWith("member-")) "member" else fn
hits(name) += n
}
- }
-
- var monitored = false
+ @inline
def track[T](fn: String)(op: => T) =
+ if (enabled) doTrack(fn)(op) else op
+
+ def doTrack[T](fn: String)(op: => T) =
if (monitored) {
stack = fn :: stack
record(fn)
diff --git a/src/scala/Eq.scala b/src/scala/Eq.scala
new file mode 100644
index 000000000..d6d617cab
--- /dev/null
+++ b/src/scala/Eq.scala
@@ -0,0 +1,14 @@
+package scala
+
+import annotation.implicitNotFound
+
+/** A marker trait indicating that values of type `L` can be compared to values of type `R`. */
+@implicitNotFound("Values of types ${L} and ${R} cannot be compared with == or !=")
+sealed trait Eq[-L, -R]
+
+/** Besides being a companion object, this object
+ * can also be used as a value that's compatible with
+ * any instance of `Eq`.
+ */
+object Eq extends Eq[Any, Any]
+
diff --git a/src/scala/Function23.scala b/src/scala/Function23.scala
new file mode 100644
index 000000000..254772d53
--- /dev/null
+++ b/src/scala/Function23.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+
+/** A function of 23 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function23[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23): R
+
+ override def toString() = "<function23>"
+}
diff --git a/src/scala/Function24.scala b/src/scala/Function24.scala
new file mode 100644
index 000000000..8af8ed995
--- /dev/null
+++ b/src/scala/Function24.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+
+/** A function of 24 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function24[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24): R
+
+ override def toString() = "<function24>"
+}
diff --git a/src/scala/Function25.scala b/src/scala/Function25.scala
new file mode 100644
index 000000000..6df740b5b
--- /dev/null
+++ b/src/scala/Function25.scala
@@ -0,0 +1,21 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+
+/** A function of 25 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function25[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, -T25, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24, v25: T25): R
+
+ override def toString() = "<function25>"
+}
diff --git a/src/scala/Function26.scala b/src/scala/Function26.scala
new file mode 100644
index 000000000..5daccb6f0
--- /dev/null
+++ b/src/scala/Function26.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+/** A function of 26 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function26[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, -T25, -T26, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24, v25: T25, v26: T26): R
+
+ override def toString() = "<function26>"
+}
diff --git a/src/scala/Function27.scala b/src/scala/Function27.scala
new file mode 100644
index 000000000..daebd3ed4
--- /dev/null
+++ b/src/scala/Function27.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+/** A function of 27 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function27[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, -T25, -T26, -T27, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24, v25: T25, v26: T26, v27: T27): R
+
+ override def toString() = "<function27>"
+}
diff --git a/src/scala/Function28.scala b/src/scala/Function28.scala
new file mode 100644
index 000000000..82912caea
--- /dev/null
+++ b/src/scala/Function28.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+/** A function of 28 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function28[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, -T25, -T26, -T27, -T28, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24, v25: T25, v26: T26, v27: T27, v28: T28): R
+
+ override def toString() = "<function28>"
+}
diff --git a/src/scala/Function29.scala b/src/scala/Function29.scala
new file mode 100644
index 000000000..6dabf2051
--- /dev/null
+++ b/src/scala/Function29.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+/** A function of 29 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function29[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, -T25, -T26, -T27, -T28, -T29, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24, v25: T25, v26: T26, v27: T27, v28: T28, v29: T29): R
+
+ override def toString() = "<function29>"
+}
diff --git a/src/scala/Function30.scala b/src/scala/Function30.scala
new file mode 100644
index 000000000..20fd970dd
--- /dev/null
+++ b/src/scala/Function30.scala
@@ -0,0 +1,20 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala
+
+/** A function of 30 parameters. Used as a temporary fix until arity limit is dropped.
+ *
+ */
+trait Function30[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, -T13, -T14, -T15, -T16, -T17, -T18, -T19, -T20, -T21, -T22, -T23, -T24, -T25, -T26, -T27, -T28, -T29, -T30, +R] extends AnyRef { self =>
+ /** Apply the body of this function to the arguments.
+ * @return the result of function application.
+ */
+ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22, v23: T23, v24: T24, v25: T25, v26: T26, v27: T27, v28: T28, v29: T29, v30: T30): R
+
+ override def toString() = "<function30>"
+}
diff --git a/src/scala/compat/java8/JFunction.java b/src/scala/compat/java8/JFunction.java
index 8e5a77d47..d68805d08 100644
--- a/src/scala/compat/java8/JFunction.java
+++ b/src/scala/compat/java8/JFunction.java
@@ -11,96 +11,183 @@ public final class JFunction {
private JFunction() {}
public static <R> scala.Function0<R> func(JFunction0<R> f) { return f; }
public static scala.Function0<BoxedUnit> proc(JProcedure0 p) { return p; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<BoxedUnit> procSpecialized(JFunction0$mcV$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Byte> funcSpecialized(JFunction0$mcB$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Short> funcSpecialized(JFunction0$mcS$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Integer> funcSpecialized(JFunction0$mcI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Long> funcSpecialized(JFunction0$mcJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Character> funcSpecialized(JFunction0$mcC$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Float> funcSpecialized(JFunction0$mcF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Double> funcSpecialized(JFunction0$mcD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function0<Boolean> funcSpecialized(JFunction0$mcZ$sp f) { return f; }
public static <T1, R> scala.Function1<T1, R> func(JFunction1<T1, R> f) { return f; }
public static <T1> scala.Function1<T1, BoxedUnit> proc(JProcedure1<T1> p) { return p; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, BoxedUnit> procSpecialized(JFunction1$mcVI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Boolean> funcSpecialized(JFunction1$mcZI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Integer> funcSpecialized(JFunction1$mcII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Float> funcSpecialized(JFunction1$mcFI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Long> funcSpecialized(JFunction1$mcJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Integer, Double> funcSpecialized(JFunction1$mcDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, BoxedUnit> procSpecialized(JFunction1$mcVJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Boolean> funcSpecialized(JFunction1$mcZJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Integer> funcSpecialized(JFunction1$mcIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Float> funcSpecialized(JFunction1$mcFJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Long> funcSpecialized(JFunction1$mcJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Long, Double> funcSpecialized(JFunction1$mcDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, BoxedUnit> procSpecialized(JFunction1$mcVF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Boolean> funcSpecialized(JFunction1$mcZF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Integer> funcSpecialized(JFunction1$mcIF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Float> funcSpecialized(JFunction1$mcFF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Long> funcSpecialized(JFunction1$mcJF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Float, Double> funcSpecialized(JFunction1$mcDF$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, BoxedUnit> procSpecialized(JFunction1$mcVD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Boolean> funcSpecialized(JFunction1$mcZD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Integer> funcSpecialized(JFunction1$mcID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Float> funcSpecialized(JFunction1$mcFD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Long> funcSpecialized(JFunction1$mcJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function1<Double, Double> funcSpecialized(JFunction1$mcDD$sp f) { return f; }
public static <T1, T2, R> scala.Function2<T1, T2, R> func(JFunction2<T1, T2, R> f) { return f; }
public static <T1, T2> scala.Function2<T1, T2, BoxedUnit> proc(JProcedure2<T1, T2> p) { return p; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, BoxedUnit> procSpecialized(JFunction2$mcVII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Boolean> funcSpecialized(JFunction2$mcZII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Integer> funcSpecialized(JFunction2$mcIII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Float> funcSpecialized(JFunction2$mcFII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Long> funcSpecialized(JFunction2$mcJII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Integer, Double> funcSpecialized(JFunction2$mcDII$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, BoxedUnit> procSpecialized(JFunction2$mcVIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Boolean> funcSpecialized(JFunction2$mcZIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Integer> funcSpecialized(JFunction2$mcIIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Float> funcSpecialized(JFunction2$mcFIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Long> funcSpecialized(JFunction2$mcJIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Long, Double> funcSpecialized(JFunction2$mcDIJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, BoxedUnit> procSpecialized(JFunction2$mcVID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Boolean> funcSpecialized(JFunction2$mcZID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Integer> funcSpecialized(JFunction2$mcIID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Float> funcSpecialized(JFunction2$mcFID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Long> funcSpecialized(JFunction2$mcJID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Integer, Double, Double> funcSpecialized(JFunction2$mcDID$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, BoxedUnit> procSpecialized(JFunction2$mcVJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Boolean> funcSpecialized(JFunction2$mcZJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Integer> funcSpecialized(JFunction2$mcIJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Float> funcSpecialized(JFunction2$mcFJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Long> funcSpecialized(JFunction2$mcJJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Integer, Double> funcSpecialized(JFunction2$mcDJI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, BoxedUnit> procSpecialized(JFunction2$mcVJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Boolean> funcSpecialized(JFunction2$mcZJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Integer> funcSpecialized(JFunction2$mcIJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Float> funcSpecialized(JFunction2$mcFJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Long> funcSpecialized(JFunction2$mcJJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Long, Double> funcSpecialized(JFunction2$mcDJJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, BoxedUnit> procSpecialized(JFunction2$mcVJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Boolean> funcSpecialized(JFunction2$mcZJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Integer> funcSpecialized(JFunction2$mcIJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Float> funcSpecialized(JFunction2$mcFJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Long> funcSpecialized(JFunction2$mcJJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Long, Double, Double> funcSpecialized(JFunction2$mcDJD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, BoxedUnit> procSpecialized(JFunction2$mcVDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Boolean> funcSpecialized(JFunction2$mcZDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Integer> funcSpecialized(JFunction2$mcIDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Float> funcSpecialized(JFunction2$mcFDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Long> funcSpecialized(JFunction2$mcJDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Integer, Double> funcSpecialized(JFunction2$mcDDI$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, BoxedUnit> procSpecialized(JFunction2$mcVDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Boolean> funcSpecialized(JFunction2$mcZDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Integer> funcSpecialized(JFunction2$mcIDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Float> funcSpecialized(JFunction2$mcFDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Long> funcSpecialized(JFunction2$mcJDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Long, Double> funcSpecialized(JFunction2$mcDDJ$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, BoxedUnit> procSpecialized(JFunction2$mcVDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Boolean> funcSpecialized(JFunction2$mcZDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Integer> funcSpecialized(JFunction2$mcIDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Float> funcSpecialized(JFunction2$mcFDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Long> funcSpecialized(JFunction2$mcJDD$sp f) { return f; }
+ @SuppressWarnings("unchecked")
public static scala.Function2<Double, Double, Double> funcSpecialized(JFunction2$mcDDD$sp f) { return f; }
public static <T1, T2, T3, R> scala.Function3<T1, T2, T3, R> func(JFunction3<T1, T2, T3, R> f) { return f; }
public static <T1, T2, T3> scala.Function3<T1, T2, T3, BoxedUnit> proc(JProcedure3<T1, T2, T3> p) { return p; }
diff --git a/src/scala/compat/java8/JFunction1.java b/src/scala/compat/java8/JFunction1.java
index cbd896282..69176cdea 100644
--- a/src/scala/compat/java8/JFunction1.java
+++ b/src/scala/compat/java8/JFunction1.java
@@ -11,229 +11,303 @@ public interface JFunction1<T1, R> extends scala.Function1<T1, R> {
};
@Override
+ @SuppressWarnings("unchecked")
default <A> scala.Function1<T1, A> andThen(scala.Function1<R, A> g) {
return scala.Function1$class.andThen(this, g);
}
@Override
+ @SuppressWarnings("unchecked")
default <A> scala.Function1<A, R> compose(scala.Function1<A, T1> g) {
return scala.Function1$class.compose(this, g);
}
+ @SuppressWarnings("unchecked")
default void apply$mcVI$sp(int v1) {
apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZI$sp(int v1) {
return (Boolean) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcII$sp(int v1) {
return (Integer) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFI$sp(int v1) {
return (Float) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJI$sp(int v1) {
return (Long) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDI$sp(int v1) {
return (Double) apply((T1) ((Integer) v1));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJ$sp(long v1) {
apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJ$sp(long v1) {
return (Boolean) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJ$sp(long v1) {
return (Integer) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJ$sp(long v1) {
return (Float) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJ$sp(long v1) {
return (Long) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJ$sp(long v1) {
return (Double) apply((T1) ((Long) v1));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVF$sp(float v1) {
apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZF$sp(float v1) {
return (Boolean) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIF$sp(float v1) {
return (Integer) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFF$sp(float v1) {
return (Float) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJF$sp(float v1) {
return (Long) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDF$sp(float v1) {
return (Double) apply((T1) ((Float) v1));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVD$sp(double v1) {
apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZD$sp(double v1) {
return (Boolean) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default int apply$mcID$sp(double v1) {
return (Integer) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFD$sp(double v1) {
return (Float) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJD$sp(double v1) {
return (Long) apply((T1) ((Double) v1));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDD$sp(double v1) {
return (Double) apply((T1) ((Double) v1));
}
-
+
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcII$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDI$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcIJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDJ$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcIF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDF$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcVD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcZD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcID$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcFD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcJD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 compose$mcDD$sp(scala.Function1 g) {
return compose(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcII$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDI$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcIJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDJ$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcIF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDF$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcVD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcZD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcID$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcFD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcJD$sp(scala.Function1 g) {
return andThen(g);
}
+ @SuppressWarnings("unchecked")
default scala.Function1 andThen$mcDD$sp(scala.Function1 g) {
return andThen(g);
}
diff --git a/src/scala/compat/java8/JFunction10.java b/src/scala/compat/java8/JFunction10.java
index c4d190760..8519ac223 100644
--- a/src/scala/compat/java8/JFunction10.java
+++ b/src/scala/compat/java8/JFunction10.java
@@ -10,10 +10,12 @@ public interface JFunction10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, R> extends
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, R>>>>>>>>>> curried() {
return scala.Function10$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple10<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>, R> tupled() {
return scala.Function10$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction11.java b/src/scala/compat/java8/JFunction11.java
index faa352cf7..032ee40af 100644
--- a/src/scala/compat/java8/JFunction11.java
+++ b/src/scala/compat/java8/JFunction11.java
@@ -10,10 +10,12 @@ public interface JFunction11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, R> ex
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, R>>>>>>>>>>> curried() {
return scala.Function11$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple11<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11>, R> tupled() {
return scala.Function11$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction12.java b/src/scala/compat/java8/JFunction12.java
index 1d67e25c9..3f3eff492 100644
--- a/src/scala/compat/java8/JFunction12.java
+++ b/src/scala/compat/java8/JFunction12.java
@@ -10,10 +10,12 @@ public interface JFunction12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, R>>>>>>>>>>>> curried() {
return scala.Function12$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple12<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12>, R> tupled() {
return scala.Function12$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction13.java b/src/scala/compat/java8/JFunction13.java
index 7872108d1..2bed6ee97 100644
--- a/src/scala/compat/java8/JFunction13.java
+++ b/src/scala/compat/java8/JFunction13.java
@@ -10,10 +10,12 @@ public interface JFunction13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, R>>>>>>>>>>>>> curried() {
return scala.Function13$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple13<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13>, R> tupled() {
return scala.Function13$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction14.java b/src/scala/compat/java8/JFunction14.java
index 34b45d634..77fac237d 100644
--- a/src/scala/compat/java8/JFunction14.java
+++ b/src/scala/compat/java8/JFunction14.java
@@ -10,10 +10,12 @@ public interface JFunction14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, R>>>>>>>>>>>>>> curried() {
return scala.Function14$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple14<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14>, R> tupled() {
return scala.Function14$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction15.java b/src/scala/compat/java8/JFunction15.java
index 55e4607fe..25adc1679 100644
--- a/src/scala/compat/java8/JFunction15.java
+++ b/src/scala/compat/java8/JFunction15.java
@@ -10,10 +10,12 @@ public interface JFunction15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, R>>>>>>>>>>>>>>> curried() {
return scala.Function15$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple15<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15>, R> tupled() {
return scala.Function15$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction16.java b/src/scala/compat/java8/JFunction16.java
index d2795ebf5..fda1ea0c7 100644
--- a/src/scala/compat/java8/JFunction16.java
+++ b/src/scala/compat/java8/JFunction16.java
@@ -10,10 +10,12 @@ public interface JFunction16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, R>>>>>>>>>>>>>>>> curried() {
return scala.Function16$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple16<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16>, R> tupled() {
return scala.Function16$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction17.java b/src/scala/compat/java8/JFunction17.java
index 9ab7715d1..89cb1b312 100644
--- a/src/scala/compat/java8/JFunction17.java
+++ b/src/scala/compat/java8/JFunction17.java
@@ -10,10 +10,12 @@ public interface JFunction17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, R>>>>>>>>>>>>>>>>> curried() {
return scala.Function17$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple17<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17>, R> tupled() {
return scala.Function17$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction18.java b/src/scala/compat/java8/JFunction18.java
index fba19eb38..428cee961 100644
--- a/src/scala/compat/java8/JFunction18.java
+++ b/src/scala/compat/java8/JFunction18.java
@@ -10,10 +10,12 @@ public interface JFunction18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, R>>>>>>>>>>>>>>>>>> curried() {
return scala.Function18$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple18<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18>, R> tupled() {
return scala.Function18$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction19.java b/src/scala/compat/java8/JFunction19.java
index f56551431..95e47df7e 100644
--- a/src/scala/compat/java8/JFunction19.java
+++ b/src/scala/compat/java8/JFunction19.java
@@ -10,10 +10,12 @@ public interface JFunction19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, R>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function19$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple19<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19>, R> tupled() {
return scala.Function19$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction2.java b/src/scala/compat/java8/JFunction2.java
index aa023a19f..bad1493bb 100644
--- a/src/scala/compat/java8/JFunction2.java
+++ b/src/scala/compat/java8/JFunction2.java
@@ -10,499 +10,663 @@ public interface JFunction2<T1, T2, R> extends scala.Function2<T1, T2, R> {
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, R>> curried() {
return scala.Function2$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple2<T1, T2>, R> tupled() {
return scala.Function2$class.tupled(this);
}
+ @SuppressWarnings("unchecked")
default void apply$mcVII$sp(int v1, int v2) {
apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZII$sp(int v1, int v2) {
return (Boolean) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIII$sp(int v1, int v2) {
return (Integer) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFII$sp(int v1, int v2) {
return (Float) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJII$sp(int v1, int v2) {
return (Long) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDII$sp(int v1, int v2) {
return (Double) apply((T1) ((Integer) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVIJ$sp(int v1, long v2) {
apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZIJ$sp(int v1, long v2) {
return (Boolean) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIIJ$sp(int v1, long v2) {
return (Integer) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFIJ$sp(int v1, long v2) {
return (Float) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJIJ$sp(int v1, long v2) {
return (Long) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDIJ$sp(int v1, long v2) {
return (Double) apply((T1) ((Integer) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVID$sp(int v1, double v2) {
apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZID$sp(int v1, double v2) {
return (Boolean) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIID$sp(int v1, double v2) {
return (Integer) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFID$sp(int v1, double v2) {
return (Float) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJID$sp(int v1, double v2) {
return (Long) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDID$sp(int v1, double v2) {
return (Double) apply((T1) ((Integer) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJI$sp(long v1, int v2) {
apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJI$sp(long v1, int v2) {
return (Boolean) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJI$sp(long v1, int v2) {
return (Integer) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJI$sp(long v1, int v2) {
return (Float) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJI$sp(long v1, int v2) {
return (Long) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJI$sp(long v1, int v2) {
return (Double) apply((T1) ((Long) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJJ$sp(long v1, long v2) {
apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJJ$sp(long v1, long v2) {
return (Boolean) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJJ$sp(long v1, long v2) {
return (Integer) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJJ$sp(long v1, long v2) {
return (Float) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJJ$sp(long v1, long v2) {
return (Long) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJJ$sp(long v1, long v2) {
return (Double) apply((T1) ((Long) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVJD$sp(long v1, double v2) {
apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZJD$sp(long v1, double v2) {
return (Boolean) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIJD$sp(long v1, double v2) {
return (Integer) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFJD$sp(long v1, double v2) {
return (Float) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJJD$sp(long v1, double v2) {
return (Long) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDJD$sp(long v1, double v2) {
return (Double) apply((T1) ((Long) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVDI$sp(double v1, int v2) {
apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZDI$sp(double v1, int v2) {
return (Boolean) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIDI$sp(double v1, int v2) {
return (Integer) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFDI$sp(double v1, int v2) {
return (Float) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJDI$sp(double v1, int v2) {
return (Long) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDDI$sp(double v1, int v2) {
return (Double) apply((T1) ((Double) v1), (T2) ((Integer) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVDJ$sp(double v1, long v2) {
apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZDJ$sp(double v1, long v2) {
return (Boolean) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIDJ$sp(double v1, long v2) {
return (Integer) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFDJ$sp(double v1, long v2) {
return (Float) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJDJ$sp(double v1, long v2) {
return (Long) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDDJ$sp(double v1, long v2) {
return (Double) apply((T1) ((Double) v1), (T2) ((Long) v2));
}
+ @SuppressWarnings("unchecked")
default void apply$mcVDD$sp(double v1, double v2) {
apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default boolean apply$mcZDD$sp(double v1, double v2) {
return (Boolean) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default int apply$mcIDD$sp(double v1, double v2) {
return (Integer) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default float apply$mcFDD$sp(double v1, double v2) {
return (Float) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default long apply$mcJDD$sp(double v1, double v2) {
return (Long) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default double apply$mcDDD$sp(double v1, double v2) {
return (Double) apply((T1) ((Double) v1), (T2) ((Double) v2));
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDII$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDIJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDID$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDJI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDJJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDJD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDDI$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDDJ$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcVDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcZDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcIDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcFDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcJDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 curried$mcDDD$sp() {
return curried();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDII$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDIJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDID$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDJI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDJJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDJD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDDI$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDDJ$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcVDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcZDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcIDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcFDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcJDD$sp() {
return tupled();
}
+ @SuppressWarnings("unchecked")
default scala.Function1 tupled$mcDDD$sp() {
return tupled();
}
diff --git a/src/scala/compat/java8/JFunction20.java b/src/scala/compat/java8/JFunction20.java
index dd517c349..ed30b41ef 100644
--- a/src/scala/compat/java8/JFunction20.java
+++ b/src/scala/compat/java8/JFunction20.java
@@ -10,10 +10,12 @@ public interface JFunction20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, scala.Function1<T20, R>>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function20$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple20<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20>, R> tupled() {
return scala.Function20$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction21.java b/src/scala/compat/java8/JFunction21.java
index c2e19b30a..aa6da8084 100644
--- a/src/scala/compat/java8/JFunction21.java
+++ b/src/scala/compat/java8/JFunction21.java
@@ -10,10 +10,12 @@ public interface JFunction21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, scala.Function1<T20, scala.Function1<T21, R>>>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function21$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple21<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21>, R> tupled() {
return scala.Function21$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction22.java b/src/scala/compat/java8/JFunction22.java
index 76aa230ea..532145157 100644
--- a/src/scala/compat/java8/JFunction22.java
+++ b/src/scala/compat/java8/JFunction22.java
@@ -10,10 +10,12 @@ public interface JFunction22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12,
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, scala.Function1<T10, scala.Function1<T11, scala.Function1<T12, scala.Function1<T13, scala.Function1<T14, scala.Function1<T15, scala.Function1<T16, scala.Function1<T17, scala.Function1<T18, scala.Function1<T19, scala.Function1<T20, scala.Function1<T21, scala.Function1<T22, R>>>>>>>>>>>>>>>>>>>>>> curried() {
return scala.Function22$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple22<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22>, R> tupled() {
return scala.Function22$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction3.java b/src/scala/compat/java8/JFunction3.java
index 938e10a05..731608b2e 100644
--- a/src/scala/compat/java8/JFunction3.java
+++ b/src/scala/compat/java8/JFunction3.java
@@ -10,10 +10,12 @@ public interface JFunction3<T1, T2, T3, R> extends scala.Function3<T1, T2, T3, R
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, R>>> curried() {
return scala.Function3$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple3<T1, T2, T3>, R> tupled() {
return scala.Function3$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction4.java b/src/scala/compat/java8/JFunction4.java
index 08687fbfd..9bff4c835 100644
--- a/src/scala/compat/java8/JFunction4.java
+++ b/src/scala/compat/java8/JFunction4.java
@@ -10,10 +10,12 @@ public interface JFunction4<T1, T2, T3, T4, R> extends scala.Function4<T1, T2, T
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, R>>>> curried() {
return scala.Function4$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple4<T1, T2, T3, T4>, R> tupled() {
return scala.Function4$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction5.java b/src/scala/compat/java8/JFunction5.java
index 84b9b97cb..a5566a5ba 100644
--- a/src/scala/compat/java8/JFunction5.java
+++ b/src/scala/compat/java8/JFunction5.java
@@ -10,10 +10,12 @@ public interface JFunction5<T1, T2, T3, T4, T5, R> extends scala.Function5<T1, T
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, R>>>>> curried() {
return scala.Function5$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple5<T1, T2, T3, T4, T5>, R> tupled() {
return scala.Function5$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction6.java b/src/scala/compat/java8/JFunction6.java
index 957d5470d..b30fb5f47 100644
--- a/src/scala/compat/java8/JFunction6.java
+++ b/src/scala/compat/java8/JFunction6.java
@@ -10,10 +10,12 @@ public interface JFunction6<T1, T2, T3, T4, T5, T6, R> extends scala.Function6<T
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, R>>>>>> curried() {
return scala.Function6$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple6<T1, T2, T3, T4, T5, T6>, R> tupled() {
return scala.Function6$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction7.java b/src/scala/compat/java8/JFunction7.java
index 51352aba6..4a4a92cba 100644
--- a/src/scala/compat/java8/JFunction7.java
+++ b/src/scala/compat/java8/JFunction7.java
@@ -10,10 +10,12 @@ public interface JFunction7<T1, T2, T3, T4, T5, T6, T7, R> extends scala.Functio
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, R>>>>>>> curried() {
return scala.Function7$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple7<T1, T2, T3, T4, T5, T6, T7>, R> tupled() {
return scala.Function7$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction8.java b/src/scala/compat/java8/JFunction8.java
index 5e0a776ff..825236e48 100644
--- a/src/scala/compat/java8/JFunction8.java
+++ b/src/scala/compat/java8/JFunction8.java
@@ -10,10 +10,12 @@ public interface JFunction8<T1, T2, T3, T4, T5, T6, T7, T8, R> extends scala.Fun
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, R>>>>>>>> curried() {
return scala.Function8$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple8<T1, T2, T3, T4, T5, T6, T7, T8>, R> tupled() {
return scala.Function8$class.tupled(this);
}
diff --git a/src/scala/compat/java8/JFunction9.java b/src/scala/compat/java8/JFunction9.java
index dc9b8e71c..d0e40c316 100644
--- a/src/scala/compat/java8/JFunction9.java
+++ b/src/scala/compat/java8/JFunction9.java
@@ -10,10 +10,12 @@ public interface JFunction9<T1, T2, T3, T4, T5, T6, T7, T8, T9, R> extends scala
default void $init$() {
};
+ @SuppressWarnings("unchecked")
default scala.Function1<T1, scala.Function1<T2, scala.Function1<T3, scala.Function1<T4, scala.Function1<T5, scala.Function1<T6, scala.Function1<T7, scala.Function1<T8, scala.Function1<T9, R>>>>>>>>> curried() {
return scala.Function9$class.curried(this);
}
+ @SuppressWarnings("unchecked")
default scala.Function1<scala.Tuple9<T1, T2, T3, T4, T5, T6, T7, T8, T9>, R> tupled() {
return scala.Function9$class.tupled(this);
}
diff --git a/src/strawman/collections/CollectionStrawMan4.scala b/src/strawman/collections/CollectionStrawMan4.scala
index 9159b1cfc..7e8de2c82 100644
--- a/src/strawman/collections/CollectionStrawMan4.scala
+++ b/src/strawman/collections/CollectionStrawMan4.scala
@@ -2,6 +2,8 @@ package strawman.collections
import Predef.{augmentString => _, wrapString => _, _}
import scala.reflect.ClassTag
+import annotation.unchecked.uncheckedVariance
+import annotation.tailrec
/** A strawman architecture for new collections. It contains some
* example collection classes and methods with the intent to expose
@@ -10,6 +12,8 @@ import scala.reflect.ClassTag
* strengths and weaknesses of different collection architectures.
*
* For a test file, see tests/run/CollectionTests.scala.
+ *
+ * Strawman4 is like strawman1, but built over views instead of by-name iterators
*/
object CollectionStrawMan4 {
@@ -20,14 +24,7 @@ object CollectionStrawMan4 {
def iterator: Iterator[A]
}
- /** Base trait for generic collections */
- trait Iterable[+A] extends IterableOnce[A] with FromIterable[Iterable] {
- def iterator: Iterator[A]
- def view: View[A] = View.fromIterator(iterator)
- def knownLength: Int = -1
- }
-
- /** Base trait for instances that can construct a collection from an iterator */
+ /** Base trait for instances that can construct a collection from an iterable */
trait FromIterable[+C[X] <: Iterable[X]] {
def fromIterable[B](v: Iterable[B]): C[B]
}
@@ -38,16 +35,27 @@ object CollectionStrawMan4 {
def apply[A](xs: A*): C[A] = fromIterable(View.Elems(xs: _*))
}
+ /** Base trait for generic collections */
+ trait Iterable[+A] extends IterableOnce[A] with FromIterable[Iterable] {
+ def view: View[A] = View.fromIterator(iterator) // view is overridden, cannot be defined in ops
+ def knownLength: Int = -1
+ }
+
/** Base trait for sequence collections */
trait Seq[+A] extends Iterable[A] with FromIterable[Seq] {
def apply(i: Int): A
def length: Int
}
+ /** Base trait for collection builders */
trait Builder[-A, +To] {
def +=(x: A): this.type
- def ++=(xs: IterableOnce[A]): Unit = xs.iterator.foreach(+=)
def result: To
+
+ def ++=(xs: IterableOnce[A]): this.type = {
+ xs.iterator.foreach(+=)
+ this
+ }
}
/* ------------ Operations ----------------------------------- */
@@ -134,17 +142,18 @@ object CollectionStrawMan4 {
require(!isEmpty)
if (i == 0) head else tail.apply(i - 1)
}
- def :::[B >: A](prefix: List[B]): List[B] =
- if (prefix.isEmpty) this
- else Cons(prefix.head, prefix.tail ::: this)
def length: Int =
if (isEmpty) 0 else 1 + tail.length
+ def ++:[B >: A](prefix: List[B]): List[B] =
+ if (prefix.isEmpty) this
+ else Cons(prefix.head, prefix.tail ++: this)
}
- case class Cons[+A](x: A, xs: List[A]) extends List[A] {
+ case class Cons[+A](x: A, private[collections] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally
+ extends List[A] {
def isEmpty = false
def head = x
- def tail = xs
+ def tail = next
}
case object Nil extends List[Nothing] {
@@ -157,20 +166,64 @@ object CollectionStrawMan4 {
def fromIterator[B](it: Iterator[B]): List[B] =
if (it.hasNext) Cons(it.next, fromIterator(it)) else Nil
def fromIterable[B](c: Iterable[B]): List[B] = c match {
- case View.Concat(xs, ys: Iterable[B]) =>
- fromIterable(xs) ::: fromIterable(ys)
+ case View.Concat(xs, ys: List[B]) =>
+ fromIterable(xs) ++: ys
case View.Drop(xs: List[B], n) =>
- var i = 0
- var ys = xs
- while (i < n && !xs.isEmpty) {
- ys = ys.tail
- i += 1
- }
- ys
+ @tailrec def loop(xs: List[B], n: Int): List[B] =
+ if (n > 0) loop(xs.tail, n - 1) else xs
+ loop(xs, n)
+ case c: List[B] => c
case _ => fromIterator(c.iterator)
}
}
+ /** Concrete collection type: ListBuffer */
+ class ListBuffer[A] extends Seq[A] with FromIterable[ListBuffer] with Builder[A, List[A]] {
+ private var first, last: List[A] = Nil
+ private var aliased = false
+ def iterator = first.iterator
+ def fromIterable[B](coll: Iterable[B]) = ListBuffer.fromIterable(coll)
+ def apply(i: Int) = first.apply(i)
+ def length = first.length
+
+ private def copyElems(): Unit = {
+ val buf = ListBuffer.fromIterable(result)
+ first = buf.first
+ last = buf.last
+ aliased = false
+ }
+ def result = {
+ aliased = true
+ first
+ }
+ def +=(elem: A) = {
+ if (aliased) copyElems()
+ val last1 = Cons(elem, Nil)
+ last match {
+ case last: Cons[A] => last.next = last1
+ case _ => first = last1
+ }
+ last = last1
+ this
+ }
+ override def toString: String =
+ if (first.isEmpty) "ListBuffer()"
+ else {
+ val b = new StringBuilder("ListBuffer(").append(first.head)
+ first.tail.foldLeft(b)(_.append(", ").append(_)).append(")").toString
+ }
+ }
+
+ object ListBuffer extends IterableFactory[ListBuffer] {
+ def fromIterable[B](coll: Iterable[B]): ListBuffer[B] = coll match {
+ case pd @ View.Partitioned(partition: View.Partition[B] @unchecked) =>
+ partition.distribute(new ListBuffer[B]())
+ new ListBuffer[B] ++= pd.forced.get
+ case _ =>
+ new ListBuffer[B] ++= coll
+ }
+ }
+
/** Concrete collection type: ArrayBuffer */
class ArrayBuffer[A] private (initElems: Array[AnyRef], initLength: Int)
extends Seq[A] with FromIterable[ArrayBuffer] with Builder[A, ArrayBuffer[A]] {
@@ -214,7 +267,7 @@ object CollectionStrawMan4 {
Array.copy(fst.elems, fst.start, elems, 0, fst.length)
Array.copy(snd.elems, snd.start, elems, fst.length, snd.length)
new ArrayBuffer(elems, elems.length)
- case pd @ View.Partitioned(partition: View.Partition[B]) =>
+ case pd @ View.Partitioned(partition: View.Partition[B] @unchecked) =>
partition.distribute(new ArrayBuffer[B]())
pd.forced.get.asInstanceOf[ArrayBuffer[B]]
case c if c.knownLength >= 0 =>
@@ -234,12 +287,6 @@ object CollectionStrawMan4 {
def apply(n: Int) = elems(start + n).asInstanceOf[A]
}
- case class StringView(s: String) extends RandomAccessView[Char] {
- val start = 0
- val end = s.length
- def apply(n: Int) = s.charAt(n)
- }
-
/** Concrete collection type: String */
implicit class StringOps(val s: String) extends AnyVal with Ops[Char] {
def iterator: Iterator[Char] = new StringView(s).iterator
@@ -277,6 +324,12 @@ object CollectionStrawMan4 {
def ++(xs: String): String = s + xs
}
+ case class StringView(s: String) extends RandomAccessView[Char] {
+ val start = 0
+ val end = s.length
+ def apply(n: Int) = s.charAt(n)
+ }
+
/* ------------ Views --------------------------------------- */
/** A lazy iterable */
@@ -322,6 +375,8 @@ object CollectionStrawMan4 {
}
case class Partition[A](val underlying: Iterable[A], p: A => Boolean) {
val left, right = Partitioned(this)
+ // `distribute` makes up for the lack of generic push-based functionality.
+ // It forces both halves of the partition with a given builder.
def distribute(bf: => Builder[A, Iterable[A]]) = {
val lb, rb = bf
val it = underlying.iterator
diff --git a/src/strawman/collections/CollectionStrawMan5.scala b/src/strawman/collections/CollectionStrawMan5.scala
new file mode 100644
index 000000000..5d04c2c98
--- /dev/null
+++ b/src/strawman/collections/CollectionStrawMan5.scala
@@ -0,0 +1,522 @@
+package strawman.collections
+
+import Predef.{augmentString => _, wrapString => _, _}
+import scala.reflect.ClassTag
+import annotation.unchecked.uncheckedVariance
+import annotation.tailrec
+
+/** A strawman architecture for new collections. It contains some
+ * example collection classes and methods with the intent to expose
+ * some key issues. It would be good to compare this to other
+ * implementations of the same functionality, to get an idea of the
+ * strengths and weaknesses of different collection architectures.
+ *
+ * For a test file, see tests/run/CollectionTests.scala.
+ *
+ * Strawman5 is like strawman4, but using inheritance through ...Like traits
+ * instead of decorators.
+ *
+ * Advantage: Much easier to specialize. See partition for strict (buildable) collections
+ * or drop for Lists.
+ *
+ * Disadvantage: More "weird" types in base traits; some awkwardness with
+ * @uncheckedVariance.
+ */
+object CollectionStrawMan5 {
+
+ /* ------------ Base Traits -------------------------------- */
+
+ /** Iterator can be used only once */
+ trait IterableOnce[+A] {
+ def iterator: Iterator[A]
+ }
+
+ /** Base trait for instances that can construct a collection from an iterable */
+ trait FromIterable[+C[X] <: Iterable[X]] {
+ def fromIterable[B](it: Iterable[B]): C[B]
+ }
+
+ /** Base trait for companion objects of collections */
+ trait IterableFactory[+C[X] <: Iterable[X]] extends FromIterable[C] {
+ def empty[X]: C[X] = fromIterable(View.Empty)
+ def apply[A](xs: A*): C[A] = fromIterable(View.Elems(xs: _*))
+ }
+
+ /** Base trait for generic collections */
+ trait Iterable[+A] extends IterableOnce[A] with IterableLike[A, Iterable] {
+ protected def coll: Iterable[A] = this
+ def knownLength: Int = -1
+ }
+
+ /** Base trait for sequence collections */
+ trait Seq[+A] extends Iterable[A] with SeqLike[A, Seq] {
+ def apply(i: Int): A
+ def length: Int
+ }
+
+ /** Base trait for strict collections */
+ trait Buildable[+A, +To <: Iterable[A]] extends Iterable[A] {
+ protected[this] def newBuilder: Builder[A, To]
+ override def partition(p: A => Boolean): (To, To) = {
+ val l, r = newBuilder
+ iterator.foreach(x => (if (p(x)) l else r) += x)
+ (l.result, r.result)
+ }
+ // one might also override other transforms here to avoid generating
+ // iterators if it helps efficiency.
+ }
+
+ /** Base trait for collection builders */
+ trait Builder[-A, +To] {
+ def +=(x: A): this.type
+ def result: To
+
+ def ++=(xs: IterableOnce[A]): this.type = {
+ xs.iterator.foreach(+=)
+ this
+ }
+ }
+
+ /* ------------ Operations ----------------------------------- */
+
+ /** Base trait for Iterable operations
+ *
+ * VarianceNote
+ * ============
+ *
+ * We require that for all child classes of Iterable the variance of
+ * the child class and the variance of the `C` parameter passed to `IterableLike`
+ * are the same. We cannot express this since we lack variance polymorphism. That's
+ * why we have to resort at some places to write `C[A @uncheckedVariance]`.
+ *
+ */
+ trait IterableLike[+A, +C[X] <: Iterable[X]]
+ extends FromIterable[C]
+ with IterableOps[A]
+ with IterableMonoTransforms[A, C[A @uncheckedVariance]] // sound bcs of VarianceNote
+ with IterablePolyTransforms[A, C] {
+ protected[this] def fromLikeIterable(coll: Iterable[A]): C[A] = fromIterable(coll)
+ }
+
+ /** Base trait for Seq operations */
+ trait SeqLike[+A, +C[X] <: Seq[X]]
+ extends IterableLike[A, C] with SeqMonoTransforms[A, C[A @uncheckedVariance]] // sound bcs of VarianceNote
+
+ trait IterableOps[+A] extends Any {
+ def iterator: Iterator[A]
+ def foreach(f: A => Unit): Unit = iterator.foreach(f)
+ def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op)
+ def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op)
+ def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p)
+ def isEmpty: Boolean = !iterator.hasNext
+ def head: A = iterator.next
+ def view: View[A] = View.fromIterator(iterator)
+ }
+
+ trait IterableMonoTransforms[+A, +Repr] extends Any {
+ protected def coll: Iterable[A]
+ protected[this] def fromLikeIterable(coll: Iterable[A]): Repr
+ def filter(p: A => Boolean): Repr = fromLikeIterable(View.Filter(coll, p))
+ def partition(p: A => Boolean): (Repr, Repr) = {
+ val pn = View.Partition(coll, p)
+ (fromLikeIterable(pn.left), fromLikeIterable(pn.right))
+ }
+ def drop(n: Int): Repr = fromLikeIterable(View.Drop(coll, n))
+ def to[C[X] <: Iterable[X]](fi: FromIterable[C]): C[A @uncheckedVariance] =
+ // variance seems sound because `to` could just as well have been added
+ // as a decorator. We should investigate this further to be sure.
+ fi.fromIterable(coll)
+ }
+
+ trait IterablePolyTransforms[+A, +C[A]] extends Any {
+ protected def coll: Iterable[A]
+ def fromIterable[B](coll: Iterable[B]): C[B]
+ def map[B](f: A => B): C[B] = fromIterable(View.Map(coll, f))
+ def flatMap[B](f: A => IterableOnce[B]): C[B] = fromIterable(View.FlatMap(coll, f))
+ def ++[B >: A](xs: IterableOnce[B]): C[B] = fromIterable(View.Concat(coll, xs))
+ def zip[B](xs: IterableOnce[B]): C[(A @uncheckedVariance, B)] = fromIterable(View.Zip(coll, xs))
+ // sound bcs of VarianceNote
+ }
+
+ trait SeqMonoTransforms[+A, +Repr] extends Any with IterableMonoTransforms[A, Repr] {
+ def reverse: Repr = {
+ var xs: List[A] = Nil
+ var it = coll.iterator
+ while (it.hasNext) xs = new Cons(it.next, xs)
+ fromLikeIterable(xs)
+ }
+ }
+
+ /* --------- Concrete collection types ------------------------------- */
+
+ /** Concrete collection type: List */
+ sealed trait List[+A] extends Seq[A] with SeqLike[A, List] with Buildable[A, List[A]] { self =>
+ def isEmpty: Boolean
+ def head: A
+ def tail: List[A]
+ def iterator = new Iterator[A] {
+ private[this] var current = self
+ def hasNext = !current.isEmpty
+ def next = { val r = current.head; current = current.tail; r }
+ }
+ def fromIterable[B](c: Iterable[B]): List[B] = List.fromIterable(c)
+ def apply(i: Int): A = {
+ require(!isEmpty)
+ if (i == 0) head else tail.apply(i - 1)
+ }
+ def length: Int =
+ if (isEmpty) 0 else 1 + tail.length
+ protected[this] def newBuilder = new ListBuffer[A]
+ def ++:[B >: A](prefix: List[B]): List[B] =
+ if (prefix.isEmpty) this
+ else Cons(prefix.head, prefix.tail ++: this)
+ override def ++[B >: A](xs: IterableOnce[B]): List[B] = xs match {
+ case xs: List[B] => this ++: xs
+ case _ => super.++(xs)
+ }
+ @tailrec final override def drop(n: Int) =
+ if (n > 0) tail.drop(n - 1) else this
+ }
+
+ case class Cons[+A](x: A, private[collections] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally
+ extends List[A] {
+ override def isEmpty = false
+ override def head = x
+ def tail = next
+ }
+
+ case object Nil extends List[Nothing] {
+ override def isEmpty = true
+ override def head = ???
+ def tail = ???
+ }
+
+ object List extends IterableFactory[List] {
+ def fromIterable[B](coll: Iterable[B]): List[B] = coll match {
+ case coll: List[B] => coll
+ case _ => ListBuffer.fromIterable(coll).result
+ }
+ }
+
+ /** Concrete collection type: ListBuffer */
+ class ListBuffer[A] extends Seq[A] with SeqLike[A, ListBuffer] with Builder[A, List[A]] {
+ private var first, last: List[A] = Nil
+ private var aliased = false
+ def iterator = first.iterator
+ def fromIterable[B](coll: Iterable[B]) = ListBuffer.fromIterable(coll)
+ def apply(i: Int) = first.apply(i)
+ def length = first.length
+
+ private def copyElems(): Unit = {
+ val buf = ListBuffer.fromIterable(result)
+ first = buf.first
+ last = buf.last
+ aliased = false
+ }
+ def result = {
+ aliased = true
+ first
+ }
+ def +=(elem: A) = {
+ if (aliased) copyElems()
+ val last1 = Cons(elem, Nil)
+ last match {
+ case last: Cons[A] => last.next = last1
+ case _ => first = last1
+ }
+ last = last1
+ this
+ }
+ override def toString: String =
+ if (first.isEmpty) "ListBuffer()"
+ else {
+ val b = new StringBuilder("ListBuffer(").append(first.head)
+ first.tail.foldLeft(b)(_.append(", ").append(_)).append(")").toString
+ }
+ }
+
+ object ListBuffer extends IterableFactory[ListBuffer] {
+ def fromIterable[B](coll: Iterable[B]): ListBuffer[B] = new ListBuffer[B] ++= coll
+ }
+
+ /** Concrete collection type: ArrayBuffer */
+ class ArrayBuffer[A] private (initElems: Array[AnyRef], initLength: Int)
+ extends Seq[A] with SeqLike[A, ArrayBuffer] with Builder[A, ArrayBuffer[A]] {
+ def this() = this(new Array[AnyRef](16), 0)
+ private var elems: Array[AnyRef] = initElems
+ private var start = 0
+ private var end = initLength
+ def apply(n: Int) = elems(start + n).asInstanceOf[A]
+ def length = end - start
+ override def knownLength = length
+ override def view = new ArrayBufferView(elems, start, end)
+ def iterator = view.iterator
+ def fromIterable[B](it: Iterable[B]): ArrayBuffer[B] =
+ ArrayBuffer.fromIterable(it)
+ def +=(elem: A): this.type = {
+ if (end == elems.length) {
+ if (start > 0) {
+ Array.copy(elems, start, elems, 0, length)
+ end -= start
+ start = 0
+ }
+ else {
+ val newelems = new Array[AnyRef](end * 2)
+ Array.copy(elems, 0, newelems, 0, end)
+ elems = newelems
+ }
+ }
+ elems(end) = elem.asInstanceOf[AnyRef]
+ end += 1
+ this
+ }
+ def result = this
+ def trimStart(n: Int): Unit = start += (n max 0)
+ override def ++[B >: A](xs: IterableOnce[B]): ArrayBuffer[B] = xs match {
+ case xs: ArrayBuffer[B] =>
+ val elems = new Array[AnyRef](length + xs.length)
+ Array.copy(this.elems, this.start, elems, 0, this.length)
+ Array.copy(xs.elems, xs.start, elems, this.length, xs.length)
+ new ArrayBuffer(elems, elems.length)
+ case _ => super.++(xs)
+ }
+
+ override def toString = s"ArrayBuffer(${elems.slice(start, end).mkString(", ")})"
+ }
+
+ object ArrayBuffer extends IterableFactory[ArrayBuffer] {
+ def fromIterable[B](coll: Iterable[B]): ArrayBuffer[B] =
+ if (coll.knownLength >= 0) {
+ val elems = new Array[AnyRef](coll.knownLength)
+ val it = coll.iterator
+ for (i <- 0 until elems.length) elems(i) = it.next().asInstanceOf[AnyRef]
+ new ArrayBuffer[B](elems, elems.length)
+ }
+ else {
+ val buf = new ArrayBuffer[B]
+ val it = coll.iterator
+ while (it.hasNext) buf += it.next()
+ buf
+ }
+ }
+
+ class ArrayBufferView[A](val elems: Array[AnyRef], val start: Int, val end: Int) extends RandomAccessView[A] {
+ def apply(n: Int) = elems(start + n).asInstanceOf[A]
+ }
+
+ /** Concrete collection type: String */
+ implicit class StringOps(val s: String)
+ extends AnyVal with IterableOps[Char]
+ with SeqMonoTransforms[Char, String]
+ with IterablePolyTransforms[Char, List] {
+ protected def coll = new StringView(s)
+ def iterator = coll.iterator
+ protected def fromLikeIterable(coll: Iterable[Char]): String = {
+ val sb = new StringBuilder
+ for (ch <- coll) sb.append(ch)
+ sb.toString
+ }
+ def fromIterable[B](coll: Iterable[B]): List[B] = List.fromIterable(coll)
+ def map(f: Char => Char): String = {
+ val sb = new StringBuilder
+ for (ch <- s) sb.append(f(ch))
+ sb.toString
+ }
+ def flatMap(f: Char => String): String = {
+ val sb = new StringBuilder
+ for (ch <- s) sb.append(f(ch))
+ sb.toString
+ }
+ def ++(xs: IterableOnce[Char]): String = {
+ val sb = new StringBuilder(s)
+ for (ch <- xs.iterator) sb.append(ch)
+ sb.toString
+ }
+ def ++(xs: String): String = s + xs
+ }
+
+ case class StringView(s: String) extends RandomAccessView[Char] {
+ val start = 0
+ val end = s.length
+ def apply(n: Int) = s.charAt(n)
+ }
+
+/* ---------- Views -------------------------------------------------------*/
+
+ /** Concrete collection type: View */
+ trait View[+A] extends Iterable[A] with IterableLike[A, View] {
+ override def view = this
+ override def fromIterable[B](c: Iterable[B]): View[B] = c match {
+ case c: View[B] => c
+ case _ => View.fromIterator(c.iterator)
+ }
+ }
+
+ /** View defined in terms of indexing a range */
+ trait RandomAccessView[+A] extends View[A] {
+ def start: Int
+ def end: Int
+ def apply(i: Int): A
+ def iterator: Iterator[A] = new Iterator[A] {
+ private var current = start
+ def hasNext = current < end
+ def next: A = {
+ val r = apply(current)
+ current += 1
+ r
+ }
+ }
+ override def knownLength = end - start max 0
+ }
+
+ object View {
+ def fromIterator[A](it: => Iterator[A]): View[A] = new View[A] {
+ def iterator = it
+ }
+ case object Empty extends View[Nothing] {
+ def iterator = Iterator.empty
+ override def knownLength = 0
+ }
+ case class Elems[A](xs: A*) extends View[A] {
+ def iterator = Iterator(xs: _*)
+ override def knownLength = xs.length
+ }
+ case class Filter[A](val underlying: Iterable[A], p: A => Boolean) extends View[A] {
+ def iterator = underlying.iterator.filter(p)
+ }
+ case class Partition[A](val underlying: Iterable[A], p: A => Boolean) {
+ val left = Partitioned(this, true)
+ val right = Partitioned(this, false)
+ }
+ case class Partitioned[A](partition: Partition[A], cond: Boolean) extends View[A] {
+ def iterator = partition.underlying.iterator.filter(x => partition.p(x) == cond)
+ }
+ case class Drop[A](underlying: Iterable[A], n: Int) extends View[A] {
+ def iterator = underlying.iterator.drop(n)
+ override def knownLength =
+ if (underlying.knownLength >= 0) underlying.knownLength - n max 0 else -1
+ }
+ case class Map[A, B](underlying: Iterable[A], f: A => B) extends View[B] {
+ def iterator = underlying.iterator.map(f)
+ override def knownLength = underlying.knownLength
+ }
+ case class FlatMap[A, B](underlying: Iterable[A], f: A => IterableOnce[B]) extends View[B] {
+ def iterator = underlying.iterator.flatMap(f)
+ }
+ case class Concat[A](underlying: Iterable[A], other: IterableOnce[A]) extends View[A] {
+ def iterator = underlying.iterator ++ other
+ override def knownLength = other match {
+ case other: Iterable[_] if underlying.knownLength >= 0 && other.knownLength >= 0 =>
+ underlying.knownLength + other.knownLength
+ case _ =>
+ -1
+ }
+ }
+ case class Zip[A, B](underlying: Iterable[A], other: IterableOnce[B]) extends View[(A, B)] {
+ def iterator = underlying.iterator.zip(other)
+ override def knownLength = other match {
+ case other: Iterable[_] if underlying.knownLength >= 0 && other.knownLength >= 0 =>
+ underlying.knownLength min other.knownLength
+ case _ =>
+ -1
+ }
+ }
+ }
+
+/* ---------- Iterators ---------------------------------------------------*/
+
+ /** A core Iterator class */
+ trait Iterator[+A] extends IterableOnce[A] { self =>
+ def hasNext: Boolean
+ def next(): A
+ def iterator = this
+ def foldLeft[B](z: B)(op: (B, A) => B): B =
+ if (hasNext) foldLeft(op(z, next))(op) else z
+ def foldRight[B](z: B)(op: (A, B) => B): B =
+ if (hasNext) op(next(), foldRight(z)(op)) else z
+ def foreach(f: A => Unit): Unit =
+ while (hasNext) f(next())
+ def indexWhere(p: A => Boolean): Int = {
+ var i = 0
+ while (hasNext) {
+ if (p(next())) return i
+ i += 1
+ }
+ -1
+ }
+ def filter(p: A => Boolean): Iterator[A] = new Iterator[A] {
+ private var hd: A = _
+ private var hdDefined: Boolean = false
+
+ def hasNext: Boolean = hdDefined || {
+ do {
+ if (!self.hasNext) return false
+ hd = self.next()
+ } while (!p(hd))
+ hdDefined = true
+ true
+ }
+
+ def next() =
+ if (hasNext) {
+ hdDefined = false
+ hd
+ }
+ else Iterator.empty.next()
+ }
+
+ def map[B](f: A => B): Iterator[B] = new Iterator[B] {
+ def hasNext = self.hasNext
+ def next() = f(self.next())
+ }
+
+ def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new Iterator[B] {
+ private var myCurrent: Iterator[B] = Iterator.empty
+ private def current = {
+ while (!myCurrent.hasNext && self.hasNext)
+ myCurrent = f(self.next()).iterator
+ myCurrent
+ }
+ def hasNext = current.hasNext
+ def next() = current.next()
+ }
+ def ++[B >: A](xs: IterableOnce[B]): Iterator[B] = new Iterator[B] {
+ private var myCurrent: Iterator[B] = self
+ private var first = true
+ private def current = {
+ if (!myCurrent.hasNext && first) {
+ myCurrent = xs.iterator
+ first = false
+ }
+ myCurrent
+ }
+ def hasNext = current.hasNext
+ def next() = current.next()
+ }
+ def drop(n: Int): Iterator[A] = {
+ var i = 0
+ while (i < n && hasNext) {
+ next()
+ i += 1
+ }
+ this
+ }
+ def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new Iterator[(A, B)] {
+ val thatIterator = that.iterator
+ def hasNext = self.hasNext && thatIterator.hasNext
+ def next() = (self.next(), thatIterator.next())
+ }
+ }
+
+ object Iterator {
+ val empty: Iterator[Nothing] = new Iterator[Nothing] {
+ def hasNext = false
+ def next = throw new NoSuchElementException("next on empty iterator")
+ }
+ def apply[A](xs: A*): Iterator[A] = new RandomAccessView[A] {
+ val start = 0
+ val end = xs.length
+ def apply(n: Int) = xs(n)
+ }.iterator
+ }
+}
diff --git a/src/strawman/collections/CollectionStrawMan6.scala b/src/strawman/collections/CollectionStrawMan6.scala
new file mode 100644
index 000000000..50de63488
--- /dev/null
+++ b/src/strawman/collections/CollectionStrawMan6.scala
@@ -0,0 +1,1045 @@
+package strawman.collections
+
+import Predef.{augmentString => _, wrapString => _, _}
+import scala.reflect.ClassTag
+import annotation.unchecked.uncheckedVariance
+import annotation.tailrec
+
+class LowPriority {
+ import CollectionStrawMan6._
+
+ /** Convert array to iterable via view. Lower priority than ArrayOps */
+ implicit def arrayToView[T](xs: Array[T]): ArrayView[T] = new ArrayView[T](xs)
+
+ /** Convert string to iterable via view. Lower priority than StringOps */
+ implicit def stringToView(s: String): StringView = new StringView(s)
+}
+
+/** A strawman architecture for new collections. It contains some
+ * example collection classes and methods with the intent to expose
+ * some key issues. It would be good to compare this to other
+ * implementations of the same functionality, to get an idea of the
+ * strengths and weaknesses of different collection architectures.
+ *
+ * For a test file, see tests/run/CollectionTests.scala.
+ *
+ * Strawman6 is like strawman5, and adds lazy lists (i.e. lazy streams), arrays
+ * and some utility methods (take, tail, mkString, toArray). Also, systematically
+ * uses builders for all strict collections.
+ *
+ * Types covered in this strawman:
+ *
+ * 1. Collection base types:
+ *
+ * IterableOnce, Iterable, Seq, LinearSeq, View, IndexedView
+ *
+ * 2. Collection creator base types:
+ *
+ * FromIterable, IterableFactory, Buildable, Builder
+ *
+ * 3. Types that bundle operations:
+ *
+ * IterableOps, IterableMonoTransforms, IterablePolyTransforms, IterableLike
+ * SeqMonoTransforms, SeqLike
+ *
+ * 4. Concrete collection types:
+ *
+ * List, LazyList, ListBuffer, ArrayBuffer, ArrayBufferView, StringView, ArrayView
+ *
+ * 5. Decorators for existing types
+ *
+ * StringOps, ArrayOps
+ *
+ * 6. Related non collection types:
+ *
+ * Iterator, StringBuilder
+ *
+ * Operations covered in this strawman:
+ *
+ * 1. Abstract operations, or expected to be overridden:
+ *
+ * For iterables:
+ *
+ * iterator, fromIterable, fromIterableWithSameElemType, knownLength, className
+ *
+ * For sequences:
+ *
+ * apply, length
+ *
+ * For buildables:
+ *
+ * newBuilder
+ *
+ * For builders:
+ *
+ * +=, result
+ *
+ * 2. Utility methods, might be overridden for performance:
+ *
+ * Operations returning not necessarily a collection:
+ *
+ * foreach, foldLeft, foldRight, indexWhere, isEmpty, head, size, mkString
+ *
+ * Operations returning a collection of a fixed type constructor:
+ *
+ * view, to, toArray, copyToArray
+ *
+ * Type-preserving generic transforms:
+ *
+ * filter, partition, take, drop, tail, reverse
+ *
+ * Generic transforms returning collections of different element types:
+ *
+ * map, flatMap, ++, zip
+ */
+object CollectionStrawMan6 extends LowPriority {
+
+ /* ------------ Base Traits -------------------------------- */
+
+ /** Iterator can be used only once */
+ trait IterableOnce[+A] {
+ def iterator: Iterator[A]
+ }
+
+ /** Base trait for instances that can construct a collection from an iterable */
+ trait FromIterable[+C[X] <: Iterable[X]] {
+ def fromIterable[B](it: Iterable[B]): C[B]
+ }
+
+ /** Base trait for companion objects of collections */
+ trait IterableFactory[+C[X] <: Iterable[X]] extends FromIterable[C] {
+ def empty[X]: C[X] = fromIterable(View.Empty)
+ def apply[A](xs: A*): C[A] = fromIterable(View.Elems(xs: _*))
+ }
+
+ /** Base trait for generic collections */
+ trait Iterable[+A] extends IterableOnce[A] with IterableLike[A, Iterable] {
+ /** The collection itself */
+ protected def coll: this.type = this
+ }
+
+ /** A trait representing indexable collections with finite length */
+ trait ArrayLike[+A] extends Any {
+ def length: Int
+ def apply(i: Int): A
+ }
+
+ /** Base trait for sequence collections */
+ trait Seq[+A] extends Iterable[A] with SeqLike[A, Seq] with ArrayLike[A]
+
+ /** Base trait for linearly accessed sequences that have efficient `head` and
+ * `tail` operations.
+ * Known subclasses: List, LazyList
+ */
+ trait LinearSeq[+A] extends Seq[A] with LinearSeqLike[A, LinearSeq] { self =>
+
+ /** To be overridden in implementations: */
+ def isEmpty: Boolean
+ def head: A
+ def tail: LinearSeq[A]
+
+ /** `iterator` is overridden in terms of `head` and `tail` */
+ def iterator = new Iterator[A] {
+ private[this] var current: Seq[A] = self
+ def hasNext = !current.isEmpty
+ def next = { val r = current.head; current = current.tail; r }
+ }
+
+ /** `length` is defined in terms of `iterator` */
+ def length: Int = iterator.length
+
+ /** `apply` is defined in terms of `drop`, which is in turn defined in
+ * terms of `tail`.
+ */
+ override def apply(n: Int): A = {
+ if (n < 0) throw new IndexOutOfBoundsException(n.toString)
+ val skipped = drop(n)
+ if (skipped.isEmpty) throw new IndexOutOfBoundsException(n.toString)
+ skipped.head
+ }
+ }
+
+ type IndexedSeq[+A] = Seq[A] { def view: IndexedView[A] }
+
+ /** Base trait for strict collections that can be built using a builder.
+ * @param A the element type of the collection
+ * @param Repr the type of the underlying collection
+ */
+ trait Buildable[+A, +Repr] extends Any with IterableMonoTransforms[A, Repr] {
+
+ /** Creates a new builder. */
+ protected[this] def newBuilder: Builder[A, Repr]
+
+ /** Optimized, push-based version of `partition`. */
+ override def partition(p: A => Boolean): (Repr, Repr) = {
+ val l, r = newBuilder
+ coll.iterator.foreach(x => (if (p(x)) l else r) += x)
+ (l.result, r.result)
+ }
+
+ // one might also override other transforms here to avoid generating
+ // iterators if it helps efficiency.
+ }
+
+ /** Base trait for collection builders */
+ trait Builder[-A, +To] { self =>
+
+ /** Append an element */
+ def +=(x: A): this.type
+
+ /** Result collection consisting of all elements appended so far. */
+ def result: To
+
+ /** Bulk append. Can be overridden if specialized implementations are available. */
+ def ++=(xs: IterableOnce[A]): this.type = {
+ xs.iterator.foreach(+=)
+ this
+ }
+
+ /** A builder resulting from this builder by mapping the result using `f`. */
+ def mapResult[NewTo](f: To => NewTo) = new Builder[A, NewTo] {
+ def +=(x: A): this.type = { self += x; this }
+ override def ++=(xs: IterableOnce[A]): this.type = { self ++= xs; this }
+ def result: NewTo = f(self.result)
+ }
+ }
+
+ /* ------------ Operations ----------------------------------- */
+
+ /** Base trait for Iterable operations
+ *
+ * VarianceNote
+ * ============
+ *
+ * We require that for all child classes of Iterable the variance of
+ * the child class and the variance of the `C` parameter passed to `IterableLike`
+ * are the same. We cannot express this since we lack variance polymorphism. That's
+ * why we have to resort at some places to write `C[A @uncheckedVariance]`.
+ *
+ */
+ trait IterableLike[+A, +C[X] <: Iterable[X]]
+ extends FromIterable[C]
+ with IterableOps[A]
+ with IterableMonoTransforms[A, C[A @uncheckedVariance]] // sound bcs of VarianceNote
+ with IterablePolyTransforms[A, C] {
+
+ /** Create a collection of type `C[A]` from the elements of `coll`, which has
+ * the same element type as this collection. Overridden in StringOps and ArrayOps.
+ */
+ protected[this] def fromIterableWithSameElemType(coll: Iterable[A]): C[A] = fromIterable(coll)
+ }
+
+ /** Base trait for Seq operations */
+ trait SeqLike[+A, +C[X] <: Seq[X]]
+ extends IterableLike[A, C]
+ with SeqMonoTransforms[A, C[A @uncheckedVariance]] // sound bcs of VarianceNote
+
+ /** Base trait for linear Seq operations */
+ trait LinearSeqLike[+A, +C[X] <: LinearSeq[X]] extends SeqLike[A, C] {
+
+ /** Optimized version of `drop` that avoids copying
+ * Note: `drop` is defined here, rather than in a trait like `LinearSeqMonoTransforms`,
+ * because the `...MonoTransforms` traits make no assumption about the type of `Repr`
+ * whereas we need to assume here that `Repr` is the same as the underlying
+ * collection type.
+ */
+ override def drop(n: Int): C[A @uncheckedVariance] = { // sound bcs of VarianceNote
+ def loop(n: Int, s: Iterable[A]): C[A] =
+ if (n <= 0) s.asInstanceOf[C[A]]
+ // implicit contract to guarantee success of asInstanceOf:
+ // (1) coll is of type C[A]
+ // (2) The tail of a LinearSeq is of the same type as the type of the sequence itself
+ // it's surprisingly tricky/ugly to turn this into actual types, so we
+ // leave this contract implicit.
+ else loop(n - 1, s.tail)
+ loop(n, coll)
+ }
+ }
+
+ /** Operations over iterables. No operation defined here is generic in the
+ * type of the underlying collection.
+ */
+ trait IterableOps[+A] extends Any {
+ protected def coll: Iterable[A]
+ private def iterator = coll.iterator
+
+ /** Apply `f` to each element for its side effects */
+ def foreach(f: A => Unit): Unit = iterator.foreach(f)
+
+ /** Fold left */
+ def foldLeft[B](z: B)(op: (B, A) => B): B = iterator.foldLeft(z)(op)
+
+ /** Fold right */
+ def foldRight[B](z: B)(op: (A, B) => B): B = iterator.foldRight(z)(op)
+
+ /** The index of the first element in this collection for which `p` holds. */
+ def indexWhere(p: A => Boolean): Int = iterator.indexWhere(p)
+
+ /** Is the collection empty? */
+ def isEmpty: Boolean = !iterator.hasNext
+
+ /** The first element of the collection. */
+ def head: A = iterator.next()
+
+ /** The number of elements in this collection, if it can be cheaply computed,
+ * -1 otherwise. Cheaply usually means: Not requiring a collection traversal.
+ */
+ def knownSize: Int = -1
+
+ /** The number of elements in this collection. Does not terminate for
+ * infinite collections.
+ */
+ def size: Int = if (knownSize >= 0) knownSize else iterator.length
+
+ /** A view representing the elements of this collection. */
+ def view: View[A] = View.fromIterator(iterator)
+
+ /** Given a collection factory `fi` for collections of type constructor `C`,
+ * convert this collection to one of type `C[A]`. Example uses:
+ *
+ * xs.to(List)
+ * xs.to(ArrayBuffer)
+ */
+ def to[C[X] <: Iterable[X]](fi: FromIterable[C]): C[A @uncheckedVariance] =
+ // variance seems sound because `to` could just as well have been added
+ // as a decorator. We should investigate this further to be sure.
+ fi.fromIterable(coll)
+
+ /** Convert collection to array. */
+ def toArray[B >: A: ClassTag]: Array[B] =
+ if (knownSize >= 0) copyToArray(new Array[B](knownSize), 0)
+ else ArrayBuffer.fromIterable(coll).toArray[B]
+
+ /** Copy all elements of this collection to array `xs`, starting at `start`. */
+ def copyToArray[B >: A](xs: Array[B], start: Int = 0): xs.type = {
+ var i = start
+ val it = iterator
+ while (it.hasNext) {
+ xs(i) = it.next()
+ i += 1
+ }
+ xs
+ }
+
+ /** The class name of this collection. To be used for converting to string.
+ * Collections generally print like this:
+ *
+ * <className>(elem_1, ..., elem_n)
+ */
+ def className = getClass.getName
+
+ /** A string showing all elements of this collection, separated by string `sep`. */
+ def mkString(sep: String): String = {
+ var first: Boolean = true
+ val b = new StringBuilder()
+ foreach { elem =>
+ if (!first) b ++= sep
+ first = false
+ b ++= String.valueOf(elem)
+ }
+ b.result
+ }
+
+ override def toString = s"$className(${mkString(", ")})"
+ }
+
+ /** Type-preserving transforms over iterables.
+ * Operations defined here return in their result iterables of the same type
+ * as the one they are invoked on.
+ */
+ trait IterableMonoTransforms[+A, +Repr] extends Any {
+ protected def coll: Iterable[A]
+ protected[this] def fromIterableWithSameElemType(coll: Iterable[A]): Repr
+
+ /** All elements satisfying predicate `p` */
+ def filter(p: A => Boolean): Repr = fromIterableWithSameElemType(View.Filter(coll, p))
+
+ /** A pair of, first, all elements that satisfy predicate `p` and, second,
+ * all elements that do not. Interesting because it splits a collection in two.
+ *
+ * The default implementation provided here needs to traverse the collection twice.
+ * Strict collections have an overridden version of `partition` in `Buildable`,
+ * which requires only a single traversal.
+ */
+ def partition(p: A => Boolean): (Repr, Repr) = {
+ val pn = View.Partition(coll, p)
+ (fromIterableWithSameElemType(pn.left), fromIterableWithSameElemType(pn.right))
+ }
+
+ /** A collection containing the first `n` elements of this collection. */
+ def take(n: Int): Repr = fromIterableWithSameElemType(View.Take(coll, n))
+
+ /** The rest of the collection without its `n` first elements. For
+ * linear, immutable collections this should avoid making a copy.
+ */
+ def drop(n: Int): Repr = fromIterableWithSameElemType(View.Drop(coll, n))
+
+ /** The rest of the collection without its first element. */
+ def tail: Repr = drop(1)
+ }
+
+ /** Transforms over iterables that can return collections of different element types.
+ *
+ * @tparam A the element type of the source collection
+ * @tparam C the collection type constructor of the results
+ */
+ trait IterablePolyTransforms[+A, +C[A]] extends Any {
+ // The underlying collection the transforms read from.
+ protected def coll: Iterable[A]
+ // Builds a C[B] from any iterable; supplied by each concrete collection.
+ def fromIterable[B](coll: Iterable[B]): C[B]
+
+ /** The collection obtained by applying `f` to every element (lazily, via a view). */
+ def map[B](f: A => B): C[B] = fromIterable(View.Map(coll, f))
+
+ /** The collection obtained by applying `f` to every element and concatenating the results. */
+ def flatMap[B](f: A => IterableOnce[B]): C[B] = fromIterable(View.FlatMap(coll, f))
+
+ /** Concatenation with the elements of another collection or iterator. */
+ def ++[B >: A](xs: IterableOnce[B]): C[B] = fromIterable(View.Concat(coll, xs))
+
+ /** Zip. Interesting because it requires to align to source collections. */
+ def zip[B](xs: IterableOnce[B]): C[(A @uncheckedVariance, B)] = fromIterable(View.Zip(coll, xs))
+ // sound bcs of VarianceNote
+ }
+
+ /** Type-preserving transforms over sequences. */
+ trait SeqMonoTransforms[+A, +Repr] extends Any with IterableMonoTransforms[A, Repr] {
+   /** A collection of the same type with the elements in reverse order.
+    *
+    *  Indexed views reverse lazily via `IndexedView.Reverse`; everything else
+    *  reverses strictly by prepending each element onto a `List`.
+    */
+   def reverse: Repr = coll.view match {
+     case v: IndexedView[A] => // type argument is erased; only the IndexedView-ness is tested
+       fromIterableWithSameElemType(v.reverse)
+     case _ =>
+       var xs: List[A] = Nil
+       val it = coll.iterator // fixed: the iterator reference is never reassigned, so `val` not `var`
+       while (it.hasNext) xs = it.next() :: xs
+       fromIterableWithSameElemType(xs)
+   }
+ }
+
+ /* --------- Concrete collection types ------------------------------- */
+
+ /** Concrete collection type: List
+ *
+ * An immutable, strict, singly-linked list: O(1) prepend via `::`,
+ * strict building via `ListBuffer`.
+ */
+ sealed trait List[+A]
+ extends LinearSeq[A]
+ with SeqLike[A, List]
+ with Buildable[A, List[A]] {
+
+ def fromIterable[B](c: Iterable[B]): List[B] = List.fromIterable(c)
+
+ protected[this] def newBuilder = new ListBuffer[A].mapResult(_.toList)
+
+ /** Prepend element */
+ def :: [B >: A](elem: B): List[B] = new ::(elem, this)
+
+ /** Prepend operation that avoids copying this list */
+ def ++:[B >: A](prefix: List[B]): List[B] =
+ if (prefix.isEmpty) this
+ else prefix.head :: prefix.tail ++: this
+
+ /** When concatenating with another list `xs`, avoid copying `xs` */
+ override def ++[B >: A](xs: IterableOnce[B]): List[B] = xs match {
+ // The type argument is erased (unchecked match), but this is safe here:
+ // only list-ness is tested, and covariance makes List[A] a List[B] for B >: A.
+ case xs: List[B] => this ++: xs
+ case _ => super.++(xs)
+ }
+
+ override def className = "List"
+ }
+
+ /** The non-empty cons cell of a `List`.
+ * `next` is a `var` with `private[collections]` access so that `ListBuffer`
+ * can append to a list destructively while it is still being built.
+ */
+ case class :: [+A](x: A, private[collections] var next: List[A @uncheckedVariance]) // sound because `next` is used only locally
+ extends List[A] {
+ override def isEmpty = false
+ override def head = x
+ override def tail = next
+ }
+
+ /** The empty list. `head` and `tail` are undefined; `???` makes them fail
+ * with `NotImplementedError`. NOTE(review): the standard library throws
+ * `NoSuchElementException` for `Nil.head`/`Nil.tail` — consider aligning.
+ */
+ case object Nil extends List[Nothing] {
+ override def isEmpty = true
+ override def head = ???
+ override def tail = ???
+ }
+
+ object List extends IterableFactory[List] {
+ /** Avoids a copy when `coll` is already a `List`; otherwise builds strictly via `ListBuffer`. */
+ def fromIterable[B](coll: Iterable[B]): List[B] = coll match {
+ case coll: List[B] => coll // erased (unchecked) match; safe because List is covariant
+ case _ => ListBuffer.fromIterable(coll).toList
+ }
+ }
+
+ /** Concrete collection type: ListBuffer
+ *
+ * A mutable buffer backed by a destructively-extended `List`.
+ * Invariant: `first` holds all `len` elements; `last` is the final `::`
+ * cell (or `Nil` while empty); `aliased` records that `first` has been
+ * handed out via `toList`, so the next mutation must copy first.
+ */
+ class ListBuffer[A]
+ extends Seq[A]
+ with SeqLike[A, ListBuffer]
+ with Buildable[A, ListBuffer[A]]
+ with Builder[A, ListBuffer[A]] {
+
+ private var first, last: List[A] = Nil
+ private var aliased = false
+ private var len = 0
+
+ def iterator = first.iterator
+
+ def fromIterable[B](coll: Iterable[B]) = ListBuffer.fromIterable(coll)
+
+ def apply(i: Int) = first.apply(i)
+
+ def length = len
+ override def knownSize = len
+
+ protected[this] def newBuilder = new ListBuffer[A]
+
+ // Rebuilds the underlying list so it is no longer shared with a list
+ // previously returned by `toList` (`result` is `this`, so this copies the
+ // current elements into a fresh buffer and adopts its cells).
+ private def copyElems(): Unit = {
+ val buf = ListBuffer.fromIterable(result)
+ first = buf.first
+ last = buf.last
+ aliased = false
+ }
+
+ /** Convert to list; avoids copying where possible. */
+ def toList = {
+ aliased = true
+ first
+ }
+
+ // Appends in O(1) by patching the trailing `::` cell; copies the cells
+ // first if they are shared with a `toList` result.
+ def +=(elem: A) = {
+ if (aliased) copyElems()
+ val last1 = elem :: Nil
+ last match {
+ case last: ::[A] => last.next = last1
+ case _ => first = last1
+ }
+ last = last1
+ len += 1
+ this
+ }
+
+ def result = this
+
+ override def className = "ListBuffer"
+ }
+
+ object ListBuffer extends IterableFactory[ListBuffer] {
+   /** A fresh `ListBuffer` containing the elements of `coll`. */
+   def fromIterable[B](coll: Iterable[B]): ListBuffer[B] = {
+     val buf = new ListBuffer[B]
+     buf ++= coll
+   }
+ }
+
+ /** Concrete collection type: ArrayBuffer
+ *
+ * A growable buffer over a boxed array.
+ * Invariant: the live elements are `elems(start)` until `elems(end - 1)`;
+ * `start` advances when leading elements are trimmed.
+ */
+ class ArrayBuffer[A] private (initElems: Array[AnyRef], initLength: Int)
+ extends Seq[A]
+ with SeqLike[A, ArrayBuffer]
+ with Buildable[A, ArrayBuffer[A]]
+ with Builder[A, ArrayBuffer[A]] {
+
+ def this() = this(new Array[AnyRef](16), 0)
+
+ private var elems: Array[AnyRef] = initElems
+ private var start = 0
+ private var end = initLength
+
+ def apply(n: Int) = elems(start + n).asInstanceOf[A]
+
+ def length = end - start
+ override def knownSize = length
+
+ override def view = new ArrayBufferView(elems, start, end)
+
+ def iterator = view.iterator
+
+ def fromIterable[B](it: Iterable[B]): ArrayBuffer[B] =
+ ArrayBuffer.fromIterable(it)
+
+ protected[this] def newBuilder = new ArrayBuffer[A]
+
+ /** Append `elem`, first compacting trimmed leading space or growing the array. */
+ def +=(elem: A): this.type = {
+ if (end == elems.length) {
+ if (start > 0) {
+ // Reclaim the space freed by trimStart before allocating a bigger array.
+ Array.copy(elems, start, elems, 0, length)
+ end -= start
+ start = 0
+ }
+ else {
+ // Fixed: `end * 2` is 0 when the buffer was created over an empty array
+ // (e.g. ArrayBuffer.fromIterable of an empty collection with known size),
+ // which never grew the array and then indexed out of bounds below.
+ val newelems = new Array[AnyRef](if (end == 0) 16 else end * 2)
+ Array.copy(elems, 0, newelems, 0, end)
+ elems = newelems
+ }
+ }
+ elems(end) = elem.asInstanceOf[AnyRef]
+ end += 1
+ this
+ }
+
+ def result = this
+
+ /** New operation: destructively drop elements at start of buffer.
+ * Fixed: clamp at `end` so over-trimming can no longer make `length` negative.
+ */
+ def trimStart(n: Int): Unit = start = (start + (n max 0)) min end
+
+ /** Overridden to use array copying for efficiency where possible. */
+ override def ++[B >: A](xs: IterableOnce[B]): ArrayBuffer[B] = xs match {
+ case xs: ArrayBuffer[B] =>
+ val elems = new Array[AnyRef](length + xs.length)
+ Array.copy(this.elems, this.start, elems, 0, this.length)
+ Array.copy(xs.elems, xs.start, elems, this.length, xs.length)
+ new ArrayBuffer(elems, elems.length)
+ case _ => super.++(xs)
+ }
+
+ override def take(n: Int) = {
+ // Fixed: clamp `n` at 0 so a negative argument yields an empty buffer
+ // instead of a NegativeArraySizeException (matches View.Take's `n max 0`).
+ val elems = new Array[AnyRef]((n max 0) min length)
+ Array.copy(this.elems, this.start, elems, 0, elems.length)
+ new ArrayBuffer(elems, elems.length)
+ }
+
+ override def className = "ArrayBuffer"
+ }
+
+ object ArrayBuffer extends IterableFactory[ArrayBuffer] {
+
+ /** Avoid reallocation of buffer if length is known.
+ * `knownSize` is hoisted into a local: for views (e.g. `View.Concat`) it is
+ * recomputed on every call, and the original read it three times.
+ */
+ def fromIterable[B](coll: Iterable[B]): ArrayBuffer[B] = {
+ val size = coll.knownSize
+ if (size >= 0) {
+ val elems = new Array[AnyRef](size)
+ val it = coll.iterator
+ for (i <- 0 until elems.length) elems(i) = it.next().asInstanceOf[AnyRef]
+ new ArrayBuffer[B](elems, elems.length)
+ }
+ else new ArrayBuffer[B] ++= coll
+ }
+ }
+
+ /** An indexed view over the `[start, end)` slice of a boxed array. */
+ class ArrayBufferView[A](val elems: Array[AnyRef], val start: Int, val end: Int) extends IndexedView[A] {
+   def length: Int = end - start
+   def apply(n: Int): A = {
+     val elem = elems(start + n)
+     elem.asInstanceOf[A]
+   }
+   override def className = "ArrayBufferView"
+ }
+
+ /** A lazily evaluated linked sequence.
+ * The by-name `expr` (the head/tail pair, or `None` if empty) is evaluated
+ * at most once, on first access, and cached in `result`. No synchronization
+ * is performed, so concurrent forcing is unprotected.
+ */
+ class LazyList[+A](expr: => LazyList.Evaluated[A])
+ extends LinearSeq[A] with SeqLike[A, LazyList] {
+ private[this] var evaluated = false
+ private[this] var result: LazyList.Evaluated[A] = _
+
+ /** Evaluates and caches the outermost cell; `None` means the list is empty. */
+ def force: LazyList.Evaluated[A] = {
+ if (!evaluated) {
+ result = expr
+ evaluated = true
+ }
+ result
+ }
+
+ override def isEmpty = force.isEmpty
+ override def head = force.get._1
+ override def tail = force.get._2
+
+ /** Lazy prepend: the `Some` cell (and hence `elem`) is built only when the
+ * new list is forced.
+ */
+ def #:: [B >: A](elem: => B): LazyList[B] = new LazyList(Some((elem, this)))
+
+ def fromIterable[B](c: Iterable[B]): LazyList[B] = LazyList.fromIterable(c)
+
+ override def className = "LazyList"
+
+ // Only the already-forced outermost cell is shown; forcing is never
+ // triggered by toString.
+ override def toString =
+ if (evaluated)
+ result match {
+ case None => "Empty"
+ case Some((hd, tl)) => s"$hd #:: $tl"
+ }
+ else "LazyList(?)"
+ }
+
+ object LazyList extends IterableFactory[LazyList] {
+
+ /** The forced form of a lazy list: `None` if empty, else head and tail. */
+ type Evaluated[+A] = Option[(A, LazyList[A])]
+
+ /** The empty lazy list. */
+ object Empty extends LazyList[Nothing](None)
+
+ /** Extractor that forces the outermost cell. */
+ object #:: {
+ def unapply[A](s: LazyList[A]): Evaluated[A] = s.force
+ }
+
+ def fromIterable[B](coll: Iterable[B]): LazyList[B] = coll match {
+ case coll: LazyList[B] => coll // already lazy: avoid re-wrapping
+ case _ => fromIterator(coll.iterator)
+ }
+
+ // The iterator is advanced only as cells are forced; it must not be used
+ // elsewhere once handed to the resulting lazy list.
+ def fromIterator[B](it: Iterator[B]): LazyList[B] =
+ new LazyList(if (it.hasNext) Some(it.next(), fromIterator(it)) else None)
+ }
+
+ // ------------------ Decorators to add collection ops to existing types -----------------------
+
+ /** Decorator to add collection operations to strings.
+ *
+ * A value class (`AnyVal`), so wrapping allocates nothing. Note that `coll`
+ * builds a fresh `StringView` on every access. The `map`/`flatMap`/`++`
+ * overloads below deliberately shadow the inherited polymorphic versions
+ * for the Char/String case so those operations yield a `String`.
+ */
+ implicit class StringOps(val s: String)
+ extends AnyVal with IterableOps[Char]
+ with SeqMonoTransforms[Char, String]
+ with IterablePolyTransforms[Char, List]
+ with Buildable[Char, String]
+ with ArrayLike[Char] {
+
+ protected def coll = new StringView(s)
+ def iterator = coll.iterator
+
+ // Rebuilds a String from any iterable of Chars (used by the mono transforms).
+ protected def fromIterableWithSameElemType(coll: Iterable[Char]): String = {
+ val sb = new StringBuilder
+ for (ch <- coll) sb += ch
+ sb.result
+ }
+
+ def fromIterable[B](coll: Iterable[B]): List[B] = List.fromIterable(coll)
+
+ protected[this] def newBuilder = new StringBuilder
+
+ def length = s.length
+ def apply(i: Int) = s.charAt(i)
+
+ override def knownSize = s.length
+
+ override def className = "String"
+
+ /** Overloaded version of `map` that gives back a string, where the inherited
+ * version gives back a sequence.
+ */
+ def map(f: Char => Char): String = {
+ val sb = new StringBuilder
+ for (ch <- s) sb += f(ch)
+ sb.result
+ }
+
+ /** Overloaded version of `flatMap` that gives back a string, where the inherited
+ * version gives back a sequence.
+ */
+ def flatMap(f: Char => String): String = {
+ val sb = new StringBuilder
+ for (ch <- s) sb ++= f(ch)
+ sb.result
+ }
+
+ /** Overloaded version of `++` that gives back a string, where the inherited
+ * version gives back a sequence.
+ */
+ def ++(xs: IterableOnce[Char]): String = {
+ val sb = new StringBuilder() ++= s
+ for (ch <- xs.iterator) sb += ch
+ sb.result
+ }
+
+ /** Another overloaded version of `++`. */
+ def ++(xs: String): String = s + xs
+ }
+
+ /** A builder that accumulates characters in a `java.lang.StringBuilder`
+  *  and produces the built `String` as its result.
+  */
+ class StringBuilder extends Builder[Char, String] {
+   private val underlying = new java.lang.StringBuilder
+
+   def += (x: Char) = {
+     underlying.append(x)
+     this
+   }
+
+   /** Overloaded version of `++=` that takes a string */
+   def ++= (s: String) = {
+     underlying.append(s)
+     this
+   }
+
+   def result = underlying.toString
+
+   override def toString = result
+ }
+
+ /** An indexed view over the characters of a string. */
+ case class StringView(s: String) extends IndexedView[Char] {
+   def length: Int = s.length
+   def apply(n: Int): Char = s.charAt(n)
+   override def className = "StringView"
+ }
+
+ /** Decorator to add collection operations to arrays.
+ *
+ * A value class (`AnyVal`); `coll` builds a fresh `ArrayView` per access.
+ * Because building an `Array[B]` needs a `ClassTag[B]`, this does not mix in
+ * `IterablePolyTransforms`; instead `map`/`flatMap`/`++`/`zip` are redefined
+ * below with ClassTag context bounds.
+ */
+ implicit class ArrayOps[A](val xs: Array[A])
+ extends AnyVal with IterableOps[A]
+ with SeqMonoTransforms[A, Array[A]]
+ with Buildable[A, Array[A]]
+ with ArrayLike[A] {
+
+ protected def coll = new ArrayView(xs)
+ def iterator = coll.iterator
+
+ def length = xs.length
+ def apply(i: Int) = xs.apply(i)
+
+ override def view = new ArrayView(xs)
+
+ // Element tag recovered from the runtime component type of the array.
+ def elemTag: ClassTag[A] = ClassTag(xs.getClass.getComponentType)
+
+ protected def fromIterableWithSameElemType(coll: Iterable[A]): Array[A] = coll.toArray[A](elemTag)
+
+ def fromIterable[B: ClassTag](coll: Iterable[B]): Array[B] = coll.toArray[B]
+
+ protected[this] def newBuilder = new ArrayBuffer[A].mapResult(_.toArray(elemTag))
+
+ override def knownSize = xs.length
+
+ override def className = "Array"
+
+ // ClassTag-carrying versions of the polymorphic transforms (see class note).
+ def map[B: ClassTag](f: A => B): Array[B] = fromIterable(View.Map(coll, f))
+ def flatMap[B: ClassTag](f: A => IterableOnce[B]): Array[B] = fromIterable(View.FlatMap(coll, f))
+ def ++[B >: A : ClassTag](xs: IterableOnce[B]): Array[B] = fromIterable(View.Concat(coll, xs))
+ def zip[B: ClassTag](xs: IterableOnce[B]): Array[(A, B)] = fromIterable(View.Zip(coll, xs))
+ }
+
+ /** An indexed view over the elements of an array. */
+ case class ArrayView[A](xs: Array[A]) extends IndexedView[A] {
+   def length: Int = xs.length
+   def apply(n: Int): A = xs(n)
+   override def className = "ArrayView"
+ }
+
+ /* ---------- Views -------------------------------------------------------*/
+
+ /** Concrete collection type: View
+ *
+ * A lazy `Iterable`: elements are produced from the underlying source each
+ * time `iterator` is requested, so no intermediate collection is built.
+ */
+ trait View[+A] extends Iterable[A] with IterableLike[A, View] {
+ override def view = this
+
+ /** Avoid copying if source collection is already a view. */
+ override def fromIterable[B](c: Iterable[B]): View[B] = c match {
+ case c: View[B] => c
+ case _ => View.fromIterator(c.iterator)
+ }
+ override def className = "View"
+ }
+
+ /** This object reifies operations on views as case classes.
+ *
+ * Convention used throughout: `knownSize` returns -1 when the size cannot
+ * be computed without traversing the underlying collection.
+ */
+ object View {
+ // `it` is by-name: every call to `iterator` re-evaluates it, so the view
+ // can be traversed more than once (if the expression allows it).
+ def fromIterator[A](it: => Iterator[A]): View[A] = new View[A] {
+ def iterator = it
+ }
+
+ /** The empty view */
+ case object Empty extends View[Nothing] {
+ def iterator = Iterator.empty
+ override def knownSize = 0
+ }
+
+ /** A view with given elements */
+ case class Elems[A](xs: A*) extends View[A] {
+ def iterator = Iterator(xs: _*)
+ override def knownSize = xs.length // should be: xs.knownSize, but A*'s are not sequences in this strawman.
+ }
+
+ /** A view that filters an underlying collection. */
+ case class Filter[A](val underlying: Iterable[A], p: A => Boolean) extends View[A] {
+ def iterator = underlying.iterator.filter(p)
+ }
+
+ /** A view that partitions an underlying collection into two views.
+ * Not itself a view: it just pairs the two `Partitioned` halves, each of
+ * which traverses `underlying` independently.
+ */
+ case class Partition[A](val underlying: Iterable[A], p: A => Boolean) {
+
+ /** The view consisting of all elements of the underlying collection
+ * that satisfy `p`.
+ */
+ val left = Partitioned(this, true)
+
+ /** The view consisting of all elements of the underlying collection
+ * that do not satisfy `p`.
+ */
+ val right = Partitioned(this, false)
+ }
+
+ /** A view representing one half of a partition.
+ * `cond` selects the half: true keeps elements satisfying `partition.p`.
+ */
+ case class Partitioned[A](partition: Partition[A], cond: Boolean) extends View[A] {
+ def iterator = partition.underlying.iterator.filter(x => partition.p(x) == cond)
+ }
+
+ /** A view that drops leading elements of the underlying collection. */
+ case class Drop[A](underlying: Iterable[A], n: Int) extends View[A] {
+ def iterator = underlying.iterator.drop(n)
+ protected val normN = n max 0 // negative counts behave like 0
+ override def knownSize =
+ if (underlying.knownSize >= 0) (underlying.knownSize - normN) max 0 else -1
+ }
+
+ /** A view that takes leading elements of the underlying collection. */
+ case class Take[A](underlying: Iterable[A], n: Int) extends View[A] {
+ def iterator = underlying.iterator.take(n)
+ protected val normN = n max 0 // negative counts behave like 0
+ override def knownSize =
+ if (underlying.knownSize >= 0) underlying.knownSize min normN else -1
+ }
+
+ /** A view that maps elements of the underlying collection. */
+ case class Map[A, B](underlying: Iterable[A], f: A => B) extends View[B] {
+ def iterator = underlying.iterator.map(f)
+ override def knownSize = underlying.knownSize
+ }
+
+ /** A view that flatmaps elements of the underlying collection. */
+ case class FlatMap[A, B](underlying: Iterable[A], f: A => IterableOnce[B]) extends View[B] {
+ def iterator = underlying.iterator.flatMap(f)
+ }
+
+ /** A view that concatenates elements of the underlying collection with the elements
+ * of another collection or iterator.
+ */
+ case class Concat[A](underlying: Iterable[A], other: IterableOnce[A]) extends View[A] {
+ def iterator = underlying.iterator ++ other
+ override def knownSize = other match {
+ case other: Iterable[_] if underlying.knownSize >= 0 && other.knownSize >= 0 =>
+ underlying.knownSize + other.knownSize
+ case _ =>
+ -1
+ }
+ }
+
+ /** A view that zips elements of the underlying collection with the elements
+ * of another collection or iterator.
+ */
+ case class Zip[A, B](underlying: Iterable[A], other: IterableOnce[B]) extends View[(A, B)] {
+ def iterator = underlying.iterator.zip(other)
+ override def knownSize = other match {
+ case other: Iterable[_] if underlying.knownSize >= 0 && other.knownSize >= 0 =>
+ underlying.knownSize min other.knownSize
+ case _ =>
+ -1
+ }
+ }
+ }
+
+ /** View defined in terms of indexing a range */
+ trait IndexedView[+A] extends View[A] with ArrayLike[A] { self =>
+
+   /** Iterates by index: each `next()` reads `apply(current)` and advances. */
+   def iterator: Iterator[A] = new Iterator[A] {
+     private var current = 0
+     def hasNext = current < self.length
+     def next(): A = { // fixed: keep the empty-paren form of the overridden side-effecting `Iterator.next()`
+       val r = apply(current)
+       current += 1
+       r
+     }
+   }
+
+   // Indexed-specific lazy implementations that preserve random access.
+   override def take(n: Int): IndexedView[A] = new IndexedView.Take(this, n)
+   override def drop(n: Int): IndexedView[A] = new IndexedView.Drop(this, n)
+   override def map[B](f: A => B): IndexedView[B] = new IndexedView.Map(this, f)
+   def reverse: IndexedView[A] = new IndexedView.Reverse(this)
+ }
+
+ /** Indexed (random-access preserving) versions of the basic view operations. */
+ object IndexedView {
+
+   class Take[A](underlying: IndexedView[A], n: Int)
+   extends View.Take(underlying, n) with IndexedView[A] {
+     override def iterator = super.iterator // needed to avoid "conflicting overrides" error
+     def length = underlying.length min normN
+     def apply(i: Int) = underlying.apply(i)
+   }
+
+   // Fixed: this extended View.Take(underlying, n), so the inherited
+   // `iterator` *took* the first n elements while `length`/`apply` dropped
+   // them — iteration and indexed access disagreed. It must extend View.Drop.
+   class Drop[A](underlying: IndexedView[A], n: Int)
+   extends View.Drop(underlying, n) with IndexedView[A] {
+     override def iterator = super.iterator
+     def length = (underlying.length - normN) max 0
+     def apply(i: Int) = underlying.apply(i + normN)
+   }
+
+   class Map[A, B](underlying: IndexedView[A], f: A => B)
+   extends View.Map(underlying, f) with IndexedView[B] {
+     override def iterator = super.iterator
+     def length = underlying.length
+     def apply(n: Int) = f(underlying.apply(n))
+   }
+
+   /** Lazily reversed indexed view: index i maps to `length - 1 - i`. */
+   case class Reverse[A](underlying: IndexedView[A]) extends IndexedView[A] {
+     def length = underlying.length
+     def apply(i: Int) = underlying.apply(length - 1 - i)
+   }
+ }
+
+/* ---------- Iterators ---------------------------------------------------*/
+
+ /** A core Iterator class.
+ * Iterators are single-use: all the consuming operations below advance (and
+ * usually exhaust) `this`; the lazy combinators (`filter`, `map`, …) return
+ * wrappers that also advance `this` as they are consumed.
+ */
+ trait Iterator[+A] extends IterableOnce[A] { self =>
+   def hasNext: Boolean
+   def next(): A
+   def iterator = this
+   /** Folds the remaining elements left to right.
+    *  Fixed: iterate with a loop instead of non-tail self-recursion on a
+    *  virtual method, which overflowed the stack on long iterators.
+    */
+   def foldLeft[B](z: B)(op: (B, A) => B): B = {
+     var acc = z
+     while (hasNext) acc = op(acc, next())
+     acc
+   }
+   /** Folds the remaining elements right to left.
+    *  NOTE: inherently recursive; stack depth grows with the element count.
+    */
+   def foldRight[B](z: B)(op: (A, B) => B): B =
+     if (hasNext) op(next(), foldRight(z)(op)) else z
+   def foreach(f: A => Unit): Unit =
+     while (hasNext) f(next())
+   /** Index of the first element satisfying `p`, or -1; consumes the iterator
+    *  up to and including that element.
+    */
+   def indexWhere(p: A => Boolean): Int = {
+     var i = 0
+     while (hasNext) {
+       if (p(next())) return i
+       i += 1
+     }
+     -1
+   }
+   /** Number of remaining elements; exhausts the iterator. */
+   def length = {
+     var len = 0
+     while (hasNext) { len += 1; next() }
+     len
+   }
+   /** Lazily keeps only elements satisfying `p`; buffers one element ahead. */
+   def filter(p: A => Boolean): Iterator[A] = new Iterator[A] {
+     private var hd: A = _
+     private var hdDefined: Boolean = false
+
+     def hasNext: Boolean = hdDefined || {
+       do {
+         if (!self.hasNext) return false
+         hd = self.next()
+       } while (!p(hd))
+       hdDefined = true
+       true
+     }
+
+     def next() =
+       if (hasNext) {
+         hdDefined = false
+         hd
+       }
+       else Iterator.empty.next()
+   }
+   /** Lazily applies `f` to each element. */
+   def map[B](f: A => B): Iterator[B] = new Iterator[B] {
+     def hasNext = self.hasNext
+     def next() = f(self.next())
+   }
+
+   /** Lazily applies `f` and flattens; keeps at most one inner iterator live. */
+   def flatMap[B](f: A => IterableOnce[B]): Iterator[B] = new Iterator[B] {
+     private var myCurrent: Iterator[B] = Iterator.empty
+     private def current = {
+       while (!myCurrent.hasNext && self.hasNext)
+         myCurrent = f(self.next()).iterator
+       myCurrent
+     }
+     def hasNext = current.hasNext
+     def next() = current.next()
+   }
+   /** Concatenation: exhausts `this`, then switches (once) to `xs`. */
+   def ++[B >: A](xs: IterableOnce[B]): Iterator[B] = new Iterator[B] {
+     private var myCurrent: Iterator[B] = self
+     private var first = true
+     private def current = {
+       if (!myCurrent.hasNext && first) {
+         myCurrent = xs.iterator
+         first = false
+       }
+       myCurrent
+     }
+     def hasNext = current.hasNext
+     def next() = current.next()
+   }
+   /** At most the next `n` elements. */
+   def take(n: Int): Iterator[A] = new Iterator[A] {
+     private var i = 0
+     def hasNext = self.hasNext && i < n
+     def next() = // fixed: keep the empty-paren form of the overridden `next()`
+       if (hasNext) {
+         i += 1
+         self.next()
+       }
+       else Iterator.empty.next()
+   }
+   /** Eagerly skips up to `n` elements, then returns this same iterator. */
+   def drop(n: Int): Iterator[A] = {
+     var i = 0
+     while (i < n && hasNext) {
+       next()
+       i += 1
+     }
+     this
+   }
+   /** Pairs elements with those of `that`; stops at the shorter side. */
+   def zip[B](that: IterableOnce[B]): Iterator[(A, B)] = new Iterator[(A, B)] {
+     val thatIterator = that.iterator
+     def hasNext = self.hasNext && thatIterator.hasNext
+     def next() = (self.next(), thatIterator.next())
+   }
+ }
+
+ object Iterator {
+   /** The iterator with no elements; `next` always fails. */
+   val empty: Iterator[Nothing] = new Iterator[Nothing] {
+     def hasNext = false
+     def next = throw new NoSuchElementException("next on empty iterator")
+   }
+   /** An iterator over the given elements, driven by an indexed view. */
+   def apply[A](xs: A*): Iterator[A] = {
+     val view = new IndexedView[A] {
+       val length = xs.length
+       def apply(n: Int) = xs(n)
+     }
+     view.iterator
+   }
+ }
+}